(function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : typeof define === 'function' && define.amd ? define(factory) : (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global["jessibuca-pro"] = factory()); })(this, (function () { 'use strict'; // 播放协议 const PLAYER_PLAY_PROTOCOL = { websocket: 1, fetch: 2, hls: 3, webrtc: 4, webTransport: 5, aliyunRtc: 6, ts: 7 }; const PLAYER_PLAY_PROTOCOL_LIST = ['', "websocket", "fetch", "hls", "webrtc", "webTransport", "aliyunRtc", "ts"]; const PLAYER_STREAM_TYPE = { fetch: 'fetch', hls: 'hls', websocket: 'websocket', webrtc: 'webrtc', webTransport: 'webTransport', worker: 'worker', aliyunRtc: 'aliyunRtc' }; // 播放 const PLAY_TYPE = { player: "player", playerAudio: 'playerAudio', playbackTF: 'playbackTF' }; const FILE_SUFFIX = { mp4: 'mp4', webm: 'webm', flv: 'flv', mov: 'mov' }; const DEMUX_TYPE = { flv: 'flv', m7s: 'm7s', hls: 'hls', webrtc: 'webrtc', webTransport: 'webTransport', nakedFlow: 'nakedFlow', fmp4: 'fmp4', mpeg4: 'mpeg4', aliyunRtc: 'aliyunRtc', ts: 'ts' }; const DEMUX_TYPE_SHOW = { flv: 'FLV', m7s: 'M7S', hls: 'HLS', fmp4: 'FMP4', mpeg4: 'MPEG4', webrtc: 'Webrtc', webTransport: 'WebTransport', nakedFlow: '裸流', aliyunRtc: 'AliyunRtc', ts: 'TS' }; const DECODE_TYPE = { mse: 'mse', wcs: 'wcs', offscreen: 'offscreen', wasm: 'wasm', simd: 'simd', mt: 'mt', webrtc: 'webrtc', hls: 'hls', aliyunRtc: 'aliyunRtc' }; const RENDER_TYPE = { canvas: 'canvas', video: 'video' }; const DEBUG_LEVEL = { debug: 'debug', warn: 'warn' }; const PTZ_ACTIVE_EVENT_TYPE = { click: 'click', mouseDownAndUp: 'mouseDownAndUp' }; const PLAYBACK_CONTROL_TYPE = { normal: 'normal', // default normal 24H simple: 'simple' }; const PLAYER_NAME = 'JessibucaPro'; const DEFAULT_PLAYBACK_FORWARD_MAX_RATE_DECODE_IFRAME = 4; // default playback forward max rate decode iframe const MSE_MAX_DELAY_TIME = 5; // 单位 秒 const MSE_DELAY_INCREASE_TIME = 3; // 单位 秒 const PLAYER_RESIZE_TIME = 500; const FLV_BUFFER_LARGE_SIZE = 1024 * 1024; // 1024kb = 1mb const AUDIO_CHANNEL_MAX = 2; // max support 2 channel const FRAME_TS_MAX_DIFF = 1000 * 60 * 60; // 1 hour const VIDEO_PAYLOAD_MIN_SIZE = 12; // video payload min size: 12 byte const CRYPTO_KEY_URL_PATH = '/crypto/'; const CONTAINER_DATA_SET_KEY = 'jbprov'; const VIDEO_ELEMENT_RETRY_PLAY_MAX_TIME = 3; // video element retry play max time const URL_OBJECT_CLEAR_TIME = 10 * 1000; // url object clear time const DEMUX_LOOP_INTERVAL_TIMES = 20; // 20ms const ERROR_MESSAGE_TIPS = { webglAlignmentError: 'Webgl 渲染失败', webglContextLostError: 'webgl 上下文丢失', mediaSourceH265NotSupport: '不支持硬解码H265', mediaSourceFull: '缓冲区已满', mediaSourceAppendBufferError: '初始化解码器失败', mseSourceBufferError: '解码失败', mseAddSourceBufferError: '初始化解码器失败', mediaSourceDecoderConfigurationError: '初始化解码器失败', mediaSourceTsIsMaxDiff: '流异常', mseWidthOrHeightChange: '流异常', mediaSourceAudioG711NotSupport: '硬解码不支持G711a/u音频格式', mediaSourceUseCanvasRenderPlayFailed: 'MediaSource解码使用canvas渲染失败', webcodecsH265NotSupport: '不支持硬解码H265', webcodecsUnsupportedConfigurationError: '初始化解码器失败', webcodecsDecodeConfigureError: '初始化解码器失败', webcodecsDecodeError: '解码失败', wcsWidthOrHeightChange: '解码失败', wasmDecodeError: '解码失败', simdDecodeError: '解码失败', wasmWidthOrHeightChange: '流异常', wasmUseVideoRenderError: 'video自动渲染失败', videoElementPlayingFailed: 'video自动渲染失败', simdH264DecodeVideoWidthIsTooLarge: '不支持该分辨率的视频', networkDelayTimeout: '网络超时重播失败', fetchError: '请求失败', streamEnd: '请求结束', websocketError: '请求失败', 
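// Illustrative note (not part of the library logic): PLAYER_PLAY_PROTOCOL maps protocol
// names to 1-based numeric codes and PLAYER_PLAY_PROTOCOL_LIST maps those codes back to
// names (index 0 is a placeholder), so the two structures round-trip:
//   PLAYER_PLAY_PROTOCOL.websocket === 1
//   PLAYER_PLAY_PROTOCOL_LIST[PLAYER_PLAY_PROTOCOL.websocket] === 'websocket'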
webrtcError: '请求失败', hlsError: '请求失败', decoderWorkerInitError: '初始化worker失败', videoElementPlayingFailedForWebrtc: 'video自动渲染失败', videoInfoError: '解析视频分辨率失败', webrtcStreamH265: 'webrtc不支持H265', delayTimeout: '播放超时重播失败', loadingTimeout: '加载超时重播失败', loadingTimeoutRetryEnd: '加载超时重播失败', delayTimeoutRetryEnd: '播放超时重播失败' }; const DEFAULT_JESSIBUCA_OPTIONS = { url: '', playbackConfig: {}, fullscreenWatermarkConfig: {}, // 全屏水印设置。 playType: PLAY_TYPE.player, playbackForwardMaxRateDecodeIFrame: DEFAULT_PLAYBACK_FORWARD_MAX_RATE_DECODE_IFRAME, playOptions: {}, isLive: true, isMulti: true, // 默认就是显示uuid isM7sCrypto: false, supportHls265: false, playFailedUseLastFrameShow: true, // 播放失败(异常原因)后,使用最后一帧显示。 playFailedAndPausedShowMessage: false, // 播放失败并暂停,是否显示提示信息。 pauseAndNextPlayUseLastFrameShow: false, widthOrHeightChangeReplayDelayTime: 0, // 单位秒,宽高变化重新播放延迟时间 isUseNewFullscreenWatermark: false }; // default player options const DEFAULT_PLAYER_OPTIONS = { playType: PLAY_TYPE.player, // inner container: '', // videoBuffer: 1 * 1000, // 1* 1000ms == 1 second videoBufferDelay: 1 * 1000, // 1 * 1000ms networkDelay: 10 * 1000, // 10 * 1000ms isResize: true, // isFullResize: false, // full resize isFlv: false, // flv isHls: false, // hls(inner) isFmp4: false, // fmp4, isFmp4Private: false, // 是否是fmp4私有协议 isWebrtc: false, // webrtc (inner) isWebrtcForZLM: false, // webrtc for ZLM isWebrtcForSRS: false, // webrtc for SRS isWebrtcForOthers: false, // webrtc for others isNakedFlow: false, // 是否是裸流(264、265) isMpeg4: false, // 是否是mpeg4 isAliyunRtc: false, // 是否是阿里云rtc isTs: false, // 是否是ts流 debug: false, // debug log debugLevel: DEBUG_LEVEL.warn, // log level debugUuid: '', // debug uuid (inner) isMulti: true, // 是否多实例播放(配置log用的) multiIndex: -1, // 多实例播放的index hotKey: false, // 快捷键 loadingTimeout: 10, // loading timeout 单位秒 heartTimeout: 10, // heart timeout 单位秒 timeout: 10, // timeout 单位秒 pageVisibilityHiddenTimeout: 5 * 60, // 5 * 60 = 5 minute loadingTimeoutReplay: true, // loading timeout replay heartTimeoutReplay: true, // heart timeout replay。 loadingTimeoutReplayTimes: 3, // loading timeout replay fail times heartTimeoutReplayTimes: 3, // heart timeout replay fail times heartTimeoutReplayUseLastFrameShow: true, // heart timeout replay use last frame replayUseLastFrameShow: true, // replay use last frame replayShowLoadingIcon: false, // replay show loading icon supportDblclickFullscreen: false, showBandwidth: false, // showPerformance: false, // 是否显示性能面板 mseCorrectTimeDuration: 20, // mse correct time duration 20ms mseCorrectAudioTimeDuration: 20, // mse correct audio time duration 20ms keepScreenOn: true, // keep screen on isNotMute: false, hasAudio: true, hasVideo: true, operateBtns: { fullscreen: false, screenshot: false, play: false, audio: false, record: false, ptz: false, quality: false, zoom: false, close: false, scale: false, performance: false, logSave: false, aiFace: false, aiObject: false, aiOcclusion: false, fullscreenFn: null, fullscreenExitFn: null, screenshotFn: null, playFn: null, pauseFn: null, recordFn: null, recordStopFn: null }, extendOperateBtns: [], contextmenuBtns: [], watermarkConfig: {}, // 局部水印设置 controlAutoHide: false, hasControl: false, // inner params loadingIcon: true, // is show loading icon loadingIconStyle: {}, // loading icon style loadingText: '', background: '', backgroundLoadingShow: true, // 加载过程中是否显示背景 loadingBackground: '', // 内部参数 重新播放过程中,显示播放失败前的最后一帧数据。 loadingBackgroundWidth: 0, // 内部参数 重新播放过程中,显示播放失败前的最后一帧数据 width。 loadingBackgroundHeight: 0, // 内部参数 
重新播放过程中,显示播放失败前的最后一帧数据 height。 decoder: 'decoder-pro.js', decoderAudio: 'decoder-pro-audio.js', decoderHard: 'decoder-pro-hard.js', // 硬解码 decoderHardNotWasm: 'decoder-pro-hard-not-wasm.js', // 硬解码 wasmMp4RecorderDecoder: 'jessibuca-pro-mp4-recorder-decoder.js', // wasm mp4 recorder decoder decoderWASM: '', isDecoderUseCDN: false, url: '', // inner rotate: 0, mirrorRotate: 'none', // 镜像xx aspectRatio: 'default', // 比例支持 4:3, 16:9, playbackConfig: { playList: [], // {start:xx,end:xx,more:xx} fps: '', // fps值 showControl: true, controlType: PLAYBACK_CONTROL_TYPE.normal, // duration: 0, // duration 持续时间。 startTime: '', // 开始时间 showRateBtn: false, rateConfig: [], // 播放倍率切换,支持[{label:'正常',value:1},{label:'2倍',value:2}] showPrecision: '', // 初始化显示精度 'oneHour', 'halfHour', 'tenMin', 'fiveMin'。 showPrecisionBtn: true, // 是否显示精度切换按钮 isCacheBeforeDecodeForFpsRender: false, // rfs渲染时,是否在解码前缓存数据 uiUsePlaybackPause: false, // ui上面是否使用 playbackPause 方法 isPlaybackPauseClearCache: true, // playbackPause是否清除缓存数据 isUseFpsRender: false, // 是否使用固定的fps渲染,播放器会动态计算流的fps isUseLocalCalculateTime: false, // 是否使用本地时间来计算playback时间 localOneFrameTimestamp: 40, // 一帧 40ms, isUseLocalCalculateTime 为 true 生效。 supportWheel: false, // 是否支持滚动轴切换精度。 useWCS: false, // 是否使用wcs解码 useMSE: false // 是否使用mse解码 }, qualityConfig: [], // 支持 ['高清','超清','4K'] defaultStreamQuality: '', scaleConfig: ['拉伸', '缩放', '正常'], // text: '', forceNoOffscreen: true, // 默认是不采用 hiddenAutoPause: false, protocol: PLAYER_PLAY_PROTOCOL.fetch, // 内部参数 demuxType: DEMUX_TYPE.flv, // 内部参数 useWasm: false, //wasm 解码 (inner)默认 useMSE: false, // mse 解码 useWCS: false, // wcs 解码 useSIMD: true, // pro 默认优先使用wasm simd解码,不支持则使用wasm解码 useMThreading: false, // 是否使用多线程解码 wcsUseVideoRender: true, // wcs 是否使用 video 渲染 wcsUseWebgl2Render: true, // canvas 模式下 wcs 是否使用webgl2渲染 wasmUseVideoRender: true, // wasm 用video标签渲染 mseUseCanvasRender: false, //mse 用canvas标签渲染 hlsUseCanvasRender: false, // hls 用canvas标签渲染 webrtcUseCanvasRender: false, // webrtc 用canvas标签渲染 待定 useOffscreen: false, // 内部参数(废弃) useWebGPU: false, // 是否使用webgpu引擎 mseDecodeErrorReplay: true, // mse 解码失败重新播放 wcsDecodeErrorReplay: true, // wcs 解码失败重新播放 wasmDecodeErrorReplay: true, // 解码失败重新播放。 simdDecodeErrorReplay: true, // simd 解码失败重新播放。 simdDecodeErrorReplayType: DECODE_TYPE.wasm, // simd 解码失败重新播放类型: wasm 或者 simd autoWasm: true, // 自动降级到 wasm 模式 decoderErrorAutoWasm: true, // 解码失败自动降级到 wasm 模式 hardDecodingNotSupportAutoWasm: true, // 硬解码不支持自动降级到 wasm 模式 webglAlignmentErrorReplay: true, // webgl对齐失败重新播放。 webglContextLostErrorReplay: true, // webgl context lost 重新播放。 openWebglAlignment: false, // https://github.com/langhuihui/jessibuca/issues/152 syncAudioAndVideo: false, // 音视频同步 syncAudioAndVideoDiff: 500, // ms // playback config playbackDelayTime: 1000, // TF卡流播放延迟时间 Inner playbackFps: 25, // Inner playbackForwardMaxRateDecodeIFrame: DEFAULT_PLAYBACK_FORWARD_MAX_RATE_DECODE_IFRAME, // max rate render i frame , Inner playbackCurrentTimeMove: true, // Inner 录像流数据的当前时间是否跟着播放时长移动 useVideoRender: true, // 使用video标签渲染 useCanvasRender: false, // 使用canvas渲染 networkDelayTimeoutReplay: false, // 网络延迟重连 recordType: FILE_SUFFIX.mp4, checkFirstIFrame: true, // 检查第一帧是否是I帧 nakedFlowFps: 25, // 裸流fps audioEngine: null, // 音频引擎 isShowRecordingUI: true, // 是否显示录制中UI isShowZoomingUI: true, // 是否显示缩放中 useFaceDetector: false, // 使用人脸检测 useObjectDetector: false, // 使用物体检测 useImageDetector: false, // 使用图片检测 useOcclusionDetector: false, // 使用遮挡检测 ptzPositionConfig: {}, // position config ptzShowType: 'vertical', // vertical 
垂直,level 水平 ptzClickType: PTZ_ACTIVE_EVENT_TYPE.click, // PTZ 点击类型 ptzStopEmitDelay: 0.3, // ptz停止后,停止发送指令的延迟时间(秒) ptzZoomShow: false, // ptz操作是否显示放大缩小操作 ptzApertureShow: false, // ptz操作是否显示光圈操作 ptzFocusShow: false, // ptz操作是否显示聚焦操作 ptzMoreArrowShow: false, // ptz操作是否显示更多箭头 ptzCruiseShow: false, // ptz操作是否显示巡航操作 ptzFogShow: false, // ptz操作是否显示透雾操作 ptzWiperShow: false, // ptz操作是否显示雨刷操作 ptzSupportDraggable: false, // ptz是否支持拖拽 // 微信安卓音频播放块大小 // 计算规则 48000 * ms / 1000 = size, // 48000 * 200 /1000 = 9600 播放时长200ms // 48000 * 175 /1000 = 8400 播放时长175ms // 48000 * 150 /1000 = 7200 播放时长150ms // 48000 * 125 /1000 = 6000 播放时长125ms // 48000 * 100 /1000 = 4800 播放时长100ms default // 48000 * 50 /1000 = 2400 播放时长50ms // 48000 * 25 /1000 = 1200 播放时长25ms // 48000 * 10 /1000 = 480 播放时长10ms weiXinInAndroidAudioBufferSize: 4800, isM7sCrypto: false, // 是否是m7s加密, m7sCryptoAudio: false, // m7s音频是否加密 isSm4Crypto: false, // 是否是sm4加密 isXorCrypto: false, // 是否是xor加密 sm4CryptoKey: '', // sm4加密key m7sCryptoKey: '', xorCryptoKey: '', // xor加密key cryptoKey: '', // 加密key cryptoIV: '', // 加密iv cryptoKeyUrl: '', // 加密key获取域名(for m7s) autoResize: false, // 自动调整大小 inner useWebFullScreen: false, // 使用web全屏(旋转播放器90度)(只会在移动端生效) ptsMaxDiff: 60 * 60, //单位(秒),默认值是1H aiFaceDetectLevel: 2, // 人脸检测等级(1-5) aiFaceDetectWidth: 240, // 人脸检测宽度(inner) aiFaceDetectShowRect: true, // 人脸检测显示框子 aiFaceDetectInterval: 1000, // 人脸检测时间间隔 ms aiFaceDetectRectConfig: {}, // 人脸检测框子配置 aiObjectDetectLevel: 2, // 物体检测等级(1-5) aiObjectDetectWidth: 240, // 物体检测宽度(inner) aiObjectDetectShowRect: true, // 物体检测显示框子 aiObjectDetectInterval: 1000, // 物品检测时间间隔 ms aiObjectDetectRectConfig: {}, // 物体检测框子配置 aiOcclusionDetectInterval: 1000, // 遮挡检测时间间隔 ms aiImageDetectDrop: false, // 图片检测是否丢弃不渲染 aiImageDetectActive: false, // 图片检测是否激活 videoRenderSupportScale: true, // video渲染支持Scale mediaSourceTsIsMaxDiffReplay: true, // 当ts间隔超过最大值之后,重新播放 controlHtml: '', // 自定义控制栏Html isH265: false, // 是否是h265, isWebrtcH265: false, // 是否是webrtc h265 supportLockScreenPlayAudio: true, // 是否支持锁屏播放音频(mobile,ipad 端) supportHls265: false, // 是否支持hls265 isEmitSEI: false, // 是否发送sei pauseAndNextPlayUseLastFrameShow: false, // pause->play 使用最后一帧显示 demuxUseWorker: true, // demux 使用worker 解析 其中, playFailedAndReplay: true, // 播放失败(异常原因)后,重新播放。(统一配置参数) showMessageConfig: ERROR_MESSAGE_TIPS, // 播放失败提示信息配置 videoElementPlayingFailedReplay: true, // video 播放失败,主要是自动播放的时候,video不允许播放的情况下,降级会canvas。 mp4RecordUseWasm: true, // mp4 record 使用 wasm // for mse mseAutoCleanupSourceBuffer: true, // 是否自动清理sourceBuffer(inner) mseAutoCleanupMaxBackwardDuration: 30, // 30s (inner) mseAutoCleanupMinBackwardDuration: 10, // 10s(inner) // replay widthOrHeightChangeReplay: true, // 宽高变化重新播放 simdH264DecodeVideoWidthIsTooLargeReplay: true, // simd h264 解码视频宽度过大重新播放 mediaSourceAudioG711NotSupportReplay: true, // mediaSource audio g711 not support replay (inner) mediaSourceAudioInitTimeoutReplay: true, // mediaSource audio init timeout replay (inner) mediaSourceUseCanvasRenderPlayFailedReplay: true, // mediaSource 解码使用canvas渲染失败重新播放 mediaSourceUseCanvasRenderPlayFailedReplayType: RENDER_TYPE.video, // mediaSource 解码使用canvas渲染失败重新播放类型:mse+video,或者wasm+canvas widthOrHeightChangeReplayDelayTime: 0, // 单位秒,宽高变化重新播放延迟时间 // 幽灵水印设置 ghostWatermarkConfig: { on: 5, // 幽灵模式开启时间 off: 5, // 幽灵模式关闭时间 content: '', // 幽灵水印内容 fontSize: 12, // 幽灵水印字体大小 color: 'white', // 幽灵水印颜色 opacity: 0.15, // 幽灵水印透明度 speed: 0.2 // 幽灵水印速度 }, // 动态水印设置 dynamicWatermarkConfig: { content: '', // 动态水印内容 speed: 0.2, // 动态水印速度 fontSize: 12, // 动态水印字体大小 color: 
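// Worked example (illustrative only): weiXinInAndroidAudioBufferSize follows the rule
// size = 48000 * ms / 1000 described above, so the default 4800 corresponds to a 100 ms
// audio chunk at a 48 kHz sample rate. A hypothetical helper for the reverse direction:
//   const bufferSizeToMs = (size) => size / 48000 * 1000; // bufferSizeToMs(4800) === 100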
'white', // 动态水印颜色 opacity: 0.15 // 动态水印透明度 }, // 遇上时间戳相同的流,直接扔掉整个gop数据。 isDropSameTimestampGop: false, mseDecodeAudio: false, // mse 解码音频数据(支持aac/mp3) nakedFlowH265DemuxUseNew: true, // 裸流h265解封装用new的 //自定义扩展dom extendDomConfig: { html: '', showBeforePlay: false, //是否在播放前显示 showAfterLoading: true //是否在加载后显示 }, disableContextmenu: false, // 是否禁用右键菜单 websocket1006ErrorReplay: false, // ws1006错误重播 websocket1006ErrorReplayDelayTime: 0, //ws1006错误延迟重播时间 mseDecoderUseWorker: false, openMemoryLog: false, // 是否打开内存日志 mainThreadFetchUseWorker: true, // 主线程解码的时候fetch 是否通过单独的 worker fetch playFailedAndPausedShowPlayBtn: true, // 播放失败并暂停,是否显示播放按钮 mseCorrectionTimestamp: true // mse解码是否纠正时间戳。 }; const WORKER_CMD_TYPE = { init: 'init', initVideo: 'initVideo', render: 'render', playAudio: 'playAudio', initAudio: 'initAudio', kBps: 'kBps', decode: 'decode', audioCode: 'audioCode', audioNalu: 'audioNalu', audioAACSequenceHeader: 'audioAACSequenceHeader', videoCode: 'videoCode', videoCodec: 'videoCodec', videoNalu: 'videoNalu', videoPayload: 'videoPayload', audioPayload: 'audioPayload', wasmError: 'wasmError', workerFetch: 'workerFetch', iframeIntervalTs: 'iframeIntervalTs', isDropping: 'isDropping', workerEnd: 'workerEnd', networkDelay: 'networkDelay', playbackStreamVideoFps: 'playbackStreamVideoFps', wasmDecodeVideoNoResponseError: 'wasmDecodeVideoNoResponseError', wasmWidthOrHeightChange: 'wasmWidthOrHeightChange', simdDecodeError: 'simdDecodeError', simdH264DecodeVideoWidthIsTooLarge: 'simdH264DecodeVideoWidthIsTooLarge', websocketOpen: 'websocketOpen', closeEnd: 'closeEnd', tempStream: 'tempStream', videoSEI: 'videoSEI', // for flv recorder flvScriptData: 'flvScriptData', aacSequenceHeader: 'aacSequenceHeader', videoSequenceHeader: 'videoSequenceHeader', flvBufferData: 'flvBufferData', checkFirstIFrame: 'checkFirstIFrame', // for mse worker mseHandle: 'mseHandle', mseFirstRenderTime: 'mseFirstRenderTime', mseError: 'mseError' }; const WASM_ERROR = { invalidNalUnitSize: 'Invalid NAL unit size' // errorSplittingTheInputIntoNALUnits: 'Error splitting the input into NAL units' }; const MEDIA_TYPE = { audio: 1, video: 2 }; const FLV_MEDIA_TYPE = { audio: 8, video: 9, scriptData: 18 }; const WORKER_SEND_TYPE = { init: 'init', decode: 'decode', audioDecode: 'audioDecode', videoDecode: 'videoDecode', initAudioCodec: 'initAudioCodec', initVideoCodec: 'initVideoCodec', close: 'close', updateConfig: 'updateConfig', resetDecode: 'resetDecode', clearBuffer: 'clearBuffer', resetAudioDecode: 'resetAudioDecode', resetVideoDecode: 'resetVideoDecode', fetchStream: 'fetchStream', sendWsMessage: 'sendWsMessage', // for mse worker mseUpdateVideoTimestamp: 'mseUpdateVideoTimestamp' }; const WORKER_FETCH_CMD_TYPE = { fetch: 'fetch', destroy: 'destroy', destroyEnd: 'destroyEnd', buffer: 'buffer', fetchError: 'fetchError', fetchClose: 'fetchClose', fetchSuccess: 'fetchSuccess' }; // inner events const EVENTS = { fullscreen: 'fullscreen$2', webFullscreen: 'webFullscreen', decoderWorkerInit: 'decoderWorkerInit', play: 'play', playing: 'playing', pause: 'pause', mute: 'mute', load: 'load', loading: 'loading', zooming: 'zooming', videoInfo: 'videoInfo', timeUpdate: 'timeUpdate', audioInfo: "audioInfo", log: 'log', error: "error", kBps: 'kBps', timeout: 'timeout', delayTimeout: 'delayTimeout', delayTimeoutRetryEnd: 'delayTimeoutRetryEnd', loadingTimeout: 'loadingTimeout', loadingTimeoutRetryEnd: 'loadingTimeoutRetryEnd', stats: 'stats', performance: "performance", videoSmooth: 'videoSmooth', faceDetectActive: 'faceDetectActive', 
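// Illustrative note: FLV_MEDIA_TYPE above holds the standard FLV tag types, read from the
// low 5 bits of the first byte of an FLV tag header, e.g. (tag is a hypothetical Uint8Array):
//   const tagType = tag[0] & 0x1f; // 8 = audio, 9 = video, 18 = script data (onMetaData)
//   if (tagType === FLV_MEDIA_TYPE.scriptData) { /* AMF metadata follows */ }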
objectDetectActive: 'objectDetectActive', occlusionDetectActive: 'occlusionDetectActive', imageDetectActive: 'imageDetectActive', // record record: 'record', recording: 'recording', recordingTimestamp: 'recordingTimestamp', recordStart: 'recordStart', recordEnd: 'recordEnd', recordCreateError: 'recordCreateError', recordBlob: 'recordBlob', buffer: 'buffer', videoFrame: 'videoFrame', videoSEI: 'videoSEI', start: 'start', metadata: 'metadata', resize: 'resize', volumechange: 'volumechange', destroy: 'destroy', beforeDestroy: 'beforeDestroy', // stream streamEnd: 'streamEnd', streamRate: 'streamRate', streamAbps: 'streamAbps', streamVbps: 'streamVbps', streamDts: 'streamDts', streamSuccess: 'streamSuccess', streamMessage: 'streamMessage', streamError: 'streamError', streamStats: 'streamStats', // MSE mseSourceOpen: 'mseSourceOpen', mseSourceClose: 'mseSourceClose', mseSourceended: 'mseSourceended', mseSourceStartStreaming: 'mseSourceStartStreaming', mseSourceEndStreaming: 'mseSourceEndStreaming', mseSourceBufferError: 'mseSourceBufferError', mseAddSourceBufferError: 'mseAddSourceBufferError', mseSourceBufferBusy: 'mseSourceBufferBusy', mseSourceBufferFull: 'mseSourceBufferFull', // VIDEO videoWaiting: 'videoWaiting', videoTimeUpdate: 'videoTimeUpdate', videoSyncAudio: 'videoSyncAudio', // playToRenderTimes: 'playToRenderTimes', playbackTime: 'playbackTime', playbackTimestamp: 'playbackTimestamp', playbackTimeScroll: 'playbackTimeScroll', playbackPrecision: 'playbackPrecision', // inner playbackShowPrecisionChange: 'playbackShowPrecisionChange', playbackJustTime: 'playbackJustTime', playbackStats: 'playbackStats', playbackSeek: 'playbackSeek', playbackPause: 'playbackPause', playbackPauseOrResume: 'playbackPauseOrResume', playbackRateChange: 'playbackRateChange', playbackPreRateChange: 'playbackPreRateChange', ptz: 'ptz', streamQualityChange: 'streamQualityChange', visibilityChange: "visibilityChange", netBuf: 'netBuf', close: 'close', networkDelayTimeout: 'networkDelayTimeout', togglePerformancePanel: 'togglePerformancePanel', viewResizeChange: 'viewResizeChange', flvDemuxBufferSizeTooLarge: 'flvDemuxBufferSizeTooLarge', // talk talkGetUserMediaSuccess: 'talkGetUserMediaSuccess', talkGetUserMediaFail: 'talkGetUserMediaFail', talkGetUserMediaTimeout: 'talkGetUserMediaTimeout', talkStreamStart: 'talkStreamStart', talkStreamOpen: 'talkStreamOpen', talkStreamClose: 'talkStreamClose', talkStreamError: 'talkStreamError', talkStreamInactive: 'talkStreamInactive', webrtcDisconnect: 'webrtcDisconnect', webrtcFailed: 'webrtcFailed', webrtcClosed: 'webrtcClosed', webrtcOnConnectionStateChange: 'webrtcOnConnectionStateChange', webrtcOnIceConnectionStateChange: 'webrtcOnIceConnectionStateChange', // crash crashLog: 'crashLog', // dom focus: 'focus', blur: 'blur', visibilityHiddenTimeout: 'visibilityHiddenTimeout', // websocket websocketOpen: 'websocketOpen', websocketClose: 'websocketClose', websocketError: 'websocketError', websocketMessage: 'websocketMessage', // ai aiObjectDetectorInfo: 'aiObjectDetectorInfo', aiFaceDetectorInfo: 'aiFaceDetectorInfo', aiOcclusionDetectResult: 'aiOcclusionDetectResult', aiImageDetectResult: 'aiImageDetectResult', // 异常暂停 playFailedAndPaused: 'playFailedAndPaused', // audio audioResumeState: 'audioResumeState', // webrtc webrtcStreamH265: 'webrtcStreamH265', // flv flvMetaData: 'flvMetaData', // talk talkFailedAndStop: 'talkFailedAndStop', removeLoadingBgImage: 'removeLoadingBgImage', memoryLog: 'memoryLog', downloadMemoryLog: 'downloadMemoryLog', pressureObserverCpu: 
'pressureObserverCpu', currentPts: 'currentPts' }; // jbPro events const JESSIBUCA_EVENTS = { load: EVENTS.load, timeUpdate: EVENTS.timeUpdate, videoInfo: EVENTS.videoInfo, audioInfo: EVENTS.audioInfo, error: EVENTS.error, kBps: EVENTS.kBps, start: EVENTS.start, timeout: EVENTS.timeout, loadingTimeout: EVENTS.loadingTimeout, loadingTimeoutRetryEnd: EVENTS.loadingTimeoutRetryEnd, delayTimeout: EVENTS.delayTimeout, delayTimeoutRetryEnd: EVENTS.delayTimeoutRetryEnd, fullscreen: 'fullscreen', webFullscreen: EVENTS.webFullscreen, play: EVENTS.play, pause: EVENTS.pause, mute: EVENTS.mute, stats: EVENTS.stats, performance: EVENTS.performance, // record recordingTimestamp: EVENTS.recordingTimestamp, recordStart: EVENTS.recordStart, recordCreateError: EVENTS.recordCreateError, recordEnd: EVENTS.recordEnd, recordBlob: EVENTS.recordBlob, playToRenderTimes: EVENTS.playToRenderTimes, // playback playbackSeek: EVENTS.playbackSeek, playbackStats: EVENTS.playbackStats, playbackTimestamp: EVENTS.playbackTimestamp, playbackPauseOrResume: EVENTS.playbackPauseOrResume, playbackPreRateChange: EVENTS.playbackPreRateChange, playbackRateChange: EVENTS.playbackRateChange, playbackShowPrecisionChange: EVENTS.playbackShowPrecisionChange, ptz: EVENTS.ptz, streamQualityChange: EVENTS.streamQualityChange, zooming: EVENTS.zooming, crashLog: EVENTS.crashLog, focus: EVENTS.focus, blur: EVENTS.blur, visibilityHiddenTimeout: EVENTS.visibilityHiddenTimeout, visibilityChange: EVENTS.visibilityChange, // websocket websocketOpen: EVENTS.websocketOpen, websocketClose: EVENTS.websocketClose, // networkDelayTimeout: EVENTS.networkDelayTimeout, // aiObjectDetectorInfo: EVENTS.aiObjectDetectorInfo, aiFaceDetectorInfo: EVENTS.aiFaceDetectorInfo, aiOcclusionDetectResult: EVENTS.aiOcclusionDetectResult, aiImageDetectResult: EVENTS.aiImageDetectResult, playFailedAndPaused: EVENTS.playFailedAndPaused, streamEnd: EVENTS.streamEnd, // audio audioResumeState: EVENTS.audioResumeState, // video videoSEI: EVENTS.videoSEI, // flv flvMetaData: EVENTS.flvMetaData, // webrtc webrtcOnConnectionStateChange: EVENTS.webrtcOnConnectionStateChange, webrtcOnIceConnectionStateChange: EVENTS.webrtcOnIceConnectionStateChange, currentPts: EVENTS.currentPts, videoSmooth: EVENTS.videoSmooth }; const TALK_EVENTS = { talkStreamClose: EVENTS.talkStreamClose, talkStreamError: EVENTS.talkStreamError, talkStreamInactive: EVENTS.talkStreamInactive, talkGetUserMediaTimeout: EVENTS.talkGetUserMediaTimeout, talkFailedAndStop: EVENTS.talkFailedAndStop }; ({ load: EVENTS.load, timeUpdate: EVENTS.timeUpdate, audioInfo: EVENTS.audioInfo, error: EVENTS.error, kBps: EVENTS.kBps, start: EVENTS.start, timeout: EVENTS.timeout, loadingTimeout: EVENTS.loadingTimeout, loadingTimeoutRetryEnd: EVENTS.loadingTimeoutRetryEnd, delayTimeout: EVENTS.delayTimeout, delayTimeoutRetryEnd: EVENTS.delayTimeoutRetryEnd, play: EVENTS.play, pause: EVENTS.pause, mute: EVENTS.mute, stats: EVENTS.stats, playToRenderTimes: EVENTS.playToRenderTimes, crashLog: EVENTS.crashLog, // websocket websocketOpen: EVENTS.websocketOpen, websocketClose: EVENTS.websocketClose, // playFailedAndPaused: EVENTS.playFailedAndPaused, // audio audioResumeState: EVENTS.audioResumeState }); const TALK_EVENTS_ERROR = { talkStreamError: EVENTS.talkStreamError, talkStreamClose: EVENTS.talkStreamClose }; const EVENTS_ERROR = { playError: 'playIsNotPauseOrUrlIsNull', fetchError: "fetchError", websocketError: 'websocketError', webcodecsH265NotSupport: 'webcodecsH265NotSupport', webcodecsDecodeError: 'webcodecsDecodeError', 
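// Illustrative note: the keys of this EVENTS_ERROR map line up with the keys of
// ERROR_MESSAGE_TIPS, which is also the default value of the showMessageConfig option,
// so an emitted error code can be mapped to a user-facing tip, e.g. (sketch):
//   opt.showMessageConfig[EVENTS_ERROR.mediaSourceH265NotSupport]
//   // => '不支持硬解码H265' (H.265 hardware decoding is not supported)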
webcodecsUnsupportedConfigurationError: 'webcodecsUnsupportedConfigurationError', webcodecsDecodeConfigureError: 'webcodecsDecodeConfigureError', mediaSourceH265NotSupport: 'mediaSourceH265NotSupport', mediaSourceAudioG711NotSupport: 'mediaSourceAudioG711NotSupport', mediaSourceAudioInitTimeout: 'mediaSourceAudioInitTimeout', mediaSourceAudioNoDataTimeout: 'mediaSourceAudioNoDataTimeout', mediaSourceDecoderConfigurationError: 'mediaSourceDecoderConfigurationError', mediaSourceFull: EVENTS.mseSourceBufferFull, mseSourceBufferError: EVENTS.mseSourceBufferError, mseAddSourceBufferError: EVENTS.mseAddSourceBufferError, mediaSourceAppendBufferError: 'mediaSourceAppendBufferError', mediaSourceTsIsMaxDiff: 'mediaSourceTsIsMaxDiff', mediaSourceUseCanvasRenderPlayFailed: 'mediaSourceUseCanvasRenderPlayFailed', mediaSourceBufferedIsZeroError: 'mediaSourceBufferedIsZeroError', wasmDecodeError: 'wasmDecodeError', wasmUseVideoRenderError: 'wasmUseVideoRenderError', hlsError: 'hlsError', webrtcError: 'webrtcError', webrtcClosed: EVENTS.webrtcClosed, webrtcIceCandidateError: 'webrtcIceCandidateError', webglAlignmentError: 'webglAlignmentError', wasmWidthOrHeightChange: 'wasmWidthOrHeightChange', mseWidthOrHeightChange: 'mseWidthOrHeightChange', wcsWidthOrHeightChange: 'wcsWidthOrHeightChange', widthOrHeightChange: 'widthOrHeightChange', tallWebsocketClosedByError: 'tallWebsocketClosedByError', flvDemuxBufferSizeTooLarge: EVENTS.flvDemuxBufferSizeTooLarge, wasmDecodeVideoNoResponseError: 'wasmDecodeVideoNoResponseError', audioChannelError: 'audioChannelError', simdH264DecodeVideoWidthIsTooLarge: 'simdH264DecodeVideoWidthIsTooLarge', simdDecodeError: 'simdDecodeError', webglContextLostError: 'webglContextLostError', videoElementPlayingFailed: 'videoElementPlayingFailed', videoElementPlayingFailedForWebrtc: 'videoElementPlayingFailedForWebrtc', decoderWorkerInitError: 'decoderWorkerInitError', videoInfoError: 'videoInfoError', videoCodecIdError: 'videoCodecIdError', streamEnd: EVENTS.streamEnd, delayTimeout: EVENTS.delayTimeout, loadingTimeout: EVENTS.loadingTimeout, networkDelayTimeout: EVENTS.networkDelayTimeout, aliyunRtcError: 'aliyunRtcError', ...TALK_EVENTS_ERROR }; const WEBSOCKET_STATUS = { notConnect: 'notConnect', open: 'open', close: 'close', error: 'error' }; const SCREENSHOT_TYPE = { download: 'download', base64: 'base64', blob: 'blob' }; const RECORDING_TYPE = { download: 'download', blob: 'blob' }; const VIDEO_ENC_TYPE = { 7: 'H264(AVC)', // 12: 'H265(HEVC)', // 99: 'MPEG4' }; const VIDEO_ENC_CODE = { h264: 7, h265: 12, mpeg4: 99 }; const VIDEO_ENC_TYPE_SHOW = { h264: 'H264(AVC)', h265: 'H265(HEVC)' }; const AUDIO_ENC_CODE = { AAC: 10, ALAW: 7, MULAW: 8, MP3: 2 }; const AUDIO_ENC_CODE_SHOW = { AAC: 'AAC', ALAW: 'ALAW(g711a)', MULAW: 'MULAW(g711u)', MP3: 'MP3' }; const AUDIO_ENC_TYPE = { 10: 'AAC', 7: 'ALAW', 8: 'MULAW', 2: 'MP3' }; const H264_NAL_TYPE = { sps: 7, pps: 8, iFrame: 5, // kUnspecified: 0, kSliceNonIDR: 1, kSliceDPA: 2, kSliceDPB: 3, kSliceDPC: 4, kSliceIDR: 5, // iFrame kSliceSEI: 6, // sei 辅助增强信息 kSliceSPS: 7, // sps kSlicePPS: 8, // pps kSliceAUD: 9, kEndOfSequence: 10, kEndOfStream: 11, kFiller: 12, kSPSExt: 13, kReserved0: 14 }; /** * NAL_TRAIL_N = 0, * NAL_TRAIL_R = 1, * NAL_TSA_N = 2, * NAL_TSA_R = 3, * NAL_STSA_N = 4, * NAL_STSA_R = 5, * NAL_RADL_N = 6, * NAL_RADL_R = 7, * NAL_RASL_N = 8, * NAL_RASL_R = 9, * NAL_BLA_W_LP = 16, * NAL_BLA_W_RADL = 17, * NAL_BLA_N_LP = 18, * NAL_IDR_W_RADL = 19, * NAL_IDR_N_LP = 20, * NAL_CRA_NUT = 21, * NAL_VPS = 32, * NAL_SPS = 33, * 
NAL_PPS = 34, * NAL_AUD = 35, * NAL_EOS_NUT = 36, * NAL_EOB_NUT = 37, * NAL_FD_NUT = 38, * NAL_SEI_PREFIX = 39, * NAL_SEI_SUFFIX = 40, */ const H265_NAL_TYPE = { pFrame: 1, // 语义为被参考的后置图像,且非TSA、非STSA的SS编码数据 iFrame: 19, // IDR_W_RADL 语义为可能有RADL图像的IDR图像的SS编码数据 IDR kSliceIDR_W_RADL: 19, nLp: 20, // kSliceIDR_N_LP kSliceIDR_N_LP: 20, craNut: 21, //NAL_CRA_NUT kSliceCRA_NUT: 21, vps: 32, // 语义为视频参数集 kSliceVPS: 32, sps: 33, // 语义为序列参数集 kSliceSPS: 33, pps: 34, // 语义为图像参数集 kSlicePPS: 34, kSliceAUD: 35, sei: 39, //SEI 语义为补充增强信息 prefixSei: 39, // suffixSei: 40 // }; const CONTROL_HEIGHT = 38; const CONTROL_PLAYBACK_HEIGHT = 48; const SCALE_MODE_TYPE = { full: 0, // 视频画面完全填充canvas区域,画面会被拉伸 auto: 1, // 视频画面做等比缩放后,高或宽对齐canvas区域,画面不被拉伸,但有黑边 fullAuto: 2 // 视频画面做等比缩放后,完全填充canvas区域,画面不被拉伸,没有黑边,但画面显示不全 }; const CANVAS_RENDER_TYPE = { webcodecs: 'webcodecs', webgl: 'webgl', webgl2: 'webgl2', webgpu: 'webgpu', offscreen: 'offscreen', mse: 'mse', hls: 'hls', webrtc: 'webrtc' }; const ENCODED_VIDEO_TYPE = { key: 'key', delta: 'delta' }; const MP4_CODECS = { avc: 'video/mp4; codecs="avc1.64002A"', hev: 'video/mp4; codecs="hev1.1.6.L123.b0"', // others hev hev2: 'video/mp4;codecs="hev1.1.6.L120.90"', hev3: 'video/mp4;codecs="hev1.2.4.L120.90"', hev4: 'video/mp4;codecs="hev1.3.E.L120.90"', hev5: 'video/mp4;codecs="hev1.4.10.L120.90"' }; const MEDIA_SOURCE_STATE = { ended: 'ended', open: 'open', closed: 'closed' }; const AIDIO_MAX_VIDEO_DIFF = 2000; // ms // renderTimeDay once length const PLAYBACK_RENDER_ONCE_LENGTH = 2000; const HOT_KEY = { esc: 27, // arrowUp: 38, // arrowDown: 40 // }; // playback control time precision const PLAYBACK_CONTROL_TIME_PRECISION = { oneHour: 'oneHour', // 60min halfHour: 'halfHour', // 30min tenMin: 'tenMin', // 10min fiveMin: 'fiveMin' // 5min // oneMin: "oneMin" // 1min }; const PLAYBACK_CONTROL_TIME_PRECISION_CLASS = { oneHour: 'one-hour', // 60min halfHour: 'half-hour', // 30min tenMin: 'ten-min', // 10min fiveMin: 'five-min' // 5min // oneMin: 'one-min' // 1min }; // const PLAYBACK_CONTROL_TIME_PRECISION_ARRAY = ['oneHour', 'halfHour', 'tenMin', 'fiveMin']; const PTZ_ARROW = ['up', 'right', 'down', 'left', 'left-up', 'right-up', 'left-down', 'right-down']; const PTZ_OBJ = { up: "up", right: 'right', down: 'down', left: 'left', leftUp: 'leftUp', leftDown: 'leftDown', rightUp: 'rightUp', rightDown: 'rightDown', // stop stop: 'stop', fiStop: 'fiStop', // just for 聚焦,光圈, // 镜头 zoomExpand: 'zoomExpand', zoomNarrow: 'zoomNarrow', // 聚焦 apertureFar: 'apertureFar', apertureNear: 'apertureNear', // 光圈 focusFar: 'focusFar', focusNear: 'focusNear', // 巡航 cruiseStart: 'cruiseStart', cruiseStop: 'cruiseStop', // 透雾 fogOpen: 'fogOpen', fogClose: 'fogClose', // 雨刷 wiperOpen: 'wiperOpen', wiperClose: 'wiperClose' }; const TALK_ENC_TYPE = { g711a: 'g711a', g711u: 'g711u', pcm: 'pcm', opus: 'opus' }; const SCREENSHOT_FORMAT_TYPE = { png: 'image/png', jpeg: 'image/jpeg', webp: 'image/webp' }; const MEDIA_SOURCE_EVENTS = { sourceClose: 'sourceclose', sourceOpen: 'sourceopen', sourceended: 'sourceended', startstreaming: 'startstreaming', endstreaming: 'endstreaming', qualitychange: 'qualitychange' }; const VIDEO_ELEMENT_EVENTS = { canplay: 'canplay', waiting: "waiting", timeUpdate: 'timeupdate', ratechange: 'ratechange' }; const VIDEO_ENCODE_TYPE = { h264: 'avc', h265: 'hevc' }; const WCS_ERROR = { keyframeIsRequiredError: 'A key frame is required after configure() or flush()', canNotDecodeClosedCodec: "Cannot call 'decode' on a closed codec", unsupportedConfiguration: 'Unsupported configuration', 
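// Illustrative note: the values in H264_NAL_TYPE / H265_NAL_TYPE are compared against the
// NAL unit type taken from the first byte of a NAL unit (nalu is a hypothetical Uint8Array
// starting at the NAL header):
//   const h264Type = nalu[0] & 0x1f;        // e.g. 5  => IDR slice (H264_NAL_TYPE.iFrame)
//   const h265Type = (nalu[0] >> 1) & 0x3f; // e.g. 19 => IDR_W_RADL (H265_NAL_TYPE.iFrame)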
decoderFailure: 'Decoder failure', decodingError: 'Decoding error', decoderError: 'Decoder error', hevcDecodingIsNotSupported: 'HEVC decoding is not supported' }; const FETCH_ERROR = { abortError: 'The user aborted a request', abortError2: 'AbortError', abort: 'AbortError' }; const PLAYER_STATUS = { loading: 'loading', playing: 'playing', paused: 'paused', destroy: 'destroy' }; const AVC_PACKET_TYPE = { sequenceHeader: 0, nalu: 1 }; const FRAME_TYPE = { keyFrame: 1, interFrame: 2 }; // RTP_PAYLOAD_TYPE_PCMU = 0, // g711u // RTP_PAYLOAD_TYPE_PCMA = 8, // g711a // RTP_PAYLOAD_TYPE_JPEG = 26, // RTP_PAYLOAD_TYPE_H264 = 96, // RTP_PAYLOAD_TYPE_H265 = 97, // RTP_PAYLOAD_TYPE_OPUS = 98, // RTP_PAYLOAD_TYPE_AAC = 99, // RTP_PAYLOAD_TYPE_G726 = 100, // RTP_PAYLOAD_TYPE_G726_16 = 101, // RTP_PAYLOAD_TYPE_G726_24 = 102, // RTP_PAYLOAD_TYPE_G726_32 = 103, // RTP_PAYLOAD_TYPE_G726_40 = 104, // RTP_PAYLOAD_TYPE_SPEEX = 105, const RTP_PAYLOAD_TYPE = { pcma: 8, g711a: 8, pcmu: 0, g711u: 0, jpeg: 26, h264: 96, h265: 97, opus: 98, aac: 99 }; const TALK_PACKET_TYPE = { empty: 'empty', // 裸的协议 rtp: 'rtp' }; const TALK_PACKAGE_TCP_SEND_TYPE = { tcp: 'tcp', udp: 'udp' }; const WEBSOCKET_EVENTS = { open: 'open', close: 'close', error: 'error', message: 'message' }; const TALK_ENGINE = { worklet: 'worklet', script: 'script' }; // default talk options const DEFAULT_TALK_OPTIONS = { encType: TALK_ENC_TYPE.g711a, packetType: TALK_PACKET_TYPE.rtp, // 默认的包个格式化 packetTcpSendType: TALK_PACKAGE_TCP_SEND_TYPE.tcp, // 默认的包个格式化 rtpSsrc: '0000000000', // 10 位 numberChannels: 1, // 采样通道 sampleRate: 8000, // 采样率 sampleBitsWidth: 16, // 采样精度 sendInterval: 20, // 发送间隔(ms) debug: false, debugLevel: DEBUG_LEVEL.warn, // debug level testMicrophone: false, // 测试麦克风获取 saveRtpToFile: false, // 保存 rtp 到文件 audioBufferLength: 160, // 默认走的是 20ms 8000 采样率 16 位精度 engine: TALK_ENGINE.worklet, // checkGetUserMediaTimeout: false, // 检测 getUserMedia 超时 getUserMediaTimeout: 10 * 1000, // getUserMedia 超时时间 10s audioConstraints: { // deviceId: '', // 设备 id latency: true, // noiseSuppression: true, // 降噪 autoGainControl: true, echoCancellation: true, // 回声消除 sampleRate: 48000, channelCount: 1 } }; const AUDIO_ENGINE_TYPE = { worklet: 'worklet', script: 'script', // default active: 'active' // }; const CONTROL_BUTTON_OPTIONS = { name: '', index: 0, icon: '', iconHover: '', iconTitle: '', activeIcon: '', activeIconHover: '', activeIconTitle: '', click: null, activeClick: null }; const MENU_ITEM_OPTIONS = { content: '', click: null, index: 0 }; const VIDEO_FRAME_TYPE = { keyFrame: 1, // interFrame: 2 // }; const HLS_EVENTS = { SUBTITLE_SEGMENTS: 'subtitle-segments', HLS_MANIFEST_LOADED: 'hls-manifest-loaded', // 主从m3u8格式时,master m3u8文件加载并解析完成后 HLS_LEVEL_LOADED: 'hls-level-loaded', // 二级m3u8加载并解析完成后,抛出解析后的结构 DEMUXED_TRACK: 'demuxed-track', FLV_SCRIPT_DATA: 'flv-script-data', METADATA_PARSED: 'metadata-parsed', // 视频元数据被第一次解析到时触发参数被分为两种类型 video 和 audio TTFB: 'ttfb', // 分片请求开始到接收到请求响应 LOAD_RETRY: 'load-retry', // 请求发生重试时触发 LOAD_START: 'load-start', // 分片在发送请求之前触发 SPEED: 'speed', // 当收集到网络速度统计时触发 LOAD_COMPLETE: 'load-complete', // 在请求完成后触发 LOAD_RESPONSE_HEADERS: 'load-response-headers', // 接收到请求响应头时触发 SEI: 'sei', // 当解析到视频 sei 时触发 SEI_IN_TIME: 'sei-in-time', // 根据当前视频播放时间抛出 sei,触发该事件表示该 sei 将在当前时间点展示。 NO_AUDIO_TRACK: 'no-audio-track', REMOVE_BUFFER: 'remove-buffer', BUFFEREOS: 'buffereos', SOURCEBUFFER_CREATED: 'sourcebuffer-created', SWITCH_URL_FAILED: 'switch-url-failed', // switchURL 方法调用后,切换 url 失败后触发。 SWITCH_URL_SUCCESS: 'switch-url-success', // 
switchURL 方法调用后,切换 url 成功后触发。 SUBTITLE_PLAYLIST: 'subtitle-playlist', STREAM_PARSED: 'stream-parsed', ERROR: 'error' }; const AI_FACE_DETECTOR_LEVEL = [0, 160, 240, 320, 480, 640]; const AI_OBJECT_DETECTOR_LEVEL = [0, 160, 240, 320, 480, 640]; const CPU_LEVEL = ['轻松', '正常', '较高', '高']; const LOADER_STATUS = { idle: 'idle', // connecting: 'connecting', // buffering: 'buffering', // error: 'error', // complete: 'complete' // }; const VIDEO_ERROR_CODE_DESC = { 1: 'MEDIA_ERR_ABORTED', // media err aborted 取回过程被用户中止 2: 'MEDIA_ERR_NETWORK', // media err network 当下载时发生错误 3: 'MEDIA_ERR_DECODE', // media err decode 当解码时发生错误 4: 'MEDIA_ERR_SRC_NOT_SUPPORTED' // media err src not supported 不支持音频/视频 }; const VIDEO_ERROR_MESSAGE = { videoDecoderInitializationFailed: 'video decoder initialization failed', audioDecoderError: 'audio packet' }; const AV_TRACK_ID = { video: 1, audio: 2 }; // rtmp 2023 spec const FRAME_HEADER_EX = 0x80; const PACKET_TYPE_EX = { PACKET_TYPE_SEQ_START: 0, PACKET_TYPE_FRAMES: 1, PACKET_TYPE_SEQ_END: 2, PACKET_TYPE_FRAMESX: 3, // PACKETTYPE_FRAMESX is an optimization to avoid sending composition time offsets of 0. See Enhanced RTMP spec. PACKET_TYPE_METADATA: 4 }; const FRAME_TYPE_EX = { FT_KEY: 0x10, FT_INTER: 0x20 }; const WEBRTC_STATS_TYPE = { CandidatePair: "candidate-pair", // todo Certificate: "certificate", // todo Codec: "codec", // todo Csrc: "csrc", DataCahnnel: "data-channel", InboundRtp: "inbound-rtp", // todo LocalCandidate: "local-candidate", // todo OutboundRtp: "outbound-rtp", PeerConnection: "peer-connection", Receiver: "receiver", RemoteCandidate: "remote-candidate", // todo RemoteInboundRtp: "remote-inbound-rtp", RemoteOutboundRtp: "remote-outbound-rtp", Sender: "sender", Stream: "stream", Track: "track", Transport: "transport", // todo MediaSource: "media-source" }; const TS_LARGE_AV_FIRST_FRAME_GAP = 90000 / 2; // 500ms const TS_AUDIO_GAP_OVERLAP_THRESHOLD_COUNT = 3; const TS_MAX_SILENT_FRAME_DURATION = 90000; // 1s const TS_AUDIO_EXCETION_LOG_EMIT_DURATION = 5 * 90000; // 5s const TS_MAX_VIDEO_FRAME_DURATION = 90000; // 1s const TS_MAX_DTS_DELTA_WITH_NEXT_CHUNK = 90000 / 2; // 500ms const RETRY_TYPE = { ws1006: 'ws1006', mseDecodeError: 'mseDecodeError', wcsDecodeError: 'wcsDecodeError' }; class Debug { constructor(master) { this.log = function (name) { if (master._opt.debug && master._opt.debugLevel == DEBUG_LEVEL.debug) { const prefix = master._opt.debugUuid ? `[${master._opt.debugUuid}]` : ''; for (var _len = arguments.length, args = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { args[_key - 1] = arguments[_key]; } console.log(`JbPro${prefix}[\u2705\u2705\u2705][${name}]`, ...args); } }; this.warn = function (name) { if (master._opt.debug && (master._opt.debugLevel == DEBUG_LEVEL.debug || master._opt.debugLevel == DEBUG_LEVEL.warn)) { const prefix = master._opt.debugUuid ? `[${master._opt.debugUuid}]` : ''; for (var _len2 = arguments.length, args = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) { args[_key2 - 1] = arguments[_key2]; } console.log(`JbPro${prefix}[\u2757\u2757\u2757][${name}]`, ...args); } }; this.error = function (name) { const prefix = master._opt.debugUuid ? `[${master._opt.debugUuid}]` : ''; for (var _len3 = arguments.length, args = new Array(_len3 > 1 ? 
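// (The surrounding `for (var _len3 ...)` loop is the transpiled form of a rest parameter:
// it copies arguments[1..] into `args`, which are then spread into console.error.)
// Worked note: the TS_* duration constants defined earlier are expressed in the MPEG-TS
// 90 kHz clock, so 90000 ticks = 1 s and 90000 / 2 = 45000 ticks = 500 ms, matching their
// inline comments.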
_len3 - 1 : 0), _key3 = 1; _key3 < _len3; _key3++) { args[_key3 - 1] = arguments[_key3]; } console.error(`JbPro${prefix}[\u274C\u274C\u274C][${name}]`, ...args); // if (master.addMemoryLog) { // master.addMemoryLog(`[${name}]`, ...args); // } }; } } var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {}; function unwrapExports (x) { return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x; } function createCommonjsModule(fn, module) { return module = { exports: {} }, fn(module, module.exports), module.exports; } var screenfull = createCommonjsModule(function (module) { /*! * screenfull * v5.1.0 - 2020-12-24 * (c) Sindre Sorhus; MIT License */ (function () { var document = typeof window !== 'undefined' && typeof window.document !== 'undefined' ? window.document : {}; var isCommonjs = module.exports; var fn = (function () { var val; var fnMap = [ [ 'requestFullscreen', 'exitFullscreen', 'fullscreenElement', 'fullscreenEnabled', 'fullscreenchange', 'fullscreenerror' ], // New WebKit [ 'webkitRequestFullscreen', 'webkitExitFullscreen', 'webkitFullscreenElement', 'webkitFullscreenEnabled', 'webkitfullscreenchange', 'webkitfullscreenerror' ], // Old WebKit [ 'webkitRequestFullScreen', 'webkitCancelFullScreen', 'webkitCurrentFullScreenElement', 'webkitCancelFullScreen', 'webkitfullscreenchange', 'webkitfullscreenerror' ], [ 'mozRequestFullScreen', 'mozCancelFullScreen', 'mozFullScreenElement', 'mozFullScreenEnabled', 'mozfullscreenchange', 'mozfullscreenerror' ], [ 'msRequestFullscreen', 'msExitFullscreen', 'msFullscreenElement', 'msFullscreenEnabled', 'MSFullscreenChange', 'MSFullscreenError' ] ]; var i = 0; var l = fnMap.length; var ret = {}; for (; i < l; i++) { val = fnMap[i]; if (val && val[1] in document) { for (i = 0; i < val.length; i++) { ret[fnMap[0][i]] = val[i]; } return ret; } } return false; })(); var eventNameMap = { change: fn.fullscreenchange, error: fn.fullscreenerror }; var screenfull = { request: function (element, options) { return new Promise(function (resolve, reject) { var onFullScreenEntered = function () { this.off('change', onFullScreenEntered); resolve(); }.bind(this); this.on('change', onFullScreenEntered); element = element || document.documentElement; var returnPromise = element[fn.requestFullscreen](options); if (returnPromise instanceof Promise) { returnPromise.then(onFullScreenEntered).catch(reject); } }.bind(this)); }, exit: function () { return new Promise(function (resolve, reject) { if (!this.isFullscreen) { resolve(); return; } var onFullScreenExit = function () { this.off('change', onFullScreenExit); resolve(); }.bind(this); this.on('change', onFullScreenExit); var returnPromise = document[fn.exitFullscreen](); if (returnPromise instanceof Promise) { returnPromise.then(onFullScreenExit).catch(reject); } }.bind(this)); }, toggle: function (element, options) { return this.isFullscreen ? 
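// (toggle(): exits fullscreen if the document is currently fullscreen, otherwise requests
// fullscreen on the given element.)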
this.exit() : this.request(element, options); }, onchange: function (callback) { this.on('change', callback); }, onerror: function (callback) { this.on('error', callback); }, on: function (event, callback) { var eventName = eventNameMap[event]; if (eventName) { document.addEventListener(eventName, callback, false); } }, off: function (event, callback) { var eventName = eventNameMap[event]; if (eventName) { document.removeEventListener(eventName, callback, false); } }, raw: fn }; if (!fn) { if (isCommonjs) { module.exports = {isEnabled: false}; } else { window.screenfull = {isEnabled: false}; } return; } Object.defineProperties(screenfull, { isFullscreen: { get: function () { return Boolean(document[fn.fullscreenElement]); } }, element: { enumerable: true, get: function () { return document[fn.fullscreenElement]; } }, isEnabled: { enumerable: true, get: function () { // Coerce to boolean in case of old WebKit return Boolean(document[fn.fullscreenEnabled]); } } }); if (isCommonjs) { module.exports = screenfull; } else { window.screenfull = screenfull; } })(); }); screenfull.isEnabled; // Exponential-Golomb buffer decoder class ExpGolomb$1 { constructor(uint8array) { this._buffer = uint8array; this._buffer_index = 0; this._total_bytes = uint8array.byteLength; this._total_bits = uint8array.byteLength * 8; this._current_word = 0; this._current_word_bits_left = 0; } destroy() { this._buffer = null; } _fillCurrentWord() { let buffer_bytes_left = this._total_bytes - this._buffer_index; if (buffer_bytes_left <= 0) { console.error('ExpGolomb: _fillCurrentWord() but no bytes available', this._total_bytes, this._buffer_index); return; } let bytes_read = Math.min(4, buffer_bytes_left); let word = new Uint8Array(4); word.set(this._buffer.subarray(this._buffer_index, this._buffer_index + bytes_read)); this._current_word = new DataView(word.buffer).getUint32(0, false); this._buffer_index += bytes_read; this._current_word_bits_left = bytes_read * 8; } readBits(bits) { if (bits > 32) { console.error('ExpGolomb: readBits() bits exceeded max 32bits!'); } if (bits <= this._current_word_bits_left) { let result = this._current_word >>> 32 - bits; this._current_word <<= bits; this._current_word_bits_left -= bits; return result; } let result = this._current_word_bits_left ? 
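// (When the request spans the current 32-bit word, the remaining bits are taken here,
// _fillCurrentWord() reloads the next word, and the rest of the bits are read below.)
// Worked example of readUEG() (unsigned Exp-Golomb): for the bit string 00101, two leading
// zeros are skipped, readBits(3) returns 0b101 = 5, and 5 - 1 = 4 is the decoded value.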
this._current_word : 0; result = result >>> 32 - this._current_word_bits_left; let bits_need_left = bits - this._current_word_bits_left; this._fillCurrentWord(); let bits_read_next = Math.min(bits_need_left, this._current_word_bits_left); let result2 = this._current_word >>> 32 - bits_read_next; this._current_word <<= bits_read_next; this._current_word_bits_left -= bits_read_next; result = result << bits_read_next | result2; return result; } readBool() { return this.readBits(1) === 1; } readByte() { return this.readBits(8); } _skipLeadingZero() { let zero_count; for (zero_count = 0; zero_count < this._current_word_bits_left; zero_count++) { if (0 !== (this._current_word & 0x80000000 >>> zero_count)) { this._current_word <<= zero_count; this._current_word_bits_left -= zero_count; return zero_count; } } this._fillCurrentWord(); return zero_count + this._skipLeadingZero(); } readUEG() { // unsigned exponential golomb let leading_zeros = this._skipLeadingZero(); return this.readBits(leading_zeros + 1) - 1; } readSEG() { // signed exponential golomb let value = this.readUEG(); if (value & 0x01) { return value + 1 >>> 1; } else { return -1 * (value >>> 1); } } } /** * 生成aac的asc头 * @param profile * @param sampleRate 这个是采样率的索引index, 不是采样率本身。 * @param channel * @returns {Uint8Array} */ function aacEncoderConfigurationRecordV2(_ref) { let { profile, sampleRate, channel } = _ref; const config1 = profile << 3 | (sampleRate & 0xe) >> 1; const config2 = (sampleRate & 0x1) << 7 | channel << 3; // 0xAF >> 4 === 10 const temp = [0xAF, 0x00, config1, config2]; const arrayBuffer = new Uint8Array(temp); return arrayBuffer; } /** * * @param payload * @returns {boolean} */ function isAacCodecPacket(payload) { return isAAC(payload) && payload[1] === AVC_PACKET_TYPE.sequenceHeader; } function isAAC(payload) { return payload[0] >> 4 === AUDIO_ENC_CODE.AAC; } const FREQ = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350]; const MPEG4SamplingFrequencies$1 = FREQ; const AAC_FREQ_LIST = FREQ; function getSilentFrame(codec, channelCount) { switch (codec) { case 'mp4a.40.2': if (channelCount === 1) { return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x23, 0x80]); } if (channelCount === 2) { return new Uint8Array([0x21, 0x00, 0x49, 0x90, 0x02, 0x19, 0x00, 0x23, 0x80]); } if (channelCount === 3) { return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x8e]); } if (channelCount === 4) { return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x80, 0x2c, 0x80, 0x08, 0x02, 0x38]); } if (channelCount === 5) { return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x38]); } if (channelCount === 6) { return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x00, 0xb2, 0x00, 0x20, 0x08, 0xe0]); } break; default: if (channelCount === 1) { return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x4e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x1c, 0x6, 0xf1, 0xc1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]); } if (channelCount === 2) { return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x0, 0x95, 0x0, 
0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]); } if (channelCount === 3) { return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]); } break; } } function getFrameDuration$1(rate) { let timescale = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 90000; return 1024 * timescale / rate; } function parseADTS(data, pts) { const len = data.length; let i = 0; while (i + 2 < len) { if (data[i] === 0xff && (data[i + 1] & 0xf6) === 0xf0) { break; } i++; } if (i >= len) return; const skip = i; const frames = []; const samplingFrequencyIndex = (data[i + 2] & 0x3c) >>> 2; const sampleRate = FREQ[samplingFrequencyIndex]; if (!sampleRate) throw new Error(`Invalid sampling index: ${samplingFrequencyIndex}`); // profile const objectType = ((data[i + 2] & 0xc0) >>> 6) + 1; // channel const channelCount = (data[i + 2] & 1) << 2 | (data[i + 3] & 0xc0) >>> 6; let protectionSkipBytes; let frameLength; let frameIndex = 0; const duration = getFrameDuration$1(sampleRate); while (i + 7 < len) { if (data[i] !== 0xff || (data[i + 1] & 0xF6) !== 0xf0) { i++; continue; } frameLength = (data[i + 3] & 0x03) << 11 | data[i + 4] << 3 | (data[i + 5] & 0xe0) >> 5; if (len - i < frameLength) break; protectionSkipBytes = (~data[i + 1] & 0x01) * 2; frames.push({ pts: pts + frameIndex * duration, data: data.subarray(i + 7 + protectionSkipBytes, i + frameLength) }); frameIndex++; i += frameLength; } return { skip, remaining: i >= len ? undefined : data.subarray(i), frames, samplingFrequencyIndex, sampleRate, objectType, channelCount, originCodec: `mp4a.40.${objectType}` }; } const _mpegSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350]; function parseAACAudioSpecificConfig(arrayBuffer) { let array = new Uint8Array(arrayBuffer); let config = null; /* Audio Object Type: 0: Null 1: AAC Main 2: AAC LC 3: AAC SSR (Scalable Sample Rate) 4: AAC LTP (Long Term Prediction) 5: HE-AAC / SBR (Spectral Band Replication) 6: AAC Scalable */ let audioObjectType = 0; let originalAudioObjectType = 0; let samplingIndex = 0; let extensionSamplingIndex = null; // 5 bits audioObjectType = originalAudioObjectType = array[0] >>> 3; // 4 bits samplingIndex = (array[0] & 0x07) << 1 | array[1] >>> 7; if (samplingIndex < 0 || samplingIndex >= _mpegSamplingRates.length) { console.error('Flv: AAC invalid sampling frequency index!'); return; } let samplingFrequence = _mpegSamplingRates[samplingIndex]; // 4 bits let channelConfig = (array[1] & 0x78) >>> 3; if (channelConfig < 0 || channelConfig >= 8) { console.log('Flv: AAC invalid channel configuration'); return; } if (audioObjectType === 5) { // HE-AAC? 
// 4 bits extensionSamplingIndex = (array[1] & 0x07) << 1 | array[2] >>> 7; // 5 bits (array[2] & 0x7C) >>> 2; } // workarounds for various browsers let userAgent = self.navigator.userAgent.toLowerCase(); if (userAgent.indexOf('firefox') !== -1) { // firefox: use SBR (HE-AAC) if freq less than 24kHz if (samplingIndex >= 6) { audioObjectType = 5; config = new Array(4); extensionSamplingIndex = samplingIndex - 3; } else { // use LC-AAC audioObjectType = 2; config = new Array(2); extensionSamplingIndex = samplingIndex; } } else if (userAgent.indexOf('android') !== -1) { // android: always use LC-AAC audioObjectType = 2; config = new Array(2); extensionSamplingIndex = samplingIndex; } else { // for other browsers, e.g. chrome... // Always use HE-AAC to make it easier to switch aac codec profile audioObjectType = 5; extensionSamplingIndex = samplingIndex; config = new Array(4); if (samplingIndex >= 6) { extensionSamplingIndex = samplingIndex - 3; } else if (channelConfig === 1) { // Mono channel audioObjectType = 2; config = new Array(2); extensionSamplingIndex = samplingIndex; } } config[0] = audioObjectType << 3; config[0] |= (samplingIndex & 0x0F) >>> 1; config[1] = (samplingIndex & 0x0F) << 7; config[1] |= (channelConfig & 0x0F) << 3; if (audioObjectType === 5) { config[1] |= (extensionSamplingIndex & 0x0F) >>> 1; config[2] = (extensionSamplingIndex & 0x01) << 7; // extended audio object type: force to 2 (LC-AAC) config[2] |= 2 << 2; config[3] = 0; } return { audioType: 'aac', config: config, sampleRate: samplingFrequence, channelCount: channelConfig, objectType: audioObjectType, codec: 'mp4a.40.' + audioObjectType, originalCodec: 'mp4a.40.' + originalAudioObjectType }; } class Bitop$1 { constructor(buffer) { this.buffer = buffer; this.buflen = buffer.length; this.bufpos = 0; this.bufoff = 0; this.iserro = false; } read(n) { let v = 0; let d = 0; while (n) { if (n < 0 || this.bufpos >= this.buflen) { this.iserro = true; return 0; } this.iserro = false; d = this.bufoff + n > 8 ? 8 - this.bufoff : n; v <<= d; v += this.buffer[this.bufpos] >> 8 - this.bufoff - d & 0xff >> 8 - d; this.bufoff += d; n -= d; if (this.bufoff == 8) { this.bufpos++; this.bufoff = 0; } } return v; } look(n) { let p = this.bufpos; let o = this.bufoff; let v = this.read(n); this.bufpos = p; this.bufoff = o; return v; } read_golomb() { let n; for (n = 0; this.read(1) == 0 && !this.iserro; n++); return (1 << n) + this.read(n) - 1; } } function getObjectType(bitop) { let audioObjectType = bitop.read(5); if (audioObjectType === 31) { audioObjectType = bitop.read(6) + 32; } return audioObjectType; } function getSampleRate(bitop, info) { info.sampling_index = bitop.read(4); return info.sampling_index == 0x0f ? 
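// (Per the MPEG-4 AudioSpecificConfig syntax, a samplingFrequencyIndex of 0x0F means the
// sample rate is written explicitly in the next 24 bits instead of being looked up in the
// AAC_SAMPLE_RATE table, which is what this ternary handles.)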
bitop.read(24) : AAC_SAMPLE_RATE[info.sampling_index]; } const AAC_SAMPLE_RATE = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350, 0, 0, 0]; const AAC_CHANNELS = [0, 1, 2, 3, 4, 5, 6, 8]; function readAACSpecificConfig(aacSequenceHeader) { let info = {}; let bitop = new Bitop$1(aacSequenceHeader); bitop.read(16); info.object_type = getObjectType(bitop); info.sample_rate = getSampleRate(bitop, info); info.chan_config = bitop.read(4); if (info.chan_config < AAC_CHANNELS.length) { info.channels = AAC_CHANNELS[info.chan_config]; } info.sbr = -1; info.ps = -1; if (info.object_type == 5 || info.object_type == 29) { if (info.object_type == 29) { info.ps = 1; } info.ext_object_type = 5; info.sbr = 1; info.sample_rate = getSampleRate(bitop, info); info.object_type = getObjectType(bitop); } return { ...info, channelCount: info.channels, sampleRate: info.sample_rate }; } class AACADTSParser { constructor(data) { this.data_ = data; this.eof_flag_ = false; this.current_syncword_offset_ = this.findNextSyncwordOffset(0); if (this.eof_flag_) { console.error('Could not found ADTS syncword until payload end'); } } findNextSyncwordOffset(syncword_offset) { let i = syncword_offset; let data = this.data_; while (true) { if (i + 7 >= data.byteLength) { this.eof_flag_ = true; return data.byteLength; } // search 12-bit 0xFFF syncword let syncword = (data[i + 0] << 8 | data[i + 1]) >>> 4; if (syncword === 0xFFF) { return i; } else { i++; } } } readNextAACFrame() { let data = this.data_; let aac_frame = null; while (aac_frame == null) { if (this.eof_flag_) { break; } let syncword_offset = this.current_syncword_offset_; let offset = syncword_offset; // adts_fixed_header() // syncword 0xFFF: 12-bit let ID = (data[offset + 1] & 0x08) >>> 3; let layer = (data[offset + 1] & 0x06) >>> 1; let protection_absent = data[offset + 1] & 0x01; let profile = (data[offset + 2] & 0xC0) >>> 6; let sampling_frequency_index = (data[offset + 2] & 0x3C) >>> 2; let channel_configuration = (data[offset + 2] & 0x01) << 2 | (data[offset + 3] & 0xC0) >>> 6; // adts_variable_header() let aac_frame_length = (data[offset + 3] & 0x03) << 11 | data[offset + 4] << 3 | (data[offset + 5] & 0xE0) >>> 5; data[offset + 6] & 0x03; if (offset + aac_frame_length > this.data_.byteLength) { // data not enough for extracting last sample this.eof_flag_ = true; this.has_last_incomplete_data = true; break; } let adts_header_length = protection_absent === 1 ? 7 : 9; let adts_frame_payload_length = aac_frame_length - adts_header_length; offset += adts_header_length; let next_syncword_offset = this.findNextSyncwordOffset(offset + adts_frame_payload_length); this.current_syncword_offset_ = next_syncword_offset; if (ID !== 0 && ID !== 1 || layer !== 0) { // invalid adts frame ? 
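// (ADTS recap: the fixed header begins with the 12-bit 0xFFF syncword and is 7 bytes long
// when protection_absent === 1, or 9 bytes when a CRC is present, which is why
// adts_header_length above switches between 7 and 9. Frames whose ID/layer fields look
// invalid are skipped below; current_syncword_offset_ was already advanced, so the loop
// resynchronizes at the next syncword.)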
continue; } let frame_data = data.subarray(offset, offset + adts_frame_payload_length); aac_frame = {}; aac_frame.audio_object_type = profile + 1; aac_frame.sampling_freq_index = sampling_frequency_index; aac_frame.sampling_frequency = MPEG4SamplingFrequencies$1[sampling_frequency_index]; aac_frame.channel_config = channel_configuration; aac_frame.data = frame_data; } return aac_frame; } hasIncompleteData() { return this.has_last_incomplete_data; } getIncompleteData() { if (!this.has_last_incomplete_data) { return null; } return this.data_.subarray(this.current_syncword_offset_); } } class AACLOASParser { constructor(data) { this.data_ = data; this.eof_flag_ = false; this.current_syncword_offset_ = this.findNextSyncwordOffset(0); if (this.eof_flag_) { console.error('Could not found ADTS syncword until payload end'); } } findNextSyncwordOffset(syncword_offset) { let i = syncword_offset; let data = this.data_; while (true) { if (i + 1 >= data.byteLength) { this.eof_flag_ = true; return data.byteLength; } // search 12-bit 0xFFF syncword let syncword = data[i + 0] << 3 | data[i + 1] >>> 5; if (syncword === 0x2B7) { return i; } else { i++; } } } getLATMValue(gb) { let bytesForValue = gb.readBits(2); let value = 0; for (let i = 0; i <= bytesForValue; i++) { value = value << 8; value = value | gb.readByte(); } return value; } readNextAACFrame(privious) { let data = this.data_; let aac_frame = null; while (aac_frame == null) { if (this.eof_flag_) { break; } let syncword_offset = this.current_syncword_offset_; let offset = syncword_offset; let audioMuxLengthBytes = (data[offset + 1] & 0x1F) << 8 | data[offset + 2]; if (offset + 3 + audioMuxLengthBytes >= this.data_.byteLength) { // data not enough for extracting last sample this.eof_flag_ = true; this.has_last_incomplete_data = true; break; } // AudioMuxElement(1) let gb = new ExpGolomb$1(data.subarray(offset + 3, offset + 3 + audioMuxLengthBytes)); let useSameStreamMux = gb.readBool(); let streamMuxConfig = null; if (!useSameStreamMux) { let audioMuxVersion = gb.readBool(); let audioMuxVersionA = audioMuxVersion && gb.readBool(); if (audioMuxVersionA) { console.error('audioMuxVersionA is Not Supported'); gb.destroy(); break; } if (audioMuxVersion) { this.getLATMValue(gb); } let allStreamsSameTimeFraming = gb.readBool(); if (!allStreamsSameTimeFraming) { console.error('allStreamsSameTimeFraming zero is Not Supported'); gb.destroy(); break; } let numSubFrames = gb.readBits(6); if (numSubFrames !== 0) { console.error('more than 2 numSubFrames Not Supported'); gb.destroy(); break; } let numProgram = gb.readBits(4); if (numProgram !== 0) { console.error('more than 2 numProgram Not Supported'); gb.destroy(); break; } let numLayer = gb.readBits(3); if (numLayer !== 0) { console.error('more than 2 numLayer Not Supported'); gb.destroy(); break; } let fillBits = audioMuxVersion ? this.getLATMValue(gb) : 0; let audio_object_type = gb.readBits(5); fillBits -= 5; let sampling_freq_index = gb.readBits(4); fillBits -= 4; let channel_config = gb.readBits(4); fillBits -= 4; gb.readBits(3); fillBits -= 3; // GA Specfic Config if (fillBits > 0) { gb.readBits(fillBits); } let frameLengthType = gb.readBits(3); if (frameLengthType === 0) { gb.readByte(); } else { console.error(`frameLengthType = ${frameLengthType}. 
Only frameLengthType = 0 Supported`); gb.destroy(); break; } let otherDataPresent = gb.readBool(); if (otherDataPresent) { if (audioMuxVersion) { this.getLATMValue(gb); } else { let otherDataLenBits = 0; while (true) { otherDataLenBits = otherDataLenBits << 8; let otherDataLenEsc = gb.readBool(); let otherDataLenTmp = gb.readByte(); otherDataLenBits += otherDataLenTmp; if (!otherDataLenEsc) { break; } } console.log(otherDataLenBits); } } let crcCheckPresent = gb.readBool(); if (crcCheckPresent) { gb.readByte(); } streamMuxConfig = {}; streamMuxConfig.audio_object_type = audio_object_type; streamMuxConfig.sampling_freq_index = sampling_freq_index; streamMuxConfig.sampling_frequency = MPEG4SamplingFrequencies$1[streamMuxConfig.sampling_freq_index]; streamMuxConfig.channel_config = channel_config; streamMuxConfig.other_data_present = otherDataPresent; } else if (privious == null) { console.warn('StreamMuxConfig Missing'); this.current_syncword_offset_ = this.findNextSyncwordOffset(offset + 3 + audioMuxLengthBytes); gb.destroy(); continue; } else { streamMuxConfig = privious; } let length = 0; while (true) { let tmp = gb.readByte(); length += tmp; if (tmp !== 0xFF) { break; } } let aac_data = new Uint8Array(length); for (let i = 0; i < length; i++) { aac_data[i] = gb.readByte(); } aac_frame = {}; aac_frame.audio_object_type = streamMuxConfig.audio_object_type; aac_frame.sampling_freq_index = streamMuxConfig.sampling_freq_index; aac_frame.sampling_frequency = MPEG4SamplingFrequencies$1[streamMuxConfig.sampling_freq_index]; aac_frame.channel_config = streamMuxConfig.channel_config; aac_frame.other_data_present = streamMuxConfig.other_data_present; aac_frame.data = aac_data; this.current_syncword_offset_ = this.findNextSyncwordOffset(offset + 3 + audioMuxLengthBytes); } return aac_frame; } hasIncompleteData() { return this.has_last_incomplete_data; } getIncompleteData() { if (!this.has_last_incomplete_data) { return null; } return this.data_.subarray(this.current_syncword_offset_); } } function readBig32$1(data) { let i = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; return (data[i] << 24 >>> 0) + (data[i + 1] << 16) + (data[i + 2] << 8) + (data[i + 3] || 0); } /** * parse annexb format * --Annex-B格式 也叫MPEG-2 transport stream format格式(ts格式), ElementaryStream格式。 * @param data * @returns {*[]} */ function parseAnnexB(data) { const len = data.length; let start = 2; let end = 0; while (data[start] !== null && data[start] !== undefined && data[start] !== 1) { start++; } start++; end = start + 2; if (end >= len) return []; const units = []; while (end < len) { switch (data[end]) { case 0: if (data[end - 1] !== 0) { end += 2; break; } else if (data[end - 2] !== 0) { end++; break; } if (start !== end - 2) units.push(data.subarray(start, end - 2)); do { end++; } while (data[end] !== 1 && end < len); start = end + 1; end = start + 2; break; case 1: if (data[end - 1] !== 0 || data[end - 2] !== 0) { end += 3; break; } if (start !== end - 2) units.push(data.subarray(start, end - 2)); start = end + 1; end = start + 2; break; default: end += 3; break; } } if (start < len) units.push(data.subarray(start)); return units; } // AVCC格式 也叫AVC1格式,MPEG-4格式,字节对齐,因此也叫Byte-Stream Format。用于mp4/flv/mkv, VideoToolbox。 // 使用NALU长度(固定字节,通常为4字节)分隔NAL。 function parseAvcC(data) { let size = arguments.length > 1 && arguments[1] !== undefined ? 
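// A minimal usage sketch (the bytes are illustrative): parseAnnexB splits a start-code delimited
// stream (00 00 01 or 00 00 00 01 between NAL units) into raw NAL units, while parseAvcC expects
// every NAL unit to be prefixed with a big-endian length of `size` bytes (4 by default) instead.
// const annexB = new Uint8Array([0, 0, 0, 1, 0x67, 0xAA, 0, 0, 1, 0x68, 0xBB]);
// parseAnnexB(annexB);   // -> [Uint8Array [0x67, 0xAA], Uint8Array [0x68, 0xBB]]
// const avcc = new Uint8Array([0, 0, 0, 2, 0x67, 0xAA, 0, 0, 0, 2, 0x68, 0xBB]);
// parseAvcC(avcc, 4);    // -> the same two NAL units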
arguments[1] : 4; if (data.length < 4) return; const dataLen = data.length; const units = []; let offset = 0; let length; while (offset + size < dataLen) { length = readBig32$1(data, offset); if (size === 3) length >>>= 8; offset += size; if (!length) continue; if (offset + length > dataLen) { break; } units.push(data.subarray(offset, offset + length)); offset += length; } return units; } // remove 0x000003 function removeEPB(uint) { const length = uint.byteLength; const emulationPreventionBytesPositions = []; let i = 1; while (i < length - 2) { if (uint[i] === 0 && uint[i + 1] === 0 && uint[i + 2] === 0x03) { emulationPreventionBytesPositions.push(i + 2); i += 2; } else { i++; } } if (!emulationPreventionBytesPositions.length) return uint; const newLength = length - emulationPreventionBytesPositions.length; const newData = new Uint8Array(newLength); let sourceIndex = 0; for (i = 0; i < newLength; sourceIndex++, i++) { if (sourceIndex === emulationPreventionBytesPositions[0]) { sourceIndex++; emulationPreventionBytesPositions.shift(); } newData[i] = uint[sourceIndex]; } return newData; } // parse SEI function parseSEI(unit, isHevc) { const len = unit.length; let i = isHevc ? 2 : 1; let type = 0; let size = 0; let uuid = ''; while (unit[i] === 255) { type += 255; i++; } type += unit[i++]; while (unit[i] === 255) { size += 255; i++; } size += unit[i++]; if (type === 5 && len > i + 16) { for (let j = 0; j < 16; j++) { uuid += unit[i].toString(16); i++; } } return { payload: unit.subarray(i), type, size, uuid }; } // add preview 4 byte length (preview tag size) function addNaleHeaderLength(nalUnit) { const nalUnitLength = nalUnit.byteLength; const header = new Uint8Array(4); header[0] = nalUnitLength >>> 24 & 0xff; header[1] = nalUnitLength >>> 16 & 0xff; header[2] = nalUnitLength >>> 8 & 0xff; header[3] = nalUnitLength & 0xff; const result = new Uint8Array(nalUnitLength + 4); result.set(header, 0); result.set(nalUnit, 4); return result; } function getUnitSizeFromVideoSequenceHeader(payload, isHevc) { let result = null; if (isHevc) { if (payload.length >= 23 + 5) { result = (payload[21 + 5] & 3) + 1; } } else { if (payload.length >= 7 + 5) { result = (payload[4 + 5] & 3) + 1; } } return result; } function noop$3() {} function supportOffscreen($canvas) { return typeof $canvas.transferControlToOffscreen === 'function'; } function supportOffscreenV2() { return typeof OffscreenCanvas !== "undefined"; } function createContextGL($canvas) { let gl = null; const validContextNames = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"]; let nameIndex = 0; while (!gl && nameIndex < validContextNames.length) { const contextName = validContextNames[nameIndex]; try { let contextOptions = { preserveDrawingBuffer: true }; gl = $canvas.getContext(contextName, contextOptions); } catch (e) { console.error(e); gl = null; } if (!gl || typeof gl.getParameter !== "function") { gl = null; } ++nameIndex; } return gl; } function createContextGL2($canvas) { let gl2 = null; gl2 = $canvas.getContext("webgl2"); return gl2; } function dataURLToFile() { let dataURL = arguments.length > 0 && arguments[0] !== undefined ? 
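// A minimal usage sketch: removeEPB above drops the 0x03 emulation-prevention byte of every
// 00 00 03 triple inside a NAL unit, and addNaleHeaderLength re-prefixes a NAL unit with its
// 4-byte big-endian length (AVCC framing). The bytes are illustrative.
// removeEPB(Uint8Array.of(0x67, 0x00, 0x00, 0x03, 0x01));  // -> Uint8Array [0x67, 0x00, 0x00, 0x01]
// addNaleHeaderLength(Uint8Array.of(0x68, 0xEE));          // -> Uint8Array [0x00, 0x00, 0x00, 0x02, 0x68, 0xEE]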
arguments[0] : ''; const arr = dataURL.split(","); const bstr = atob(arr[1]); const type = arr[0].replace("data:", "").replace(";base64", ""); let n = bstr.length, u8arr = new Uint8Array(n); while (n--) { u8arr[n] = bstr.charCodeAt(n); } return new File([u8arr], 'file', { type }); } function now$2() { return new Date().getTime(); } (() => { try { if (typeof WebAssembly === "object" && typeof WebAssembly.instantiate === "function") { const module = new WebAssembly.Module(Uint8Array.of(0x0, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00)); if (module instanceof WebAssembly.Module) return new WebAssembly.Instance(module) instanceof WebAssembly.Instance; } } catch (e) {} return false; })(); function clamp(num, a, b) { return Math.max(Math.min(num, Math.max(a, b)), Math.min(a, b)); } function setStyle$1(element, key, value) { if (!element) { return; } if (typeof key === 'object') { Object.keys(key).forEach(item => { setStyle$1(element, item, key[item]); }); } element.style[key] = value; return element; } function getStyle(element, key) { let numberType = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true; if (!element) { return 0; } const value = getComputedStyle(element, null).getPropertyValue(key); return numberType ? parseFloat(value) : value; } function getNowTime() { if (performance && typeof performance.now === 'function') { return performance.now(); } return Date.now(); } function calculationRate(callback) { let totalSize = 0; let lastTime = getNowTime(); return size => { if (!isNumber(size)) { return; } totalSize += size; const thisTime = getNowTime(); const diffTime = thisTime - lastTime; if (diffTime >= 1000) { callback(totalSize / diffTime * 1000); lastTime = thisTime; totalSize = 0; } }; } const env = '"development"'; const proVersionTime = '"2-27-2024"'; const isRelease = env === '"release"'; function isMobile() { return /iphone|ipod|android.*mobile|windows.*phone|blackberry.*mobile/i.test(window.navigator.userAgent.toLowerCase()); } function isNoSleepMobile() { return /(iphone|ipad|ipod|ios|android)/i.test(window.navigator.userAgent.toLowerCase()); } function isPad() { return /ipad|android(?!.*mobile)|tablet|kindle|silk/i.test(window.navigator.userAgent.toLowerCase()); } function isPc() { return !(isMobile() || isPad()); } function isAndroid() { const UA = window.navigator.userAgent.toLowerCase(); return /android/i.test(UA); } function isFirefox() { const UA = window.navigator.userAgent.toLowerCase(); return /firefox/i.test(UA); } function getBrowser() { const UserAgent = window.navigator.userAgent.toLowerCase() || ''; const browserInfo = { type: '', version: '' }; const browserArray = { IE: window.ActiveXObject || "ActiveXObject" in window, // IE Chrome: UserAgent.indexOf('chrome') > -1 && UserAgent.indexOf('safari') > -1, // Chrome浏览器 Firefox: UserAgent.indexOf('firefox') > -1, // 火狐浏览器 Opera: UserAgent.indexOf('opera') > -1, // Opera浏览器 Safari: UserAgent.indexOf('safari') > -1 && UserAgent.indexOf('chrome') == -1, // safari浏览器 Edge: UserAgent.indexOf('edge') > -1, // Edge浏览器 QQBrowser: /qqbrowser/.test(UserAgent), // qq浏览器 WeixinBrowser: /MicroMessenger/i.test(UserAgent) // 微信浏览器 }; // console.log(browserArray) for (let i in browserArray) { if (browserArray[i]) { let versions = ''; if (i === 'IE') { const versionArray = UserAgent.match(/(msie\s|trident.*rv:)([\w.]+)/); if (versionArray && versionArray.length > 2) { versions = UserAgent.match(/(msie\s|trident.*rv:)([\w.]+)/)[2]; } } else if (i === 'Chrome') { for (let mt in navigator.mimeTypes) { 
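// A minimal usage sketch (reportRate and the read loop are illustrative names): calculationRate above
// builds a throughput meter; feed it sizes as chunks arrive and, roughly once per second, it invokes
// the callback with the per-second rate in whatever unit was fed in, then resets the counter.
// const reportRate = calculationRate(bytesPerSecond => {
//   console.log((bytesPerSecond / 1024).toFixed(2) + ' KB/s');
// });
// // inside a fetch / websocket read loop: reportRate(chunk.byteLength);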
//检测是否是360浏览器(测试只有pc端的360才起作用) if (navigator.mimeTypes[mt]['type'] === 'application/360softmgrplugin') { i = '360'; } } const versionArray = UserAgent.match(/chrome\/([\d.]+)/); if (versionArray && versionArray.length > 1) { versions = versionArray[1]; } } else if (i === 'Firefox') { const versionArray = UserAgent.match(/firefox\/([\d.]+)/); if (versionArray && versionArray.length > 1) { versions = versionArray[1]; } } else if (i === 'Opera') { const versionArray = UserAgent.match(/opera\/([\d.]+)/); if (versionArray && versionArray.length > 1) { versions = versionArray[1]; } } else if (i === 'Safari') { const versionArray = UserAgent.match(/version\/([\d.]+)/); if (versionArray && versionArray.length > 1) { versions = versionArray[1]; } } else if (i === 'Edge') { const versionArray = UserAgent.match(/edge\/([\d.]+)/); if (versionArray && versionArray.length > 1) { versions = versionArray[1]; } } else if (i === 'QQBrowser') { const versionArray = UserAgent.match(/qqbrowser\/([\d.]+)/); if (versionArray && versionArray.length > 1) { versions = versionArray[1]; } } browserInfo.type = i; browserInfo.version = parseInt(versions); } } return browserInfo; } function isIOS() { const UA = window.navigator.userAgent.toLowerCase(); return UA && /iphone|ipad|ipod|ios/.test(UA); } function isSafari() { const ua = window.navigator.userAgent; return !ua.match(/Chrome/gi) && !!ua.match(/Safari/gi); } function parseTime(time, cFormat) { if (arguments.length === 0) { return null; } var format = cFormat || '{y}-{m}-{d} {h}:{i}:{s}'; var date; if (typeof time === 'object') { date = time; } else { if (('' + time).length === 10) time = parseInt(time) * 1000; time = +time; // 转成int 型 date = new Date(time); } var formatObj = { y: date.getFullYear(), m: date.getMonth() + 1, d: date.getDate(), h: date.getHours(), i: date.getMinutes(), s: date.getSeconds(), a: date.getDay() }; var time_str = format.replace(/{(y|m|d|h|i|s|a)+}/g, (result, key) => { var value = formatObj[key]; if (key === 'a') return ['一', '二', '三', '四', '五', '六', '日'][value - 1]; if (result.length > 0 && value < 10) { value = '0' + value; } return value || 0; }); return time_str; } // 是否支持 webcodecs function supportWCS() { return "VideoEncoder" in window; } function supportWasmUseVideoRender() { return 'VideoFrame' in window; } function toNumber(value) { if (typeof value !== 'string') { return value; } else { // 转换成 number 类型 var parsed = Number(value); return isNaN(parsed) ? value : parsed; } } function uuid16() { return 'xxxxxxxxxxxx4xxx'.replace(/[xy]/g, function (c) { var r = Math.random() * 16 | 0, v = c == 'x' ? 
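// A minimal usage sketch: parseTime above accepts a Date, a millisecond timestamp, or a 10-digit
// second timestamp (multiplied by 1000), and formats it with {y} {m} {d} {h} {i} {s} tokens
// (default '{y}-{m}-{d} {h}:{i}:{s}'), zero-padding values below 10.
// parseTime(new Date(2024, 0, 5, 9, 3, 7));                  // -> '2024-01-05 09:03:07'
// parseTime(new Date(2024, 0, 5, 9, 3, 7), '{h}:{i}:{s}');   // -> '09:03:07'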
r : r & 0x3 | 0x8; return v.toString(16); }); } function throttle(callback, delay) { let isThrottled = false; let args; let context; function fn() { for (var _len = arguments.length, args2 = new Array(_len), _key = 0; _key < _len; _key++) { args2[_key] = arguments[_key]; } if (isThrottled) { args = args2; context = this; return; } isThrottled = true; callback.apply(this, args2); setTimeout(() => { isThrottled = false; if (args) { fn.apply(context, args); args = null; context = null; } }, delay); } return fn; } function isFullScreen() { // return document.isFullScreen || document.mozIsFullScreen || document.webkitIsFullScreen; return screenfull.isFullscreen; } // function bpsSize(value) { if (null == value || value === '') { return "0 KB/s"; } let size = parseFloat(value); size = size.toFixed(2); return size + 'KB/s'; } function bpsSize$2(value) { if (null == value || value === '' || parseFloat(value) === 0 || value === 'NaN') { return "0 KB/s"; } const unitArr = ["KB/s", "MB/s", "GB/s", "TB/s", "PB/s", "EB/s", "ZB/s", "YB/s"]; let index = 0; const srcsize = parseFloat(value); index = Math.floor(Math.log(srcsize) / Math.log(1024)); let size = srcsize / Math.pow(1024, index); size = size.toFixed(2); // return size + (unitArr[index] || unitArr[0]); } function formatFileSize(value) { if (null == value || value == '') { return "0 Bytes"; } const unitArr = new Array("Bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"); let index = 0; const srcsize = parseFloat(value); index = Math.floor(Math.log(srcsize) / Math.log(1024)); var size = srcsize / Math.pow(1024, index); size = size.toFixed(2); //保留的小数位数 return size + unitArr[index]; } function isNumber(value) { const toString = Object.prototype.toString; return toString.call(value) === "[object Number]"; } function fpsStatus(fps, metaDataFps) { let result = 3; // 流畅 const baseFps = metaDataFps || 25; if (fps < baseFps * 0.33) { result = 0; // 非常卡顿 } else if (fps < baseFps * 0.5) { result = 1; // 卡顿 } else if (fps < baseFps * 0.83) { result = 2; // 稍微卡顿 } return result; } function createEmptyImageBitmap(width, height) { const $canvasElement = document.createElement("canvas"); $canvasElement.width = width; $canvasElement.height = height; const imageBitmap = window.createImageBitmap($canvasElement, 0, 0, width, height); // release canvas $canvasElement.width = 0; $canvasElement.height = 0; return imageBitmap; } function supportMSE() { let result = false; if ('MediaSource' in self) { result = true; } return result; } function supportIosMSE() { let result = false; if (!('MediaSource' in self) && 'ManagedMediaSource' in self) { result = true; } return result; } // function supportMSEDecodeHevc() { let result = false; if ('MediaSource' in self && (self.MediaSource.isTypeSupported(MP4_CODECS.hev) || self.MediaSource.isTypeSupported(MP4_CODECS.hev2) || self.MediaSource.isTypeSupported(MP4_CODECS.hev3) || self.MediaSource.isTypeSupported(MP4_CODECS.hev4) || self.MediaSource.isTypeSupported(MP4_CODECS.hev5))) { result = true; } return result; } function supportIosMSEDecodeHevc() { let result = false; if (!('MediaSource' in self) && 'ManagedMediaSource' in self && (self.ManagedMediaSource.isTypeSupported(MP4_CODECS.hev) || self.ManagedMediaSource.isTypeSupported(MP4_CODECS.hev2) || self.ManagedMediaSource.isTypeSupported(MP4_CODECS.hev3) || self.ManagedMediaSource.isTypeSupported(MP4_CODECS.hev4) || self.ManagedMediaSource.isTypeSupported(MP4_CODECS.hev5))) { result = true; } return result; } // 查看是否webcodecs 支持 hevc解码 // chrome 107 function 
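// Worked values for fpsStatus above: the measured fps is graded against the nominal fps
// (metaDataFps, default 25): below 33% -> 0 (severely choppy), below 50% -> 1 (choppy),
// below 83% -> 2 (slightly choppy), otherwise 3 (smooth).
// fpsStatus(24, 25);  // -> 3  (24 >= 25 * 0.83)
// fpsStatus(20, 25);  // -> 2  (20 <  25 * 0.83)
// fpsStatus(10);      // -> 1  (10 <  25 * 0.5)
// fpsStatus(6);       // -> 0  ( 6 <  25 * 0.33)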
supportWCSDecodeHevc() { const browserInfo = getBrowser(); return browserInfo.type.toLowerCase() === 'chrome' && browserInfo.version >= 107; } function supportMediaStreamTrack() { let result = false; if ('MediaStreamTrackGenerator' in window) { result = true; } return result; } function supportMediaStream() { let result = false; if ('MediaStream' in window) { result = true; } return result; } function saveBlobToFile(fileName, blob) { let url = window.URL.createObjectURL(blob); let aLink = window.document.createElement('a'); aLink.download = fileName; aLink.href = url; //创建内置事件并触发 let evt = window.document.createEvent('MouseEvents'); evt.initEvent("click", true, true); //initEvent 不加后两个参数在FF下会报错 事件类型,是否冒泡,是否阻止浏览器的默认行为 aLink.dispatchEvent(evt); setTimeout(() => { window.URL.revokeObjectURL(url); }, isIOS() ? 1000 : 0); } function isEmpty(value) { return value === null || value === undefined; } function isBoolean(value) { return value === true || value === false; } function isNotEmpty(value) { return !isEmpty(value); } function isUndefined(value) { return value === undefined; } function initPlayTimes() { return { playInitStart: '', //1 playStart: '', // 2 streamStart: '', //3 streamResponse: '', // 4 demuxStart: '', // 5 decodeStart: '', // 6 videoStart: '', // 7 playTimestamp: '', // playStart- playInitStart streamTimestamp: '', // streamStart - playStart streamResponseTimestamp: '', // streamResponse - streamStart demuxTimestamp: '', // demuxStart - streamResponse decodeTimestamp: '', // decodeStart - demuxStart videoTimestamp: '', // videoStart - decodeStart allTimestamp: '' // videoStart - playInitStart }; } function formatWatermarkOptions(options) { let defaultConfig = { left: '', right: '', top: '', bottom: '', opacity: 1, backgroundColor: '', image: { src: '', width: '100', height: '60' }, text: { content: '', fontSize: '14', color: '#000', width: '', height: '' }, rect: { color: 'green', // border color lineWidth: 2, width: '', height: '', fill: '', // fill color fillOpacity: 0.2 // fill opacity }, line: { x1: '', y1: '', x2: '', y2: '', color: 'green', lineWidth: 2 }, // 多边形 polygon: { color: 'green', lineWidth: 2, list: [], fill: '', // fill color fillOpacity: 0.2 // fill opacity }, html: '' }; const imageConfig = Object.assign(defaultConfig.image, options.image || {}); const textConfig = Object.assign(defaultConfig.text, options.text || {}); const rectConfig = Object.assign(defaultConfig.rect, options.rect || {}); const lineConfig = Object.assign(defaultConfig.line, options.line || {}); defaultConfig = Object.assign(defaultConfig, options, { image: imageConfig, text: textConfig, rect: rectConfig, line: lineConfig }); return defaultConfig; } function formatFullscreenWatermarkOptions(container, options) { let defaultConfig = { container: container || '', text: '', // 文本 opacity: '', // 透明度 angle: '', // 倾斜角度 color: '', // 字体颜色 fontSize: '', // 字体大小 fontFamily: '' // 字体 }; defaultConfig = Object.assign(defaultConfig, options); return { watermark_parent_node: defaultConfig.container, watermark_alpha: defaultConfig.opacity, watermark_angle: defaultConfig.angle, watermark_fontsize: defaultConfig.fontSize, watermark_color: defaultConfig.color, watermark_font: defaultConfig.fontFamily, watermark_txt: defaultConfig.text }; } // create image watermark function createImageWatermark(dataUrl, options) { return new Promise((resolve, reject) => { let defaultConfig = formatWatermarkOptions(options); if (!defaultConfig.image.src && !defaultConfig.text.content) { return resolve(dataUrl); } let 
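// A minimal config sketch (values are illustrative): formatWatermarkOptions above merges a partial
// watermark config into the full default structure (position, opacity, image / text / rect / line /
// polygon sub-objects), so callers such as createImageWatermark only pass the fields they need.
// const cfg = formatWatermarkOptions({
//   right: 10,
//   top: 10,
//   text: { content: 'demo', fontSize: '16', color: '#fff' }
// });
// // cfg.image, cfg.rect, cfg.line and the remaining text fields keep the defaults listed above.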
canvas = document.createElement('canvas'); canvas.width = options.width; canvas.height = options.height; let ctx = canvas.getContext('2d'); let x = 0; let y = 0; if (isNumber(defaultConfig.left)) { x = defaultConfig.left; } else if (isNumber(defaultConfig.right)) { x = canvas.width - defaultConfig.right; } if (isNumber(defaultConfig.top)) { y = defaultConfig.top; } else if (isNumber(defaultConfig.bottom)) { y = canvas.height - defaultConfig.bottom; } const imag = new Image(); imag.src = dataUrl; imag.onload = () => { ctx.drawImage(imag, 0, 0); if (defaultConfig.image && defaultConfig.image.src) { const tempImage = new Image(); tempImage.src = defaultConfig.image.src; tempImage.setAttribute("crossOrigin", 'Anonymous'); tempImage.onload = () => { x -= defaultConfig.image.width; // y -= defaultConfig.image.height; ctx.drawImage(tempImage, x, y, defaultConfig.image.width, defaultConfig.image.height); resolve(canvas.toDataURL(options.format, options.quality)); }; tempImage.onerror = e => { reject(); }; } else if (defaultConfig.text && defaultConfig.text.content) { // 设置填充字号和字体,样式 ctx.font = defaultConfig.text.fontSize + "px 宋体"; // color ctx.fillStyle = defaultConfig.text.color; // 设置右对齐 ctx.textAlign = 'right'; // 在指定位置绘制文字,这里指定距离右下角20坐标的地方 ctx.fillText(defaultConfig.text.content, x, y); resolve(canvas.toDataURL(options.format, options.quality)); } }; imag.onerror = e => { reject(e); }; }); } function formatTimeTips(time) { var result; // if (time > -1) { var hour = Math.floor(time / 3600); var min = Math.floor(time / 60) % 60; var sec = time % 60; sec = Math.round(sec); if (hour < 10) { result = '0' + hour + ":"; } else { result = hour + ":"; } if (min < 10) { result += "0"; } result += min + ":"; if (sec < 10) { result += "0"; } result += sec.toFixed(0); } return result; } // function createVideoFrame(arrayBuffer, init) { return new VideoFrame(arrayBuffer, init); } // min timestamp function formatMinTimeTips(time, second) { let result = ''; // if (time > -1) { const hour = Math.floor(time / 60) % 60; let min = time % 60; min = Math.round(min); if (hour < 10) { result = '0' + hour + ":"; } else { result = hour + ":"; } if (min < 10) { result += "0"; } result += min; if (!isEmpty(second)) { if (second < 10) { second = '0' + second; } result += ':' + second; } } return result; } // second timestamp function formatSecondTimeTips(time) { let result = ''; if (time > -1) { const hour = Math.floor(time / 60 / 60) % 60; let min = Math.floor(time / 60) % 60; let second = time % 60; min = Math.round(min); if (hour < 10) { result = '0' + hour + ":"; } else { result = hour + ":"; } if (min < 10) { result += "0"; } result += min + ':'; if (second < 10) { result += '0'; } result += second; } return result; } function formatSecondTime(time) { let result = {}; if (time > -1) { const hour = Math.floor(time / 60 / 60) % 60; let min = Math.floor(time / 60) % 60; let second = time % 60; result = { hour, min, second }; } return result; } /** * * @param time */ function formatMinuteTimestamp(day, time) { const hour = Math.floor(time / 60) % 60; const min = Math.floor(time % 60); const nowMinuteTimestamp = new Date(day).setHours(hour, min, 0, 0); return nowMinuteTimestamp; } function formatSecondTimestamp(day, time) { const hour = Math.floor(time / 60 / 60) % 60; const min = Math.floor(time / 60) % 60; const second = time % 60; const nowSecondTimestamp = new Date(day).setHours(hour, min, second, 0); return nowSecondTimestamp; } function getStrLength(value) { return ('' + value).length; } function isEmptyObject(obj) 
{ return obj && Object.keys(obj).length === 0; } function isNotEmptyObject(obj) { return !isEmptyObject(obj); } function isString(value) { return typeof value === "string"; } function isSupportSIMD() { return WebAssembly.validate(new Uint8Array([0, 97, 115, 109, 1, 0, 0, 0, 1, 5, 1, 96, 0, 1, 123, 3, 2, 1, 0, 10, 10, 1, 8, 0, 65, 0, 253, 15, 253, 98, 11])); } function isSupportSharedArrayBuffer() { return typeof SharedArrayBuffer !== 'undefined'; } const isWeChat = () => { const userAgent = window.navigator.userAgent; return /MicroMessenger/i.test(userAgent); }; const isChrome = () => { const userAgent = window.navigator.userAgent; return /Chrome/i.test(userAgent); }; // const isWeChatInAndroid = () => { return isWeChat() && isAndroid(); }; const isWeChatInIOS = () => { return isWeChat() && isIOS(); }; function getTarget(e) { const event = e || window.event; const target = event.target || event.srcElement; return target; } function isMacOs() { const userAgent = navigator.userAgent.toLowerCase(); return /macintosh|mac os x/i.test(userAgent); } function isMacOsFirefox() { return isFirefox() && isMacOs(); } function isFunction$1(fn) { return typeof fn === "function"; } function isWebglRenderSupport(width) { return width / 2 % 4 === 0; } /** * * @param event * @returns {{posX: number, posY: number}} */ function getMousePosition(event) { if (isMobile()) { let clientX = 0; let clientY = 0; if (event.touches.length === 1) { let clientObject = event.touches[0]; clientX = clientObject.clientX; clientY = clientObject.clientY; } return { posX: clientX, posY: clientY }; } let posX = 0; let posY = 0; const e = event || window.event; //标准化事件对象 if (e.pageX || e.pageY) { //获取鼠标指针的当前坐标值 posX = e.pageX; posY = e.pageY; } else if (e.clientX || e.clientY) { posX = event.clientX + document.documentElement.scrollLeft + document.body.scrollLeft; posY = event.clientY + document.documentElement.scrollTop + document.body.scrollTop; } return { posX, posY }; } function canPlayAppleMpegurl() { let video = document.createElement('video'); let result = video.canPlayType('application/vnd.apple.mpegurl'); video = null; return result; } function isSupportGetUserMedia() { let result = false; const navigator = window.navigator; if (navigator) { result = !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia); if (!result) { result = !!(navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia); } } return result; } function onlyMseOrWcsVideo(opt) { let result = isFalse(opt.hasAudio) && (opt.useMSE || opt.useWCS && !opt.useOffscreen) && isFalse(opt.demuxUseWorker); // check is mse play audio and video if (isFalse(result) && opt.useMSE && opt.mseDecodeAudio && isFalse(opt.demuxUseWorker)) { return true; } return result; } function checkNaluType(naluBuffer) { let result = null; let type = naluBuffer[0] & 0b0001_1111; if (type === H264_NAL_TYPE.sps || type === H264_NAL_TYPE.pps) { result = VIDEO_ENC_TYPE_SHOW.h264; } if (!result) { type = (naluBuffer[0] & 0x7E) >> 1; if (type === H265_NAL_TYPE.vps || type === H265_NAL_TYPE.sps || type === H265_NAL_TYPE.pps) { result = VIDEO_ENC_TYPE_SHOW.h265; } } return result; } function createWorkletModuleUrl(func) { function functionToString(str) { return str.trim().match(/^function\s*\w*\s*\([\w\s,]*\)\s*{([\w\W]*?)}$/)[1]; } const funcStr = functionToString(func.toString()); const blob = new Blob([funcStr], { type: 'application/javascript' }); return URL.createObjectURL(blob); } function supportWritableStream() { return typeof 
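// A minimal usage sketch (assuming the usual NAL type codes held by H264_NAL_TYPE / H265_NAL_TYPE:
// H.264 SPS=7, PPS=8; H.265 VPS=32, SPS=33, PPS=34): checkNaluType above peeks at the first byte of
// a naked-stream NAL unit to guess the codec.
// checkNaluType(Uint8Array.of(0x67));  // 0x67 & 0x1F === 7 (H.264 SPS)         -> the H.264 label
// checkNaluType(Uint8Array.of(0x40));  // (0x40 & 0x7E) >> 1 === 32 (H.265 VPS) -> the H.265 label
// checkNaluType(Uint8Array.of(0x65));  // an H.264 IDR slice, not SPS/PPS/VPS   -> null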
WritableStream !== 'undefined'; } function closeVideoFrame(videoFrame) { videoFrame.close(); } function isInHttps() { return window.location.protocol === 'https:' || window.location.hostname === 'localhost'; } function errorToString(error) { const nativeToString = Object.prototype.toString; function isErrorLike(error) { switch (nativeToString.call(error)) { case '[object Error]': return true; case '[object Exception]': return true; case '[object DOMException]': return true; default: try { return error instanceof Error; } catch (e) { return false; } } } if (isErrorLike(error)) { return error.message; } else { return error == null ? '' : typeof error === 'object' ? JSON.stringify(error, null, 2) : String(error); } } function calcStreamFpsByBufferList(bufferList, type) { if (type) { bufferList = bufferList.filter(item => item.type && item.type === type); } let firstItem = bufferList[0]; let oneSecondLength = null; let nextIndex = 1; if (bufferList.length > 0) { let nextItem = bufferList[1]; if (nextItem && nextItem.ts - firstItem.ts > 100000) { firstItem = nextItem; nextIndex = 2; } } if (firstItem) { // next start for (let i = nextIndex; i < bufferList.length; i++) { let tempItem = bufferList[i]; if (type && tempItem.type && tempItem.type !== type) { tempItem = null; } if (tempItem) { const diff = tempItem.ts - firstItem.ts; if (diff >= 1000) { const prevTempItem = bufferList[i - 1]; const diff2 = prevTempItem.ts - firstItem.ts; if (diff2 < 1000) { oneSecondLength = i + 1; } } } } } return oneSecondLength; } function isFetchSuccess(res) { return res.ok && res.status >= 200 && res.status <= 299; } // stroke rect or text in canvas function strokeRectOrTextInCanvas(_ref) { let { ctx, list } = _ref; ctx.save(); (list || []).forEach(item => { if (item.type === 'text') { ctx.font = `${item.fontSize || 12}px Arial`; ctx.fillStyle = item.color || 'green'; ctx.fillText(item.text, item.x, item.y); } else if (item.type === 'rect') { ctx.strokeStyle = item.color || 'green'; ctx.lineWidth = item.lineWidth || 2; ctx.strokeRect(item.x, item.y, item.width, item.height); } }); ctx.restore(); } function hexToRgba(hex) { const r = parseInt(hex.substring(1, 3), 16) / 255; const g = parseInt(hex.substring(3, 5), 16) / 255; const b = parseInt(hex.substring(5, 7), 16) / 255; return [r, g, b, 1.0]; } function getUrlRelativePath(href) { const url = href || document.location.toString(); const arrUrl = url.split("//"); const start = arrUrl[1].indexOf("/"); let relUrl = arrUrl[1].substring(start); //stop省略,截取从start开始到结尾的所有字符 if (relUrl.indexOf("?") != -1) { relUrl = relUrl.split("?")[0]; } return relUrl; } function b64toUin8(base64String) { var padding = '='.repeat((4 - base64String.length % 4) % 4); var base64 = (base64String + padding).replace(/\-/g, '+').replace(/_/g, '/'); var rawData = window.atob(base64); var outputArray = new Uint8Array(rawData.length); for (var i = 0; i < rawData.length; ++i) { outputArray[i] = rawData.charCodeAt(i); } return outputArray; } function resolveUrl(url) { const msie = /(msie|trident)/i.test(navigator.userAgent); const urlParsingNode = document.createElement('a'); let href = url; if (msie) { urlParsingNode.setAttribute('href', href); href = urlParsingNode.href; } urlParsingNode.setAttribute('href', href); return { origin: urlParsingNode.origin, href: urlParsingNode.href, protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '', host: urlParsingNode.host, search: urlParsingNode.search ? 
urlParsingNode.search.replace(/^\?/, '') : '', hash: urlParsingNode.hash ? urlParsingNode.hash.replace(/^#/, '') : '', hostname: urlParsingNode.hostname, port: urlParsingNode.port, pathname: urlParsingNode.pathname.charAt(0) === '/' ? urlParsingNode.pathname : '/' + urlParsingNode.pathname }; } function uuid4() { return 'xxxx'.replace(/[xy]/g, function (c) { var r = Math.random() * 16 | 0, v = c == 'x' ? r : r & 0x3 | 0x8; return v.toString(16); }); } function clone(obj) { let result = ''; // if (typeof obj === 'object') { try { result = JSON.stringify(obj); result = JSON.parse(result); } catch (e) { result = obj; } } else { result = obj; } return result; } /** * * @returns {object:DEFAULT_JESSIBUCA_OPTIONS} */ function getDefaultJessibucaOptions() { return clone(DEFAULT_JESSIBUCA_OPTIONS); } /** * * @returns {object:DEFAULT_PLAYER_OPTIONS} */ function getDefaultPlayerOptions() { return clone(DEFAULT_PLAYER_OPTIONS); } /** * * @returns {object:DEFAULT_TALK_OPTIONS} */ function getDefaultTalkOptions() { return clone(DEFAULT_TALK_OPTIONS); } /** * * @returns {object:CONTROL_BUTTON_OPTIONS} */ function getDefaultButtonOptions() { return clone(CONTROL_BUTTON_OPTIONS); } /** * * @returns {object:MENU_ITEM_OPTIONS} */ function getDefaultMenuOptions() { return clone(MENU_ITEM_OPTIONS); } function isVideoSequenceHeader(payload) { return payload[0] >> 4 === FRAME_TYPE.keyFrame && payload[1] === AVC_PACKET_TYPE.sequenceHeader; } function isTrue(value) { return value === true || value === 'true'; } function isFalse(value) { return value !== true && value !== 'true'; } function isWebGpuSupport() { let result = false; if ('gpu' in navigator) { result = true; } return result; } function sleep() { let t = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; return new Promise(resolve => setTimeout(resolve, t)); } function setElementDataset(element, key, value) { if (!element) { return; } if (element.dataset) { element.dataset[key] = value; } else { element.setAttribute('data-' + key, value); } } function getElementDataset(element, key) { if (!element) { return ''; } if (element.dataset) { return element.dataset[key]; } return element.getAttribute('data-' + key); } function removeElementDataset(element, key) { if (!element) { return; } if (element.dataset) { delete element.dataset[key]; } else { element.removeAttribute('data-' + key); } } function convertToCamelCase(str) { return str.replace(/-([a-z])/g, function (match, letter) { return letter.toUpperCase(); }); } // 是否iphone 手机端 function isIphone() { return /iphone/i.test(navigator.userAgent); } // function getPerformanceMemory() { if (window.performance && window.performance.memory) { return window.performance.memory; } return null; } function isWebGL2Supported() { try { var canvas = document.createElement('canvas'); return !!(window.WebGL2RenderingContext && canvas.getContext('webgl2')); } catch (e) { return false; } } function function2String(fun) { return fun.trim().match(/^function\s*\w*\s*\([\w\s,]*\)\s*{([\w\W]*?)}$/)[1]; } function supportVideoFrameCallback() { let result = false; if ("requestVideoFrameCallback" in HTMLVideoElement.prototype) { result = true; } return result; } function supportPressureObserver() { let result = false; // or use 'globalThis' support if ('PressureObserver' in window) { result = true; } return result; } function removeMaxAndMin(arr) { // 寻找最大值和最小值 const max = Math.max(...arr); const min = Math.min(...arr); // 使用filter过滤掉最大值和最小值 // 注意:这将仅删除第一个遇到的最大值和最小值 return arr.filter(value => value !== max && 
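// A byte-level sketch (assuming the standard FLV values FRAME_TYPE.keyFrame === 1 and
// AVC_PACKET_TYPE.sequenceHeader === 0): isVideoSequenceHeader above looks at the first two bytes
// of an FLV video tag body, where the high nibble of byte 0 is the frame type and byte 1 is the
// AVCPacketType.
// a payload starting 0x17 0x00 -> true   (key frame + AVC sequence header)
// a payload starting 0x17 0x01 -> false  (key frame + AVC NALU)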
value !== min); } function supportMSEForWorker() { return self.Worker && self.MediaSource && 'canConstructInDedicatedWorker' in self.MediaSource && self.MediaSource['canConstructInDedicatedWorker'] === true ? true : false; } class Events$1 { constructor(master) { this.destroys = []; this.proxy = this.proxy.bind(this); this.master = master; } proxy(target, name, callback) { let option = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; if (!target) { return; } if (Array.isArray(name)) { return name.map(item => this.proxy(target, item, callback, option)); } target.addEventListener(name, callback, option); const destroy = () => { if (isFunction$1(target.removeEventListener)) { target.removeEventListener(name, callback, option); } }; this.destroys.push(destroy); return destroy; } destroy() { this.master.debug && this.master.debug.log(`Events`, 'destroy'); this.destroys.forEach(event => event()); this.destroys = []; } } var property$1 = (player => { Object.defineProperty(player, 'rect', { get: () => { let clientRect = {}; if (player.$container) { clientRect = player.$container.getBoundingClientRect(); clientRect.width = Math.max(clientRect.width, player.$container.clientWidth); clientRect.height = Math.max(clientRect.height, player.$container.clientHeight); } return clientRect; } }); ['bottom', 'height', 'left', 'right', 'top', 'width'].forEach(key => { Object.defineProperty(player, key, { get: () => { return player.rect[key] || 0; } }); }); }); var events$1 = (player => { try { const screenfullChange = e => { if (getTarget(e) === player.$container) { // 抛出对外的事件。 player.emit(JESSIBUCA_EVENTS.fullscreen, player.fullscreen); // 如果不是fullscreen,则触发下 resize 方法 // 退出全屏的情况下。 if (!player.fullscreen) { player.resize(); } else { // if (player._opt.useMSE) { player.resize(); } } } }; screenfull.on('change', screenfullChange); player.events.destroys.push(() => { screenfull.off('change', screenfullChange); }); } catch (error) { // } // player.on(EVENTS.decoderWorkerInit, () => { player.debug.log('player', 'listen decoderWorkerInit and set loaded true'); player.loaded = true; }); // player.on(EVENTS.play, () => { player.loading = false; }); // player.on(EVENTS.fullscreen, value => { if (value) { try { screenfull.request(player.$container).then(() => {}).catch(e => { player.debug.error('player', 'fullscreen request error', e); if (isMobile() && player._opt.useWebFullScreen) { player.webFullscreen = true; } }); } catch (e) { if (isMobile() && player._opt.useWebFullScreen) { player.webFullscreen = true; } } } else { try { screenfull.exit().then(() => { if (player.webFullscreen) { player.webFullscreen = false; } }).catch(e => { player.debug.error('player', 'fullscreen exit error', e); if (player.webFullscreen) { player.webFullscreen = false; } }); } catch (e) { if (player.webFullscreen) { player.webFullscreen = false; } } } }); // just for mobile check if (isMobile()) { player.on(EVENTS.webFullscreen, value => { if (value) { player.$container.classList.add('jb-pro-fullscreen-web'); } else { player.$container.classList.remove('jb-pro-fullscreen-web'); } // 抛出对外的事件。 player.emit(JESSIBUCA_EVENTS.fullscreen, player.fullscreen); }); } // player.on(EVENTS.resize, () => { player.video && player.video.resize(); }); if (player._opt.debug) { const ignoreList = [EVENTS.timeUpdate, EVENTS.currentPts, EVENTS.videoSEI]; const stringList = [EVENTS.stats, EVENTS.playbackStats, EVENTS.playbackTimestamp, EVENTS.flvMetaData, EVENTS.playToRenderTimes, EVENTS.audioInfo, EVENTS.videoInfo]; 
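// A minimal usage sketch (playerInstance and videoEl are illustrative): Events$1.proxy registers a
// DOM listener and records a matching un-listen function so that one destroy() call removes everything;
// passing an array of event names returns an array of un-listen functions instead of a single one.
// const events = new Events$1(playerInstance);
// const off = events.proxy(videoEl, 'loadedmetadata', () => playerInstance.debug.log('video', 'ready'));
// events.proxy(videoEl, ['play', 'pause'], e => playerInstance.debug.log('video', e.type));
// off();             // detach just the first listener
// events.destroy();  // detach whatever is still registered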
Object.keys(EVENTS).forEach(key => { player.on(EVENTS[key], function (value) { if (ignoreList.includes(key)) { return; } if (stringList.includes(key)) { value = JSON.stringify(value); } for (var _len = arguments.length, value2 = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { value2[_key - 1] = arguments[_key]; } player.debug.log('player events', EVENTS[key], value, ...value2); }); }); Object.keys(EVENTS_ERROR).forEach(key => { player.on(EVENTS_ERROR[key], function () { for (var _len2 = arguments.length, value = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { value[_key2] = arguments[_key2]; } player.debug.warn('player event error', EVENTS_ERROR[key], ...value); }); }); } }); class Emitter { on(name, fn, ctx) { const e = this.e || (this.e = {}); (e[name] || (e[name] = [])).push({ fn, ctx }); return this; } once(name, fn, ctx) { const self = this; function listener() { self.off(name, listener); for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } fn.apply(ctx, args); } listener._ = fn; return this.on(name, listener, ctx); } emit(name) { const evtArr = ((this.e || (this.e = {}))[name] || []).slice(); for (var _len2 = arguments.length, data = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) { data[_key2 - 1] = arguments[_key2]; } for (let i = 0; i < evtArr.length; i += 1) { evtArr[i].fn.apply(evtArr[i].ctx, data); } return this; } off(name, callback) { const e = this.e || (this.e = {}); if (!name) { Object.keys(e).forEach(key => { delete e[key]; }); delete this.e; return; } const evts = e[name]; const liveEvents = []; if (evts && callback) { for (let i = 0, len = evts.length; i < len; i += 1) { if (evts[i].fn !== callback && evts[i].fn._ !== callback) liveEvents.push(evts[i]); } } if (liveEvents.length) { e[name] = liveEvents; } else { delete e[name]; } return this; } } /** * Common utilities * @module glMatrix */ // Configuration Constants var EPSILON = 0.000001; var ARRAY_TYPE = typeof Float32Array !== 'undefined' ? Float32Array : Array; if (!Math.hypot) Math.hypot = function () { var y = 0, i = arguments.length; while (i--) { y += arguments[i] * arguments[i]; } return Math.sqrt(y); }; /** * 4x4 Matrix
Format: column-major, when typed out it looks like row-major
The matrices are being post multiplied. * @module mat4 */ /** * Creates a new identity mat4 * * @returns {mat4} a new 4x4 matrix */ function create$1() { var out = new ARRAY_TYPE(16); if (ARRAY_TYPE != Float32Array) { out[1] = 0; out[2] = 0; out[3] = 0; out[4] = 0; out[6] = 0; out[7] = 0; out[8] = 0; out[9] = 0; out[11] = 0; out[12] = 0; out[13] = 0; out[14] = 0; } out[0] = 1; out[5] = 1; out[10] = 1; out[15] = 1; return out; } /** * Set a mat4 to the identity matrix * * @param {mat4} out the receiving matrix * @returns {mat4} out */ function identity(out) { out[0] = 1; out[1] = 0; out[2] = 0; out[3] = 0; out[4] = 0; out[5] = 1; out[6] = 0; out[7] = 0; out[8] = 0; out[9] = 0; out[10] = 1; out[11] = 0; out[12] = 0; out[13] = 0; out[14] = 0; out[15] = 1; return out; } /** * Generates a orthogonal projection matrix with the given bounds. * The near/far clip planes correspond to a normalized device coordinate Z range of [-1, 1], * which matches WebGL/OpenGL's clip volume. * * @param {mat4} out mat4 frustum matrix will be written into * @param {number} left Left bound of the frustum * @param {number} right Right bound of the frustum * @param {number} bottom Bottom bound of the frustum * @param {number} top Top bound of the frustum * @param {number} near Near bound of the frustum * @param {number} far Far bound of the frustum * @returns {mat4} out */ function orthoNO(out, left, right, bottom, top, near, far) { var lr = 1 / (left - right); var bt = 1 / (bottom - top); var nf = 1 / (near - far); out[0] = -2 * lr; out[1] = 0; out[2] = 0; out[3] = 0; out[4] = 0; out[5] = -2 * bt; out[6] = 0; out[7] = 0; out[8] = 0; out[9] = 0; out[10] = 2 * nf; out[11] = 0; out[12] = (left + right) * lr; out[13] = (top + bottom) * bt; out[14] = (far + near) * nf; out[15] = 1; return out; } /** * Alias for {@link mat4.orthoNO} * @function */ var ortho = orthoNO; /** * Generates a look-at matrix with the given eye position, focal point, and up axis. * If you want a matrix that actually makes an object look at another object, you should use targetTo instead. 
* * @param {mat4} out mat4 frustum matrix will be written into * @param {ReadonlyVec3} eye Position of the viewer * @param {ReadonlyVec3} center Point the viewer is looking at * @param {ReadonlyVec3} up vec3 pointing up * @returns {mat4} out */ function lookAt(out, eye, center, up) { var x0, x1, x2, y0, y1, y2, z0, z1, z2, len; var eyex = eye[0]; var eyey = eye[1]; var eyez = eye[2]; var upx = up[0]; var upy = up[1]; var upz = up[2]; var centerx = center[0]; var centery = center[1]; var centerz = center[2]; if (Math.abs(eyex - centerx) < EPSILON && Math.abs(eyey - centery) < EPSILON && Math.abs(eyez - centerz) < EPSILON) { return identity(out); } z0 = eyex - centerx; z1 = eyey - centery; z2 = eyez - centerz; len = 1 / Math.hypot(z0, z1, z2); z0 *= len; z1 *= len; z2 *= len; x0 = upy * z2 - upz * z1; x1 = upz * z0 - upx * z2; x2 = upx * z1 - upy * z0; len = Math.hypot(x0, x1, x2); if (!len) { x0 = 0; x1 = 0; x2 = 0; } else { len = 1 / len; x0 *= len; x1 *= len; x2 *= len; } y0 = z1 * x2 - z2 * x1; y1 = z2 * x0 - z0 * x2; y2 = z0 * x1 - z1 * x0; len = Math.hypot(y0, y1, y2); if (!len) { y0 = 0; y1 = 0; y2 = 0; } else { len = 1 / len; y0 *= len; y1 *= len; y2 *= len; } out[0] = x0; out[1] = y0; out[2] = z0; out[3] = 0; out[4] = x1; out[5] = y1; out[6] = z1; out[7] = 0; out[8] = x2; out[9] = y2; out[10] = z2; out[11] = 0; out[12] = -(x0 * eyex + x1 * eyey + x2 * eyez); out[13] = -(y0 * eyex + y1 * eyey + y2 * eyez); out[14] = -(z0 * eyex + z1 * eyey + z2 * eyez); out[15] = 1; return out; } /** * 3 Dimensional Vector * @module vec3 */ /** * Creates a new, empty vec3 * * @returns {vec3} a new 3D vector */ function create() { var out = new ARRAY_TYPE(3); if (ARRAY_TYPE != Float32Array) { out[0] = 0; out[1] = 0; out[2] = 0; } return out; } /** * Creates a new vec3 initialized with the given values * * @param {Number} x X component * @param {Number} y Y component * @param {Number} z Z component * @returns {vec3} a new 3D vector */ function fromValues(x, y, z) { var out = new ARRAY_TYPE(3); out[0] = x; out[1] = y; out[2] = z; return out; } /** * Perform some operation over an array of vec3s. * * @param {Array} a the array of vectors to iterate over * @param {Number} stride Number of elements between the start of each vec3. If 0 assumes tightly packed * @param {Number} offset Number of elements to skip at the beginning of the array * @param {Number} count Number of vec3s to iterate over. 
If 0 iterates over entire array * @param {Function} fn Function to call for each vector in the array * @param {Object} [arg] additional argument to pass to fn * @returns {Array} a * @function */ (function () { var vec = create(); return function (a, stride, offset, count, fn, arg) { var i, l; if (!stride) { stride = 3; } if (!offset) { offset = 0; } if (count) { l = Math.min(count * stride + offset, a.length); } else { l = a.length; } for (i = offset; i < l; i += stride) { vec[0] = a[i]; vec[1] = a[i + 1]; vec[2] = a[i + 2]; fn(vec, vec, arg); a[i] = vec[0]; a[i + 1] = vec[1]; a[i + 2] = vec[2]; } return a; }; })(); class WebglRender { constructor(gl, openWebglAlignment) { this.gl = gl; if (openWebglAlignment) { this.gl.pixelStorei(this.gl.UNPACK_ALIGNMENT, 1); } const shaderProgram = this._initShaderProgram(); this._programInfo = { program: shaderProgram, attribLocations: { vertexPosition: gl.getAttribLocation(shaderProgram, 'aVertexPosition'), texturePosition: gl.getAttribLocation(shaderProgram, 'aTexturePosition') }, uniformLocations: { projectionMatrix: gl.getUniformLocation(shaderProgram, 'uProjectionMatrix'), modelMatrix: gl.getUniformLocation(shaderProgram, 'uModelMatrix'), viewMatrix: gl.getUniformLocation(shaderProgram, 'uViewMatrix'), rgbatexture: gl.getUniformLocation(shaderProgram, 'rgbaTexture'), ytexture: gl.getUniformLocation(shaderProgram, 'yTexture'), utexture: gl.getUniformLocation(shaderProgram, 'uTexture'), vtexture: gl.getUniformLocation(shaderProgram, 'vTexture'), isyuv: gl.getUniformLocation(shaderProgram, 'isyuv') } }; this._buffers = this._initBuffers(); this._rgbatexture = this._createTexture(); this._ytexture = this._createTexture(); this._utexture = this._createTexture(); this._vtexture = this._createTexture(); } destroy() { this.gl.deleteProgram(this._programInfo.program); this.gl.deleteBuffer(this._buffers.position); this.gl.deleteBuffer(this._buffers.texPosition); this.gl.deleteBuffer(this._buffers.indices); this.gl.deleteTexture(this._rgbatexture); this.gl.deleteTexture(this._ytexture); this.gl.deleteTexture(this._utexture); this.gl.deleteTexture(this._vtexture); this._programInfo = null; this._buffers = null; this._rgbatexture = null; this._ytexture = null; this._utexture = null; this._vtexture = null; } _initShaderProgram() { const vertexShaderScript = ` attribute vec4 aVertexPosition; attribute vec2 aTexturePosition; varying lowp vec2 vTexturePosition; void main(void) { gl_Position = aVertexPosition; vTexturePosition = aTexturePosition; } `; const fragmentShaderScript = ` precision highp float; varying highp vec2 vTexturePosition; uniform int isyuv; uniform sampler2D rgbaTexture; uniform sampler2D yTexture; uniform sampler2D uTexture; uniform sampler2D vTexture; const mat4 YUV2RGB = mat4( 1.1643828125, 0, 1.59602734375, -.87078515625, 1.1643828125, -.39176171875, -.81296875, .52959375, 1.1643828125, 2.017234375, 0, -1.081390625, 0, 0, 0, 1); void main(void) { if (isyuv>0) { highp float y = texture2D(yTexture, vTexturePosition).r; highp float u = texture2D(uTexture, vTexturePosition).r; highp float v = texture2D(vTexture, vTexturePosition).r; gl_FragColor = vec4(y, u, v, 1) * YUV2RGB; } else { gl_FragColor = texture2D(rgbaTexture, vTexturePosition); } } `; const vertexShader = this._loadShader(this.gl.VERTEX_SHADER, vertexShaderScript); const fragmentShader = this._loadShader(this.gl.FRAGMENT_SHADER, fragmentShaderScript); // Create the shader program const shaderProgram = this.gl.createProgram(); this.gl.attachShader(shaderProgram, vertexShader); 
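// Note on the fragment shader above: the YUV2RGB matrix is the BT.601 limited-range ("studio swing")
// YCbCr to RGB conversion (coefficients about 1.164, 1.596, -0.813, -0.391 and 2.017, with luma offset
// 16 and chroma centered at 128 on an 8-bit scale), so the renderer expects plain 8-bit I420 planes.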
this.gl.attachShader(shaderProgram, fragmentShader); this.gl.linkProgram(shaderProgram); // If creating the shader program failed, alert if (!this.gl.getProgramParameter(shaderProgram, this.gl.LINK_STATUS)) { console.log('Unable to initialize the shader program: ' + this.gl.getProgramInfoLog(shaderProgram)); return null; } return shaderProgram; } _loadShader(type, source) { const gl = this.gl; const shader = gl.createShader(type); // Send the source to the shader object gl.shaderSource(shader, source); // Compile the shader program gl.compileShader(shader); // See if it compiled successfully if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { console.log('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader)); gl.deleteShader(shader); return null; } return shader; } _initBuffers() { const gl = this.gl; const positionBuffer = gl.createBuffer(); gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); const positions = [-1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0]; gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW); const facePos = [[0.0, 1.0], [1.0, 1.0], [1.0, 0.0], [0.0, 0.0]]; var texturePos = []; texturePos = texturePos.concat(...facePos); const texpositionBuffer = gl.createBuffer(); gl.bindBuffer(gl.ARRAY_BUFFER, texpositionBuffer); gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texturePos), gl.STATIC_DRAW); const indexBuffer = gl.createBuffer(); gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer); const indices = [0, 1, 2, 0, 2, 3]; gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW); return { positions: positions, position: positionBuffer, texPosition: texpositionBuffer, indices: indexBuffer }; } _createTexture() { let texture = this.gl.createTexture(); this.gl.bindTexture(this.gl.TEXTURE_2D, texture); this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MAG_FILTER, this.gl.LINEAR); this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR); this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE); this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE); return texture; } _drawScene(w, h, isYUV) { this.gl.viewport(0, 0, w, h); this.gl.enable(this.gl.BLEND); this.gl.blendFunc(this.gl.SRC_ALPHA, this.gl.ONE_MINUS_SRC_ALPHA); this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this._buffers.position); this.gl.bufferData(this.gl.ARRAY_BUFFER, new Float32Array(this._buffers.positions), this.gl.STATIC_DRAW); this.gl.vertexAttribPointer(this._programInfo.attribLocations.vertexPosition, 2, this.gl.FLOAT, false, 0, 0); this.gl.enableVertexAttribArray(this._programInfo.attribLocations.vertexPosition); this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this._buffers.texPosition); this.gl.vertexAttribPointer(this._programInfo.attribLocations.texturePosition, 2, this.gl.FLOAT, false, 0, 0); this.gl.enableVertexAttribArray(this._programInfo.attribLocations.texturePosition); this.gl.bindBuffer(this.gl.ELEMENT_ARRAY_BUFFER, this._buffers.indices); let rgbatextunit = 2; let ytextunit = rgbatextunit + 1; let utextunit = rgbatextunit + 2; let vtextunit = rgbatextunit + 3; if (isYUV) { this.gl.activeTexture(this.gl.TEXTURE0 + ytextunit); this.gl.bindTexture(this.gl.TEXTURE_2D, this._ytexture); this.gl.activeTexture(this.gl.TEXTURE0 + utextunit); this.gl.bindTexture(this.gl.TEXTURE_2D, this._utexture); this.gl.activeTexture(this.gl.TEXTURE0 + vtextunit); this.gl.bindTexture(this.gl.TEXTURE_2D, this._vtexture); } else { this.gl.activeTexture(this.gl.TEXTURE0 + 
rgbatextunit); this.gl.bindTexture(this.gl.TEXTURE_2D, this._rgbatexture); } this.gl.useProgram(this._programInfo.program); this.gl.uniform1i(this._programInfo.uniformLocations.rgbatexture, rgbatextunit); this.gl.uniform1i(this._programInfo.uniformLocations.ytexture, ytextunit); this.gl.uniform1i(this._programInfo.uniformLocations.utexture, utextunit); this.gl.uniform1i(this._programInfo.uniformLocations.vtexture, vtextunit); this.gl.uniform1i(this._programInfo.uniformLocations.isyuv, isYUV ? 1 : 0); this.gl.drawElements(this.gl.TRIANGLES, 6, this.gl.UNSIGNED_SHORT, 0); } _calRect(x, y, width, height, canvasWidth, canvasHeight) { let x1 = x * 2. / canvasWidth - 1.; let y1 = (canvasHeight - y - height) * 2. / canvasHeight - 1.; let x2 = (x + width) * 2. / canvasWidth - 1.; let y2 = (canvasHeight - y) * 2. / canvasHeight - 1.; return [x1, y1, x2, y1, x2, y2, x1, y2]; } _clear() { this.gl.clearColor(0.0, 0.0, 0.0, 1.0); this.gl.clearDepth(1.0); this.gl.clear(this.gl.COLOR_BUFFER_BIT | this.gl.DEPTH_BUFFER_BIT); } render(width, height, y, u, v) { const gl = this.gl; this._clear(); gl.activeTexture(gl.TEXTURE0); gl.bindTexture(gl.TEXTURE_2D, this._ytexture); gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width, height, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, y); gl.activeTexture(gl.TEXTURE1); gl.bindTexture(gl.TEXTURE_2D, this._utexture); gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width / 2, height / 2, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, u); gl.activeTexture(gl.TEXTURE2); gl.bindTexture(gl.TEXTURE_2D, this._vtexture); gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width / 2, height / 2, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, v); this._buffers.positions = [-1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0]; this._drawScene(width, height, true); } renderYUV(width, height, data) { let y = data.slice(0, width * height); let u = data.slice(width * height, width * height * 5 / 4); let v = data.slice(width * height * 5 / 4, width * height * 3 / 2); const gl = this.gl; this._clear(); gl.activeTexture(gl.TEXTURE0); gl.bindTexture(gl.TEXTURE_2D, this._ytexture); gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width, height, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, y); gl.activeTexture(gl.TEXTURE1); gl.bindTexture(gl.TEXTURE_2D, this._utexture); gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width / 2, height / 2, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, u); gl.activeTexture(gl.TEXTURE2); gl.bindTexture(gl.TEXTURE_2D, this._vtexture); gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width / 2, height / 2, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, v); this._buffers.positions = [-1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0]; this._drawScene(width, height, true); } drawDom(width, height, x, y, dom) { const gl = this.gl; gl.activeTexture(gl.TEXTURE0); gl.bindTexture(gl.TEXTURE_2D, this._rgbatexture); gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, dom); this._buffers.positions = this._calRect(x, y, dom.width, dom.height, width, height); this._drawScene(width, height, false); } } class WebGpuRender { constructor(gpu) { this.gpu = gpu; this.pipeline = null; this.matrixGroupInfo = null; this.depthTexture = null; this.textureGroupInfo = null; this.hasInited = false; this.buffers = this._initBuffer(); this._initPipeline().then(pipeline => { this.pipeline = pipeline; this.matrixGroupInfo = this._initMatrixGroupInfo(); this.hasInited = true; }); } destroy() { if (this.gpu) { this.gpu.device.destroy(); this.gpu = null; } this.hasInited = false; this.pipeline = null; this.matrixGroupInfo = null; this.depthTexture = null; 
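// Worked numbers for renderYUV above (and WebGpuRender.renderYUV below): the input must be a tightly
// packed I420 frame, data.length === width * height * 3 / 2. For a 640x480 frame that is
// Y: 640 * 480 = 307200 bytes, then U and V: 320 * 240 = 76800 bytes each, uploaded as half-resolution
// single-channel textures, 460800 bytes in total.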
this.textureGroupInfo = null; } _initBuffer() { const device = this.gpu.device; //顶点 const positions = new Float32Array([ // Front face -1.0, -1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0]); const positionBuffer = device.createBuffer({ size: positions.byteLength, usage: window.GPUBufferUsage.VERTEX | window.GPUBufferUsage.COPY_DST }); device.queue.writeBuffer(positionBuffer, 0, positions); //纹理顶点 const texturePos = new Float32Array([ // Front face 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0]); const texpositionBuffer = device.createBuffer({ size: texturePos.byteLength, usage: window.GPUBufferUsage.VERTEX | window.GPUBufferUsage.COPY_DST }); device.queue.writeBuffer(texpositionBuffer, 0, texturePos); //索引 const indices = new Uint16Array([ // Front face 0, 1, 2, 0, 2, 3]); const indexBuffer = device.createBuffer({ size: indices.byteLength, usage: window.GPUBufferUsage.INDEX | window.GPUBufferUsage.COPY_DST }); device.queue.writeBuffer(indexBuffer, 0, indices); return { positionBuffer, texpositionBuffer, indexBuffer }; } _initPipeline() { return new Promise((resolve, reject) => { const device = this.gpu.device; const format = this.gpu.format; const vsSource = ` @binding(0) @group(0) var uModelMatrix : mat4x4; @binding(1) @group(0) var uViewMatrix : mat4x4; @binding(2) @group(0) var uProjectionMatrix : mat4x4; struct VertexOutput { @builtin(position) Position : vec4, @location(0) vTexturePosition : vec2, } @vertex fn main( @location(0) aVertexPosition : vec4, @location(1) aTexturePosition : vec2 ) -> VertexOutput { var output : VertexOutput; var tmppos : vec4 = uProjectionMatrix * uViewMatrix * uModelMatrix * aVertexPosition; output.Position = vec4(tmppos.x, tmppos.y, (tmppos.z+1.)/2., tmppos.w); // webgl z [-1, 1], webgpu z [0, 1], 这里z做下调整 z-webgpu = (z-webgl+1)/2 output.vTexturePosition = aTexturePosition; return output; } `; // Fragment shader program const fsSource = ` @group(1) @binding(0) var mySampler: sampler; @group(1) @binding(1) var yTexture: texture_2d; @group(1) @binding(2) var uTexture: texture_2d; @group(1) @binding(3) var vTexture: texture_2d; const YUV2RGB : mat4x4 = mat4x4( 1.1643828125, 0, 1.59602734375, -.87078515625, 1.1643828125, -.39176171875, -.81296875, .52959375, 1.1643828125, 2.017234375, 0, -1.081390625, 0, 0, 0, 1); @fragment fn main( @location(0) vTexturePosition: vec2 ) -> @location(0) vec4 { var y : f32= textureSample(yTexture, mySampler, vTexturePosition).r; var u : f32 = textureSample(uTexture, mySampler, vTexturePosition).r; var v : f32 = textureSample(vTexture, mySampler, vTexturePosition).r; return vec4(y, u, v, 1.0)*YUV2RGB; } `; const descriptor = { layout: 'auto', vertex: { module: device.createShaderModule({ code: vsSource }), entryPoint: 'main', buffers: [{ arrayStride: 3 * 4, attributes: [{ shaderLocation: 0, offset: 0, format: 'float32x3' }] }, { arrayStride: 2 * 4, attributes: [{ shaderLocation: 1, offset: 0, format: 'float32x2' }] }] }, primitive: { topology: 'triangle-list' }, fragment: { module: device.createShaderModule({ code: fsSource }), entryPoint: 'main', targets: [{ format: format }] }, depthStencil: { depthWriteEnabled: true, depthCompare: 'less', format: 'depth24plus' } }; device.createRenderPipelineAsync(descriptor).then(pipeline => { resolve(pipeline); }).catch(e => { reject(e); }); }); } _initMatrixGroupInfo() { const device = this.gpu.device; const pipeline = this.pipeline; const zNear = 0.1; const zFar = 100.0; const projectionMatrix = create$1(); ortho(projectionMatrix, -1, 1, -1, 1, zNear, zFar); const modelMatrix = 
create$1(); identity(modelMatrix); const viewMatrix = create$1(); lookAt(viewMatrix, fromValues(0, 0, 0), fromValues(0, 0, -1), fromValues(0, 1, 0)); const modelMatrixBuffer = device.createBuffer({ size: 4 * 4 * 4, usage: window.GPUBufferUsage.UNIFORM | window.GPUBufferUsage.COPY_DST }); device.queue.writeBuffer(modelMatrixBuffer, 0, modelMatrix); const viewMatrixBuffer = device.createBuffer({ size: 4 * 4 * 4, usage: window.GPUBufferUsage.UNIFORM | window.GPUBufferUsage.COPY_DST }); device.queue.writeBuffer(viewMatrixBuffer, 0, viewMatrix); const projectMatrixBuffer = device.createBuffer({ size: 4 * 4 * 4, usage: window.GPUBufferUsage.UNIFORM | window.GPUBufferUsage.COPY_DST }); device.queue.writeBuffer(projectMatrixBuffer, 0, projectionMatrix); const group = device.createBindGroup({ label: 'group0', layout: pipeline.getBindGroupLayout(0), entries: [{ binding: 0, resource: { buffer: modelMatrixBuffer } }, { binding: 1, resource: { buffer: viewMatrixBuffer } }, { binding: 2, resource: { buffer: projectMatrixBuffer } }] }); return { modelMatrixBuffer, viewMatrixBuffer, projectMatrixBuffer, group }; } _initTextureGroupInfo(width, height) { const device = this.gpu.device; const pipeline = this.pipeline; const yTexture = device.createTexture({ size: [width, height], format: 'r8unorm', usage: window.GPUTextureUsage.TEXTURE_BINDING | window.GPUTextureUsage.COPY_DST | window.GPUTextureUsage.RENDER_ATTACHMENT }); const uTexture = device.createTexture({ size: [width / 2, height / 2], format: 'r8unorm', usage: window.GPUTextureUsage.TEXTURE_BINDING | window.GPUTextureUsage.COPY_DST | window.GPUTextureUsage.RENDER_ATTACHMENT }); const vTexture = device.createTexture({ size: [width / 2, height / 2], format: 'r8unorm', usage: window.GPUTextureUsage.TEXTURE_BINDING | window.GPUTextureUsage.COPY_DST | window.GPUTextureUsage.RENDER_ATTACHMENT }); const sampler = device.createSampler({ magFilter: 'linear', minFilter: 'linear' }); const group = device.createBindGroup({ label: 'group1', layout: pipeline.getBindGroupLayout(1), entries: [{ binding: 0, resource: sampler }, { binding: 1, resource: yTexture.createView() }, { binding: 2, resource: uTexture.createView() }, { binding: 3, resource: vTexture.createView() }] }); return { yTexture, uTexture, vTexture, group }; } _drawScene() { const device = this.gpu.device; const context = this.gpu.context; const commandEncoder = device.createCommandEncoder(); const view = context.getCurrentTexture().createView(); const renderPassDescriptor = { colorAttachments: [{ view: view, clearValue: { r: 0, g: 0, b: 0, a: 0.0 }, loadOp: 'clear', storeOp: 'store' }], depthStencilAttachment: { view: this.depthTexture.createView(), depthClearValue: 1.0, depthLoadOp: 'clear', depthStoreOp: 'store' } }; const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor); passEncoder.setPipeline(this.pipeline); passEncoder.setBindGroup(0, this.matrixGroupInfo.group); passEncoder.setBindGroup(1, this.textureGroupInfo.group); passEncoder.setVertexBuffer(0, this.buffers.positionBuffer); passEncoder.setVertexBuffer(1, this.buffers.texpositionBuffer); passEncoder.setIndexBuffer(this.buffers.indexBuffer, 'uint16'); passEncoder.drawIndexed(6); passEncoder.end(); device.queue.submit([commandEncoder.finish()]); } renderYUV(width, height, data) { if (!this.hasInited) { return; } let y = data.slice(0, width * height); let u = data.slice(width * height, width * height * 5 / 4); let v = data.slice(width * height * 5 / 4, width * height * 3 / 2); const device = this.gpu.device; if 
(!this.depthTexture) { this.depthTexture = this.gpu.device.createTexture({ size: [width, height], format: 'depth24plus', usage: window.GPUTextureUsage.RENDER_ATTACHMENT }); } if (!this.textureGroupInfo) { this.textureGroupInfo = this._initTextureGroupInfo(width, height); } device.queue.writeTexture({ texture: this.textureGroupInfo.yTexture }, y, { bytesPerRow: width, rowsPerImage: height }, [width, height]); device.queue.writeTexture({ texture: this.textureGroupInfo.uTexture }, u, { bytesPerRow: width / 2, rowsPerImage: height / 2 }, [width / 2, height / 2]); device.queue.writeTexture({ texture: this.textureGroupInfo.vTexture }, v, { bytesPerRow: width / 2, rowsPerImage: height / 2 }, [width / 2, height / 2]); this._drawScene(); } clear() {} } class SingleWatermark { constructor(player) { this.player = player; this.TAG_NAME = "SingleWatermark"; this.configList = []; this.shadowRoot = null; this.shadowRootRealDom = null; this.shadowRootInnerDom = null; this.scale = 1; this.isDynamic = false; this._initDom(); this.player.debug.log(this.TAG_NAME, 'int'); } destroy() { this.configList = []; this.shadowRootInnerDom = null; this.isDynamic = false; this.scale = 1; if (this.shadowRoot) { this.player.$container.removeChild(this.shadowRootRealDom); this.shadowRoot = null; this.shadowRootRealDom = null; } this.player.debug.log(this.TAG_NAME, 'destroy'); } resize() { this.player.debug.log(this.TAG_NAME, 'resize()'); if (this.player._opt.aspectRatio === 'default' || isMobile()) { if (this.player.getRenderType() === RENDER_TYPE.canvas) { this._resizeDomForCanvas(); } else if (this.player.getRenderType() === RENDER_TYPE.video) { this._resizeDomForVideo(); } } else { this._resizeDomRatio(); } } _initDom() { const $container = this.player.$container; let shadowRoot = null; const otDiv = document.createElement('div'); otDiv.setAttribute('style', 'pointer-events: none !important;display: block !important;'); if (typeof otDiv.attachShadow === "function") { shadowRoot = otDiv.attachShadow({ mode: 'open' }); } else if (otDiv.shadowRoot) { shadowRoot = otDiv.shadowRoot; } else { shadowRoot = otDiv; } const innerDiv = document.createElement('div'); innerDiv.setAttribute('style', 'position: absolute; top: 0; left: 0; width: 0; height: 0;display: none;'); shadowRoot.appendChild(innerDiv); const nodeList = $container.children; const index = Math.floor(Math.random() * (nodeList.length - 1)) + 1; if (nodeList[index]) { $container.insertBefore(otDiv, nodeList[index]); } else { $container.appendChild(otDiv); } this.shadowRootInnerDom = innerDiv; this.shadowRootRealDom = otDiv; this.shadowRoot = shadowRoot; } update(config) { this._removeDom(); let watermarkConfigList = []; if (Array.isArray(config)) { watermarkConfigList = config; } else if (isNotEmptyObject(config)) { watermarkConfigList.push(config); } let defaultConfigList = watermarkConfigList.map(itemOptions => { return formatWatermarkOptions(itemOptions); }); this.configList = defaultConfigList; this._updateDom(); } _resizeDomForVideo() { const playerWidth = this.player.width; const playerHeight = this.player.height; const videoInfo = this.player.getVideoInfo(); if (!(videoInfo && videoInfo.height > 0 && videoInfo.width > 0)) { return; } let resizeWidth = videoInfo.width; let resizeHeight = videoInfo.height; const option = this.player._opt; let height = playerHeight; let width = playerWidth; if (option.hasControl && !option.controlAutoHide) { const controlHeight = option.playType === PLAY_TYPE.playbackTF ? 
CONTROL_PLAYBACK_HEIGHT : CONTROL_HEIGHT; if (isMobile() && this.player.fullscreen && option.useWebFullScreen) { width -= controlHeight; } else { height -= controlHeight; } } const rotate = option.rotate; let left = (width - resizeWidth) / 2; let top = (height - resizeHeight) / 2; if (rotate === 270 || rotate === 90) { resizeWidth = videoInfo.height; resizeHeight = videoInfo.width; } const wScale = width / resizeWidth; const hScale = height / resizeHeight; let scale = wScale > hScale ? hScale : wScale; //
if (!option.isResize) { if (wScale !== hScale) { scale = wScale + ',' + hScale; } } //
if (option.isFullResize) { scale = wScale > hScale ? wScale : hScale; } let transform = "scale(" + scale + ")"; if (option.mirrorRotate === 'none') { if (rotate) { transform += ' rotate(' + rotate + 'deg)'; } } if (option.mirrorRotate === 'level') { transform += ' rotateY(180deg)'; // horizontal mirror flip
} else if (option.mirrorRotate === 'vertical') { transform += ' rotateX(180deg)'; // vertical mirror flip
} this.scale = ('' + scale).indexOf(',') !== -1 ? wScale : scale; this.shadowRootInnerDom.style.transform = transform; this.shadowRootInnerDom.style.left = left + 'px'; this.shadowRootInnerDom.style.top = top + 'px'; this.shadowRootInnerDom.style.width = videoInfo.width + 'px'; this.shadowRootInnerDom.style.height = videoInfo.height + 'px'; this.shadowRootInnerDom.style.display = 'block'; } _resizeDomForCanvas() { const videoInfo = this.player.getVideoInfo(); if (!(videoInfo && videoInfo.height > 0 && videoInfo.width > 0)) { return; } const option = this.player._opt; let width = this.player.width; let height = this.player.height; if (option.hasControl && !option.controlAutoHide) { const controlHeight = option.playType === PLAY_TYPE.playbackTF ? CONTROL_PLAYBACK_HEIGHT : CONTROL_HEIGHT; if (isMobile() && this.player.fullscreen && option.useWebFullScreen) { width -= controlHeight; } else { height -= controlHeight; } } let resizeWidth = videoInfo.width; let resizeHeight = videoInfo.height; const rotate = option.rotate; let left = (width - resizeWidth) / 2; let top = (height - resizeHeight) / 2; if (rotate === 270 || rotate === 90) { resizeWidth = videoInfo.height; resizeHeight = videoInfo.width; } const wScale = width / resizeWidth; const hScale = height / resizeHeight; let scale = wScale > hScale ? hScale : wScale; //
if (!option.isResize) { if (wScale !== hScale) { scale = wScale + ',' + hScale; } } //
if (option.isFullResize) { scale = wScale > hScale ?
wScale : hScale; } let transform = "scale(" + scale + ")"; if (option.mirrorRotate === 'none') { if (rotate) { transform += ' rotate(' + rotate + 'deg)'; } } if (option.mirrorRotate === 'level') { transform += ' rotateY(180deg)'; // horizontal mirror flip
} else if (option.mirrorRotate === 'vertical') { transform += ' rotateX(180deg)'; // vertical mirror flip
} this.shadowRootInnerDom.style.height = videoInfo.height + "px"; this.shadowRootInnerDom.style.width = videoInfo.width + "px"; this.shadowRootInnerDom.style.padding = "0"; this.shadowRootInnerDom.style.transform = transform; this.shadowRootInnerDom.style.left = left + "px"; this.shadowRootInnerDom.style.top = top + "px"; this.shadowRootInnerDom.style.display = 'block'; } _resizeDomRatio() { const videoInfo = this.player.getVideoInfo(); if (!(videoInfo && videoInfo.height > 0 && videoInfo.width > 0)) { return; } const ratioArray = this.player._opt.aspectRatio.split(':').map(Number); let width = this.player.width; let height = this.player.height; const option = this.player._opt; let controlHeight = 0; if (option.hasControl && !option.controlAutoHide) { controlHeight = option.playType === PLAY_TYPE.playbackTF ? CONTROL_PLAYBACK_HEIGHT : CONTROL_HEIGHT; height -= controlHeight; } const videoRatio = videoInfo.width / videoInfo.height; const setupRatio = ratioArray[0] / ratioArray[1]; if (videoRatio > setupRatio) { const percentage = setupRatio * videoInfo.height / videoInfo.width; this.shadowRootInnerDom.style.width = `${percentage * 100}%`; this.shadowRootInnerDom.style.height = `calc(100% - ${controlHeight}px)`; this.shadowRootInnerDom.style.padding = `0 ${(width - width * percentage) / 2}px`; } else { const percentage = videoInfo.width / setupRatio / videoInfo.height; this.shadowRootInnerDom.style.width = '100%'; this.shadowRootInnerDom.style.height = `calc(${percentage * 100}% - ${controlHeight}px)`; this.shadowRootInnerDom.style.padding = `${(height - height * percentage) / 2}px 0`; } this.shadowRootInnerDom.style.display = 'block'; } _updateDom() { if (!this.shadowRoot) { return; } // foreach
this.configList.forEach(defaultConfig => { const maskDiv = document.createElement('div'); let innerDom = null; if (defaultConfig.image && defaultConfig.image.src) { innerDom = document.createElement('img'); innerDom.style.height = '100%'; innerDom.style.width = '100%'; innerDom.style.objectFit = 'contain'; innerDom.src = defaultConfig.image.src; } else if (defaultConfig.text && defaultConfig.text.content) { innerDom = document.createTextNode(defaultConfig.text.content); } else if (defaultConfig.rect && defaultConfig.rect.color && defaultConfig.rect.width) { innerDom = document.createElement('div'); } else if (defaultConfig.html) { innerDom = document.createElement('div'); } else if (defaultConfig.line && defaultConfig.line.x1 && defaultConfig.line.y1 && defaultConfig.line.x2 && defaultConfig.line.y2) { innerDom = document.createElement('div'); } else if (defaultConfig.polygon && defaultConfig.polygon.list && defaultConfig.polygon.list.length >= 3) { innerDom = document.createElement('div'); } if (innerDom) { maskDiv.appendChild(innerDom); maskDiv.style.visibility = ''; maskDiv.style.position = "absolute"; maskDiv.style.display = 'block'; maskDiv.style['-ms-user-select'] = "none"; maskDiv.style['-moz-user-select'] = "none"; maskDiv.style['-webkit-user-select'] = "none"; maskDiv.style['-o-user-select'] = "none"; maskDiv.style['user-select'] = "none"; maskDiv.style['-webkit-touch-callout'] = "none"; maskDiv.style['-webkit-tap-highlight-color'] = "rgba(0,0,0,0)";
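/*
  For reference, a hedged example of the config shape consumed by SingleWatermark.update()
  and rendered by _updateDom(): update() accepts a single object or an array, and each entry
  is normalized by formatWatermarkOptions() (defined elsewhere in the bundle). The field names
  below are inferred from the branches in _updateDom() and are illustrative, not a formal API:
    singleWatermark.update([
      { left: 10, top: 10, opacity: 0.8, text: { content: 'camera-01', fontSize: 14, color: '#fff' } },
      { left: 100, top: 60, rect: { width: 120, height: 80, color: 'green', lineWidth: 2, fill: 'green', fillOpacity: 0.2 } },
      { line: { x1: 0, y1: 0, x2: 200, y2: 120, color: 'red', lineWidth: 2 } },
      { polygon: { list: [{ x: 10, y: 10 }, { x: 120, y: 10 }, { x: 60, y: 90 }], color: 'yellow', lineWidth: 2 } }
    ]);
*/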
maskDiv.style['-webkit-text-size-adjust'] = "none"; maskDiv.style['-webkit-touch-callout'] = "none"; maskDiv.style.opacity = defaultConfig.opacity; if (isNumber(defaultConfig.left)) { maskDiv.style.left = defaultConfig.left + 'px'; } if (isNumber(defaultConfig.right)) { maskDiv.style.right = defaultConfig.right + 'px'; } if (isNumber(defaultConfig.top)) { maskDiv.style.top = defaultConfig.top + 'px'; } if (isNumber(defaultConfig.bottom)) { maskDiv.style.bottom = defaultConfig.bottom + 'px'; } if (defaultConfig.backgroundColor) { maskDiv.style.backgroundColor = defaultConfig.backgroundColor; } maskDiv.style.overflow = 'hidden'; maskDiv.style.zIndex = "9999999"; if (defaultConfig.image && defaultConfig.image.src) { maskDiv.style.width = defaultConfig.image.width + 'px'; maskDiv.style.height = defaultConfig.image.height + 'px'; } else if (defaultConfig.text && defaultConfig.text.content) { maskDiv.style.fontSize = defaultConfig.text.fontSize + 'px'; maskDiv.style.color = defaultConfig.text.color; if (defaultConfig.text.width) { maskDiv.style.width = defaultConfig.text.width + 'px'; } if (defaultConfig.text.height) { maskDiv.style.height = defaultConfig.text.height + 'px'; } } else if (defaultConfig.rect && defaultConfig.rect.color && defaultConfig.rect.width) { maskDiv.style.width = defaultConfig.rect.width + 'px'; maskDiv.style.height = defaultConfig.rect.height + 'px'; maskDiv.style.borderWidth = defaultConfig.rect.lineWidth + 'px'; maskDiv.style.borderStyle = 'solid'; maskDiv.style.borderColor = defaultConfig.rect.color; // if (defaultConfig.rect.fill) { const fillDiv = document.createElement('div'); fillDiv.style.position = "absolute"; fillDiv.style.width = '100%'; fillDiv.style.height = '100%'; fillDiv.style.backgroundColor = defaultConfig.rect.fill; if (defaultConfig.rect.fillOpacity) { fillDiv.style.opacity = defaultConfig.rect.fillOpacity; } maskDiv.appendChild(fillDiv); } } else if (defaultConfig.html) { maskDiv.style.width = '100%'; maskDiv.style.height = '100%'; maskDiv.innerHTML = defaultConfig.html; } else if (defaultConfig.line && defaultConfig.line.x1 && defaultConfig.line.y1 && defaultConfig.line.x2 && defaultConfig.line.y2) { this.settingLine(maskDiv, defaultConfig.line); } else if (defaultConfig.polygon && defaultConfig.polygon.list && defaultConfig.polygon.list.length >= 3) { maskDiv.style.width = '100%'; maskDiv.style.height = '100%'; let points = defaultConfig.polygon.list; const color = defaultConfig.polygon.color; const lineWidth = defaultConfig.polygon.lineWidth; points = points.sort((a, b) => { return (a.index || 0) - (b.index || 0); }); // inner bg if (defaultConfig.polygon.fill) { const fillDiv = document.createElement('div'); fillDiv.style.position = "absolute"; fillDiv.style.width = '100%'; fillDiv.style.height = '100%'; const clipPathValue = "polygon(" + points.map(p => `${p.x}px ${p.y}px`).join(", ") + ")"; fillDiv.style.clipPath = clipPathValue; fillDiv.style.backgroundColor = defaultConfig.polygon.fill; if (defaultConfig.polygon.fillOpacity) { fillDiv.style.opacity = defaultConfig.polygon.fillOpacity; } maskDiv.appendChild(fillDiv); } points.forEach((itemPoint, index) => { const lineDom = document.createElement('div'); // last if (index === points.length - 1) { const firstItemPoint = points[0]; const result = { x1: itemPoint.x, y1: itemPoint.y, x2: firstItemPoint.x, y2: firstItemPoint.y, color: color, lineWidth: lineWidth }; this.settingLine(lineDom, result); maskDiv.appendChild(lineDom); return; } const nextItemPoint = points[index + 1]; const result = { x1: 
itemPoint.x, y1: itemPoint.y, x2: nextItemPoint.x, y2: nextItemPoint.y, color: color, lineWidth: lineWidth }; this.settingLine(lineDom, result); maskDiv.appendChild(lineDom); }); } if (this.isDynamic) { this.shadowRootDynamicDom = maskDiv; } this.shadowRootInnerDom.appendChild(maskDiv); } }); } settingLine(dom, options) { const x1 = options.x1; const y1 = options.y1; const x2 = options.x2; const y2 = options.y2; var length = Math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2); var angle = Math.atan2(y2 - y1, x2 - x1) * 180 / Math.PI; dom.style.backgroundColor = options.color; dom.style.width = length + 'px'; dom.style.height = options.lineWidth + 'px'; // the line thickness can be adjusted here
dom.style.position = 'absolute'; dom.style.top = y1 + 'px'; dom.style.left = x1 + 'px'; dom.style.transform = 'rotate(' + angle + 'deg)'; dom.style.transformOrigin = '0 0'; // rotate around the line's start point
} remove() { this._removeDom(); } _removeDom() { if (this.shadowRootInnerDom) { this.shadowRootInnerDom.innerHTML = ''; } } } class CommonLoader$2 extends Emitter { constructor() { super(); this.videoInfo = { width: null, height: null, encType: null, encTypeCode: null }; this.init = false; this.prevAiFaceDetectTime = null; this.prevAiObjectDetectTime = null; this.prevAiOcclusionDetectTime = null; this.contentWatermark = null; this.aiContentWatermark = null; this.tempContentList = []; this.tempAiContentList = []; this.streamFps = 0; // this.isDestroyed = false;
} destroy() { // this.isDestroyed = true;
this.resetInit(); if (this.contentWatermark) { this.contentWatermark.destroy(); this.contentWatermark = null; } this.tempContentList = []; if (this.aiContentWatermark) { this.aiContentWatermark.destroy(); this.aiContentWatermark = null; } this.tempAiContentList = []; this.prevAiFaceDetectTime = null; this.prevAiObjectDetectTime = null; this.streamFps = 0; this.off(); } resetInit() { this.videoInfo = { width: null, height: null, encType: null, encTypeCode: null }; this.init = false; } getHasInit() { return this.init; } updateVideoInfo(data) { if (isNotEmpty(data.encTypeCode)) { this.videoInfo.encType = VIDEO_ENC_TYPE[data.encTypeCode]; this.videoInfo.encTypeCode = data.encTypeCode; } if (isNotEmpty(data.encType)) { this.videoInfo.encType = data.encType; } if (isNotEmpty(data.width)) { this.videoInfo.width = data.width; } if (isNotEmpty(data.height)) { this.videoInfo.height = data.height; } // basic video info
if (isNotEmpty(this.videoInfo.encType) && isNotEmpty(this.videoInfo.height) && isNotEmpty(this.videoInfo.width) && !this.init) { this.player.emit(EVENTS.videoInfo, this.videoInfo); this.init = true; } } getVideoInfo() { return this.videoInfo; } clearView() { // clear the watermark content
this.tempContentList = []; this.tempAiContentList = []; } resize() { this.player.debug.log('CommonVideo', 'resize()'); if (this.player._opt.aspectRatio === 'default' || isMobile()) { this._resize(); } else { this._resizeRatio(); } if (this.contentWatermark) { this.contentWatermark.resize(); } if (this.aiContentWatermark) { this.aiContentWatermark.resize(); } if (this.player.singleWatermark) { this.player.singleWatermark.resize(); } if (this.player.ghostWatermark) { this.player.ghostWatermark.resize(); } if (this.player.dynamicWatermark) { this.player.dynamicWatermark.resize(); } if (this.player.zoom && this.player.zooming) { const styleScale = this._getStyleScale(); this.player.zoom.updatePrevVideoElementStyleScale(styleScale); this.player.zoom.updateVideoElementScale(); } } _resizeRatio() { this.player.debug.log('CommonVideo', '_resizeRatio()'); const ratioArray =
this.player._opt.aspectRatio.split(':').map(Number); let width = this.player.width; let height = this.player.height; const option = this.player._opt; let controlHeight = 0; if (option.hasControl && !option.controlAutoHide) { controlHeight = option.playType === PLAY_TYPE.playbackTF ? CONTROL_PLAYBACK_HEIGHT : CONTROL_HEIGHT; height -= controlHeight; } const videoInfo = this.videoInfo; const videoRatio = videoInfo.width / videoInfo.height; const setupRatio = ratioArray[0] / ratioArray[1]; if (this.getType() === RENDER_TYPE.canvas) { this.$videoElement.style.left = '0'; this.$videoElement.style.top = '0'; this.$videoElement.style.transform = `none`; } if (this.getType() === RENDER_TYPE.video) { if (this.player._opt.videoRenderSupportScale) { this.$videoElement.style.objectFit = 'fill'; } } if (videoRatio > setupRatio) { const percentage = setupRatio * videoInfo.height / videoInfo.width; this.$videoElement.style.width = `${percentage * 100}%`; this.$videoElement.style.height = `calc(100% - ${controlHeight}px)`; this.$videoElement.style.padding = `0 ${(width - width * percentage) / 2}px`; } else { const percentage = videoInfo.width / setupRatio / videoInfo.height; this.$videoElement.style.width = '100%'; this.$videoElement.style.height = `calc(${percentage * 100}% - ${controlHeight}px)`; this.$videoElement.style.padding = `${(height - height * percentage) / 2}px 0`; } } play() { // implemented by subclass
} pause() { // implemented by subclass
} setRate(rate) { // implemented by subclass
} getType() { return ''; } getCanvasType() { return ''; } getCurrentTime() { return 0; } getStreamFps() { return this.streamFps; } isPlaying() { return true; } getPlaybackQuality() { return null; } // implemented by subclass
setStreamFps(fps) { this.player.debug.log(`CommonVideo`, 'setStreamFps', fps); this.streamFps = fps; } addContentToCanvas() { let contentList = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : []; this.tempContentList = contentList; } addAiContentToCanvas() { let contentList = arguments.length > 0 && arguments[0] !== undefined ?
arguments[0] : []; this.tempAiContentList = contentList; } doAddContentToWatermark() { if (this.tempContentList.length > 0) { if (!this.contentWatermark) { this.contentWatermark = new SingleWatermark(this.player); this.contentWatermark.resize(); } const watermarkConfigList = []; this.tempContentList.forEach(item => { let result = { left: item.x || 0, top: item.y || 0 }; // text if (item.type === 'text') { result.text = { content: item.text, fontSize: item.fontSize || '14', color: item.color || '#000' }; } // rect else if (item.type === 'rect') { result.rect = { width: item.width, height: item.height, color: item.color || 'green', lineWidth: item.lineWidth || 2, fill: item.fill || '', fillOpacity: item.fillOpacity || 0.2 }; } else if (item.type === 'polygon') { result.polygon = { list: item.list, color: item.color || 'green', lineWidth: item.lineWidth || 2, fill: item.fill, fillOpacity: item.fillOpacity || 0.2 }; } else if (item.type === 'line') { result.line = { color: item.color || 'green', lineWidth: item.lineWidth || 2, x1: item.x1, y1: item.y1, x2: item.x2, y2: item.y2 }; } watermarkConfigList.push(result); }); this.contentWatermark.update(watermarkConfigList); } else { if (this.contentWatermark) { this.contentWatermark.remove(); } } } doAddAiContentToWatermark() { if (this.tempAiContentList.length > 0) { if (!this.aiContentWatermark) { this.aiContentWatermark = new SingleWatermark(this.player); this.aiContentWatermark.resize(); } const watermarkConfigList = this.tempAiContentList.map(item => { let result = { left: item.x, top: item.y }; if (item.type === 'text') { result.text = { content: item.text, fontSize: item.fontSize, color: item.color }; } else if (item.type === 'rect') { result.rect = { width: item.width, height: item.height, color: item.color, lineWidth: item.lineWidth }; } return result; }); this.aiContentWatermark.update(watermarkConfigList); } else { if (this.aiContentWatermark) { this.aiContentWatermark.remove(); } } } _getStyleScale() { const styleTransform = this.$videoElement.style.transform; let scaleStyleMatch = styleTransform.match(/scale\([0-9., ]*\)/g); let styleScale = ''; if (scaleStyleMatch && scaleStyleMatch[0]) { let scaleStyle = scaleStyleMatch[0].replace('scale(', '').replace(')', ''); styleScale = scaleStyle.split(','); } return styleScale; } } /* * FileSaver.js * A saveAs() FileSaver implementation. * * By Eli Grey, http://eligrey.com * * License : https://github.com/eligrey/FileSaver.js/blob/master/LICENSE.md (MIT) * source : http://purl.eligrey.com/github/FileSaver.js */ // The one and only way of getting global scope in all environments // https://stackoverflow.com/q/3277182/1008999 var _global = typeof window === 'object' && window.window === window ? window : typeof self === 'object' && self.self === self ? self : typeof global === 'object' && global.global === global ? 
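/*
  For reference, a hedged sketch of how addContentToCanvas()/addAiContentToCanvas() above are
  fed: they only stash a content list, and doAddContentToWatermark()/doAddAiContentToWatermark()
  later turn that list into SingleWatermark overlays. The item shape is inferred from the
  mapping code above ('videoLoader' is a placeholder for an instance of this loader, not a
  public API name):
    videoLoader.addContentToCanvas([
      { type: 'rect', x: 40, y: 40, width: 160, height: 90, color: 'green', lineWidth: 2 },
      { type: 'text', x: 40, y: 20, text: 'person 0.92', fontSize: 14, color: '#00ff00' }
    ]);
*/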
global : undefined; function bom(blob, opts) { if (typeof opts === 'undefined') opts = { autoBom: false };else if (typeof opts !== 'object') { console.warn('Deprecated: Expected third argument to be a object'); opts = { autoBom: !opts }; } // prepend BOM for UTF-8 XML and text/* types (including HTML) // note: your browser will automatically convert UTF-16 U+FEFF to EF BB BF if (opts.autoBom && /^\s*(?:text\/\S*|application\/xml|\S*\/\S*\+xml)\s*;.*charset\s*=\s*utf-8/i.test(blob.type)) { return new Blob([String.fromCharCode(0xFEFF), blob], { type: blob.type }); } return blob; } function download(url, name, opts) { var xhr = new XMLHttpRequest(); xhr.open('GET', url); xhr.responseType = 'blob'; xhr.onload = function () { saveAs(xhr.response, name, opts); }; xhr.onerror = function () { console.error('could not download file'); }; xhr.send(); } function corsEnabled(url) { var xhr = new XMLHttpRequest(); // use sync to avoid popup blocker xhr.open('HEAD', url, false); try { xhr.send(); } catch (e) {} return xhr.status >= 200 && xhr.status <= 299; } // `a.click()` doesn't work for all browsers (#465) function click(node) { try { node.dispatchEvent(new MouseEvent('click')); } catch (e) { var evt = document.createEvent('MouseEvents'); evt.initMouseEvent('click', true, true, window, 0, 0, 0, 80, 20, false, false, false, false, 0, null); node.dispatchEvent(evt); } } // Detect WebView inside a native macOS app by ruling out all browsers // We just need to check for 'Safari' because all other browsers (besides Firefox) include that too // https://www.whatismybrowser.com/guides/the-latest-user-agent/macos var isMacOSWebView = _global.navigator && /Macintosh/.test(navigator.userAgent) && /AppleWebKit/.test(navigator.userAgent) && !/Safari/.test(navigator.userAgent); var saveAs = // probably in some web worker typeof window !== 'object' || window !== _global ? function saveAs() {/* noop */ } // Use download attribute first if possible (#193 Lumia mobile) unless this is a macOS WebView : 'download' in HTMLAnchorElement.prototype && !isMacOSWebView ? function saveAs(blob, name, opts) { var URL = _global.URL || _global.webkitURL; // Namespace is used to prevent conflict w/ Chrome Poper Blocker extension (Issue #561) var a = document.createElementNS('http://www.w3.org/1999/xhtml', 'a'); name = name || blob.name || 'download'; a.download = name; a.rel = 'noopener'; // tabnabbing // TODO: detect chrome extensions & packaged apps // a.target = '_blank' if (typeof blob === 'string') { // Support regular links a.href = blob; if (a.origin !== location.origin) { corsEnabled(a.href) ? download(blob, name, opts) : click(a, a.target = '_blank'); } else { click(a); } } else { // Support blobs a.href = URL.createObjectURL(blob); setTimeout(function () { URL.revokeObjectURL(a.href); }, 4E4); // 40s setTimeout(function () { click(a); }, 0); } } // Use msSaveOrOpenBlob as a second approach : 'msSaveOrOpenBlob' in navigator ? function saveAs(blob, name, opts) { name = name || blob.name || 'download'; if (typeof blob === 'string') { if (corsEnabled(blob)) { download(blob, name, opts); } else { var a = document.createElement('a'); a.href = blob; a.target = '_blank'; setTimeout(function () { click(a); }); } } else { navigator.msSaveOrOpenBlob(bom(blob, opts), name); } } // Fallback to using FileReader and a popup : function saveAs(blob, name, opts, popup) { // Open a popup immediately do go around popup blocker // Mostly only available on user interaction and the fileReader is async so... 
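/*
  For reference, a hedged usage sketch of the vendored FileSaver saveAs() above, which the
  screenshot helpers later in this bundle rely on (values here are made up for illustration):
    const blob = new Blob(['hello'], { type: 'text/plain' });
    saveAs(blob, 'demo.txt');                         // <a download> click path in most browsers
    saveAs('https://example.com/pic.png', 'pic.png'); // string URLs go through corsEnabled()/download()
  Which branch actually runs depends on the feature checks above (download attribute support,
  msSaveOrOpenBlob, or the FileReader/popup fallback).
*/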
popup = popup || open('', '_blank'); if (popup) { popup.document.title = popup.document.body.innerText = 'downloading...'; } if (typeof blob === 'string') return download(blob, name, opts); var force = blob.type === 'application/octet-stream'; var isSafari = /constructor/i.test(_global.HTMLElement) || _global.safari; var isChromeIOS = /CriOS\/[\d]+/.test(navigator.userAgent); if ((isChromeIOS || force && isSafari || isMacOSWebView) && typeof FileReader !== 'undefined') { // Safari doesn't allow downloading of blob URLs var reader = new FileReader(); reader.onloadend = function () { var url = reader.result; url = isChromeIOS ? url : url.replace(/^data:[^;]*;/, 'data:attachment/file;'); if (popup) popup.location.href = url;else location = url; popup = null; // reverse-tabnabbing #460 }; reader.readAsDataURL(blob); } else { var URL = _global.URL || _global.webkitURL; var url = URL.createObjectURL(blob); if (popup) popup.location = url;else location.href = url; popup = null; // reverse-tabnabbing #460 setTimeout(function () { URL.revokeObjectURL(url); }, 4E4); // 40s } }; function createWebGPUContext(canvas) { return new Promise((resolve, reject) => { if (navigator.gpu) { navigator.gpu.requestAdapter().then(adapter => { if (adapter) { adapter.requestDevice().then(device => { if (device) { const context = canvas.getContext('webgpu'); if (context) { const format = navigator.gpu.getPreferredCanvasFormat(); context.configure({ device, format, alphaMode: 'opaque' }); resolve({ adapter, device, context, format }); } else { reject('WebGPU "context" create fail'); } } else { reject('WebGPU "device" request fail'); } }).catch(e => { reject('WebGPU "adapter.requestDevice()" fail'); }); } else { reject('WebGPU "adapter" request fail is empty'); } }).catch(e => { reject('WebGPU "navigator.gpu.requestAdapter()" fail'); }); } else { reject('WebGPU not support!!'); } }); } class Webgl2Render { constructor(canvas, gl) { this.canvas = canvas; this.gl = gl; const vertexShaderScript = ` attribute vec2 xy; varying highp vec2 uv; void main(void) { gl_Position = vec4(xy, 0.0, 1.0); // Map vertex coordinates (-1 to +1) to UV coordinates (0 to 1). // UV coordinates are Y-flipped relative to vertex coordinates. 
uv = vec2((1.0 + xy.x) / 2.0, (1.0 - xy.y) / 2.0); } `; const fragmentShaderScript = ` varying highp vec2 uv; uniform sampler2D texture; void main(void) { gl_FragColor = texture2D(texture, uv); } `; const vertexShader = gl.createShader(gl.VERTEX_SHADER); gl.shaderSource(vertexShader, vertexShaderScript); gl.compileShader(vertexShader); if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) { throw gl.getShaderInfoLog(vertexShader); } const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER); gl.shaderSource(fragmentShader, fragmentShaderScript); gl.compileShader(fragmentShader); if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) { throw gl.getShaderInfoLog(fragmentShader); } const program = gl.createProgram(); gl.attachShader(program, vertexShader); gl.attachShader(program, fragmentShader); gl.linkProgram(program); if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { throw gl.getProgramInfoLog(program); } gl.useProgram(program); const buffer = gl.createBuffer(); gl.bindBuffer(gl.ARRAY_BUFFER, buffer); gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, -1, 1, 1, 1, 1, -1]), gl.STATIC_DRAW); const xy = gl.getAttribLocation(program, 'xy'); gl.vertexAttribPointer(xy, 2, gl.FLOAT, false, 0, 0); gl.enableVertexAttribArray(xy); const texture = gl.createTexture(); gl.bindTexture(gl.TEXTURE_2D, texture); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); this.program = program; this.buffer = buffer; this.vertexShader = vertexShader; this.fragmentShader = fragmentShader; this.texture = texture; } destroy() { this.gl.deleteProgram(this.program); this.gl.deleteBuffer(this.buffer); this.gl.deleteTexture(this.texture); this.gl.deleteShader(this.vertexShader); this.gl.deleteShader(this.fragmentShader); this.program = null; this.buffer = null; this.vertexShader = null; this.fragmentShader = null; this.texture = null; } render(videoFrame) { this.canvas.width = videoFrame.displayWidth; this.canvas.height = videoFrame.displayHeight; const gl = this.gl; gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, videoFrame); gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight); gl.clearColor(1, 0, 0, 1); gl.clear(gl.COLOR_BUFFER_BIT); gl.drawArrays(gl.TRIANGLE_FAN, 0, 4); } } class CommonCanvasLoader extends CommonLoader$2 { constructor(player) { super(); this.player = player; const $canvasElement = document.createElement("canvas"); $canvasElement.style.position = "absolute"; $canvasElement.style.top = 0; $canvasElement.style.left = 0; this.$videoElement = $canvasElement; player.$container.appendChild(this.$videoElement); this.context2D = null; this.contextGl = null; this.webglRender = null; this.webglRectRender = null; this.webGPURender = null; this.isWebglContextLost = false; this.isWcsWebgl2 = false; this.bitmaprenderer = null; this.renderType = null; this.controlHeight = 0; this.proxyDestroyList = []; // this._initCanvasRender(); } destroy() { super.destroy(); if (this.proxyDestroyList.length > 0) { this.proxyDestroyList.forEach(itemDestroy => { itemDestroy && itemDestroy(); }); this.proxyDestroyList = []; } if (this.contextGl) { this.contextGl = null; } if (this.context2D) { this.context2D = null; } if (this.webglRender) { this.webglRender.destroy(); this.webglRender = null; } if (this.webglRectRender) { this.webglRectRender.destroy(); 
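/*
  For reference, a hedged sketch of the WebCodecs path that Webgl2Render above serves: it
  uploads a VideoFrame straight to a texture via texImage2D and draws a full-screen quad.
  Standalone (outside this bundle) the same idea looks roughly like this; VideoDecoder and
  VideoFrame are standard WebCodecs APIs, only available in browsers that support them:
    const canvas = document.createElement('canvas');
    const gl = canvas.getContext('webgl2');
    const render = new Webgl2Render(canvas, gl);
    const decoder = new VideoDecoder({
      output: (videoFrame) => { render.render(videoFrame); videoFrame.close(); },
      error: (e) => console.error(e)
    });
  In this bundle the equivalent wiring is gated behind the useWCS / wcsUseWebgl2Render options.
*/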
this.webglRectRender = null; } if (this.webGPURender) { this.webGPURender.destroy(); this.webGPURender = null; } if (this.bitmaprenderer) { this.bitmaprenderer = null; } this.renderType = null; this.isWebglContextLost = false; this.videoInfo = { width: '', height: '', encType: '' }; this.player.$container.removeChild(this.$videoElement); this.init = false; this.off(); } _initContext2D() { let options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; this.context2D = this.$videoElement.getContext('2d', options); } _initContextGl() { this.player.events; this.contextGl = createContextGL(this.$videoElement); if (!this.contextGl) { this.player.debug.error('CommonCanvasLoader', `_initContextGl() createContextGL error`); return; } this._bindContextGlEvents(); this.webglRender = new WebglRender(this.contextGl, this.player._opt.openWebglAlignment); // this.webglRectRender = new WebglRectRender(this.contextGl, this.player._opt.openWebglAlignment);
} _initContextGl2() { this.contextGl = createContextGL2(this.$videoElement); if (!this.contextGl) { this.player.debug.error('CommonCanvasLoader', `_initContextGl2() createContextGL2 error`); return; } this._bindContextGlEvents(2); try { this.webglRender = new Webgl2Render(this.$videoElement, this.contextGl); } catch (e) { this.player.debug.error('CommonCanvasLoader', `create webgl2Render error is ${e} and next use context2d.draw render`); this.contextGl = null; this.webglRender = null; this._initContext2D(); } } _bindContextGlEvents() { let version = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 1; const { proxy } = this.player.events; // When the WebGL context is lost, the WebGLRenderingContext fires a "webglcontextlost" event.
// That event tells the application the WebGL context is gone so it can do the necessary cleanup and recovery, such as releasing GPU resources and reloading textures.
const webglContextLostProxy = proxy(this.$videoElement, 'webglcontextlost', event => { event.preventDefault(); this.player.debug.error('canvasVideo', `webglcontextlost error`, event); this.isWebglContextLost = true; // destroy webglRender
if (this.webglRender) { this.player.debug.log('CommonCanvasLoader', `webglcontextlost error and destroy webglRender`); this.webglRender.destroy(); this.webglRender = null; } if (this.webglRectRender) { this.player.debug.log('CommonCanvasLoader', `webglcontextlost error and destroy webglRectRender`); this.webglRectRender.destroy(); this.webglRectRender = null; } this.contextGl = null; setTimeout(() => { this.player.debug.log('CommonCanvasLoader', `createContextGL() version ${version}`); if (version === 1) { this.contextGl = createContextGL(this.$videoElement); } else if (version === 2) { this.contextGl = createContextGL2(this.$videoElement); } this.player.debug.log('CommonCanvasLoader', `createContextGL success`); if (this.contextGl && this.contextGl.getContextAttributes) { const webglContextAttributes = this.contextGl.getContextAttributes(); if (webglContextAttributes && webglContextAttributes.stencil) { if (version === 1) { this.webglRender = new WebglRender(this.contextGl, this.player._opt.openWebglAlignment); // this.webglRectRender = new WebglRectRender(this.contextGl, this.player._opt.openWebglAlignment);
} else if (version === 2) { this.webglRender = new Webgl2Render(this.$videoElement, this.contextGl); } this.isWebglContextLost = false; this.player.debug.log('CommonCanvasLoader', `webglcontextlost error reset and getContextAttributes().stencil is true`); } else { this.player.debug.error('CommonCanvasLoader', `webglcontextlost error, getContextAttributes().stencil is false`); // todo: handle the webglcontextlost error event and replay the stream
this.player.emitError(EVENTS_ERROR.webglContextLostError); } } else { this.player.debug.error('CommonCanvasLoader', `webglcontextlost error, getContextAttributes().stencil is false`); // todo: handle the webglcontextlost error event and replay the stream
this.player.emitError(EVENTS_ERROR.webglContextLostError); } }, 500); }); // When the WebGL context is restored, the WebGLRenderingContext fires a "webglcontextrestored" event.
const webglContextRestoredProxy = proxy(this.$videoElement, 'webglcontextrestored', event => { event.preventDefault(); this.player.debug.log('CommonCanvasLoader', `webglcontextrestored `, event); }); this.proxyDestroyList.push(webglContextLostProxy, webglContextRestoredProxy); } _initContextGPU() { createWebGPUContext(this.$videoElement).then(context => { if (context) { this.webGPURender = new WebGpuRender(context); this.player.debug.log('CommonCanvasLoader', `webGPURender init success`); } else { this.player.debug.warn('CommonCanvasLoader', `createWebGPUContext context is empty and next use webgl render`); this.renderType = CANVAS_RENDER_TYPE.webgl; this._initContextGl(); } }).catch(e => { this.player.debug.warn('CommonCanvasLoader', `createWebGPUContext error is ${e} and next use webgl render`); this.renderType = CANVAS_RENDER_TYPE.webgl; this._initContextGl(); }); } initCanvasViewSize() { this.$videoElement.width = this.videoInfo.width; this.$videoElement.height = this.videoInfo.height; this.resize(); } screenshot(filename, format, quality, type) { filename = filename || now$2(); type = type || SCREENSHOT_TYPE.download; let encoderOptions = 0.92; if (!SCREENSHOT_FORMAT_TYPE[format] && SCREENSHOT_TYPE[format]) { type = format; format = 'png'; quality = undefined; } if (typeof quality === "string") { type = quality; quality = undefined; } if (typeof quality !== 'undefined') { encoderOptions = Number(quality); } const formatType = SCREENSHOT_FORMAT_TYPE[format] || SCREENSHOT_FORMAT_TYPE.png; const dataURL = this.$videoElement.toDataURL(formatType, encoderOptions); if (type === SCREENSHOT_TYPE.base64) { return dataURL; } else { const file = dataURLToFile(dataURL); if (type === SCREENSHOT_TYPE.blob) { return file; } else if (type === SCREENSHOT_TYPE.download) { const suffix = formatType.split('/')[1]; saveAs(file, filename + '.' + suffix); } } } screenshotWatermark(options) { return new Promise((resolve, reject) => { if (isString(options)) { options = { filename: options }; } options = options || {}; options.width = this.videoInfo.width; options.height = this.videoInfo.height; options.filename = options.filename || now$2(); options.format = options.format ? SCREENSHOT_FORMAT_TYPE[options.format] : SCREENSHOT_FORMAT_TYPE.png; options.quality = Number(options.quality) || 0.92; options.type = options.type || SCREENSHOT_TYPE.download; const dataURL = this.$videoElement.toDataURL(options.format, options.quality); createImageWatermark(dataURL, options).then(dataURL2 => { if (options.type === SCREENSHOT_TYPE.base64) { resolve(dataURL); } else { const file = dataURLToFile(dataURL2); if (options.type === SCREENSHOT_TYPE.blob) { resolve(file); } else if (options.type === SCREENSHOT_TYPE.download) { resolve(); const suffix = options.format.split('/')[1]; saveAs(file, options.filename + '.'
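/*
  For reference, a hedged sketch of how the flexible argument parsing in screenshot() above
  plays out ('loader' is a placeholder for an instance of this class, not a public API name):
    loader.screenshot('snap');                      // download snap.png with the default quality
    loader.screenshot('snap', 'png', 0.8);          // download snap.png at quality 0.8
    const dataUrl = loader.screenshot('snap', 'png', 0.92, 'base64'); // returns a data URL
    const blob = loader.screenshot('snap', 'blob'); // 'blob' slides from the format slot into the type slot
*/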
+ suffix); } } }).catch(e => { reject(e); }); }); } // render() {}
clearView() { super.clearView(); } play() {} pause() {} _resize() { this.player.debug.log('canvasVideo', '_resize()'); const option = this.player._opt; let width = this.player.width; let height = this.player.height; if (option.hasControl && !option.controlAutoHide) { const controlHeight = this.controlHeight; if (isMobile() && this.player.fullscreen && option.useWebFullScreen) { width -= controlHeight; } else { height -= controlHeight; } } let resizeWidth = this.$videoElement.width; let resizeHeight = this.$videoElement.height; const rotate = option.rotate; let left = (width - resizeWidth) / 2; let top = (height - resizeHeight) / 2; if (rotate === 270 || rotate === 90) { resizeWidth = this.$videoElement.height; resizeHeight = this.$videoElement.width; } const wScale = width / resizeWidth; const hScale = height / resizeHeight; let scale = wScale > hScale ? hScale : wScale; //
if (isFalse(option.isResize)) { if (wScale !== hScale) { scale = wScale + ',' + hScale; } } //
if (option.isFullResize) { scale = wScale > hScale ? wScale : hScale; } let transform = "scale(" + scale + ")"; if (option.mirrorRotate === 'none') { if (rotate) { transform += ' rotate(' + rotate + 'deg)'; } } if (option.mirrorRotate === 'level') { transform += ' rotateY(180deg)'; // horizontal mirror flip
} else if (option.mirrorRotate === 'vertical') { transform += ' rotateX(180deg)'; // vertical mirror flip
} this.$videoElement.style.height = this.videoInfo.height + "px"; this.$videoElement.style.width = this.videoInfo.width + "px"; this.$videoElement.style.padding = "0"; this.$videoElement.style.transform = transform; this.$videoElement.style.left = left + "px"; this.$videoElement.style.top = top + "px"; } initFps() {} setStreamFps(fps) {} getStreamFps() { return 25; } getType() { return RENDER_TYPE.canvas; } getCanvasType() { let result = this.renderType === CANVAS_RENDER_TYPE.webgpu ?
CANVAS_RENDER_TYPE.webgpu : CANVAS_RENDER_TYPE.webgl; if (this.isWcsWebgl2) { result = CANVAS_RENDER_TYPE.webgl2; } return result; } } class CanvasVideoLoader extends CommonCanvasLoader { constructor(player) { super(player); this.yuvList = []; this.controlHeight = CONTROL_HEIGHT; this.tempTextCanvas = null; this.tempTextCanvasCtx = null; this.player.debug.log('CanvasVideo', 'init'); } destroy() { super.destroy(); this.yuvList = []; if (this.tempTextCanvas) { this.tempTextCanvasCtx.clearRect(0, 0, this.tempTextCanvas.width, this.tempTextCanvas.height); this.tempTextCanvas.width = 0; this.tempTextCanvas.height = 0; this.tempTextCanvas = null; } this.player.debug.log(`CanvasVideoLoader`, 'destroy'); } // render type
_initCanvasRender() { if (this.player._opt.useWCS && !this._supportOffscreen()) { this.renderType = CANVAS_RENDER_TYPE.webcodecs; // {alpha: false}
if (isWebGL2Supported() && this.player._opt.wcsUseWebgl2Render) { this._initContextGl2(); if (this.webglRender) { this.isWcsWebgl2 = true; } } else { this._initContext2D(); } } else if (this.player._opt.useMSE && this.player._opt.mseUseCanvasRender) { this.renderType = CANVAS_RENDER_TYPE.mse; this._initContext2D(); } else if (this.player.isOldHls() && this.player._opt.useCanvasRender) { this.renderType = CANVAS_RENDER_TYPE.hls; this._initContext2D(); } else if (this.player.isWebrtcH264() && this.player._opt.webrtcUseCanvasRender) { this.renderType = CANVAS_RENDER_TYPE.webrtc; this._initContext2D(); } else if (this._supportOffscreen()) { this.renderType = CANVAS_RENDER_TYPE.offscreen; this._bindOffscreen(); } else if (this.player._opt.useWebGPU) { this.renderType = CANVAS_RENDER_TYPE.webgpu; this._initContextGPU(); } else { this.renderType = CANVAS_RENDER_TYPE.webgl; this._initContextGl(); } } _supportOffscreen() { return supportOffscreen(this.$videoElement) && this.player._opt.useOffscreen; } //
_bindOffscreen() { this.bitmaprenderer = this.$videoElement.getContext('bitmaprenderer'); } render(msg) { this.yuvList.push(msg); this.startRender(); } startRender() { while (true) { if (this.yuvList.length <= 0) { break; } const yuv = this.yuvList.shift(); this.doRender(yuv); } } //
doRender(msg) { //
if (this.renderType !== CANVAS_RENDER_TYPE.mse) { const tempStats = { ts: msg.ts || 0, fps: true }; this.player.updateStats(tempStats); } switch (this.renderType) { case CANVAS_RENDER_TYPE.offscreen: this.bitmaprenderer.transferFromImageBitmap(msg.buffer); break; case CANVAS_RENDER_TYPE.webgl: case CANVAS_RENDER_TYPE.webgpu: if (this.isWebglContextLost) { this.player.debug.warn('CanvasVideoLoader', 'doRender() and webgl context is lost'); return; } let yuvData = msg.output; // face ai
if (this.player.faceDetectActive && this.player.ai && this.player.ai.faceDetector) { if (this.prevAiFaceDetectTime === null) { this.prevAiFaceDetectTime = now$2(); } const _nowTime = now$2(); if (_nowTime - this.prevAiFaceDetectTime >= this.player._opt.aiFaceDetectInterval) { yuvData = this.player.ai.faceDetector.detect({ width: this.$videoElement.width, height: this.$videoElement.height, data: msg.output, ts: msg.ts || 0 }); this.prevAiFaceDetectTime = _nowTime; } } // object ai
if (this.player.objectDetectActive && this.player.ai && this.player.ai.objectDetector) { if (this.prevAiObjectDetectTime === null) { this.prevAiObjectDetectTime = now$2(); } const _nowTime = now$2(); if (_nowTime - this.prevAiObjectDetectTime >= this.player._opt.aiObjectDetectInterval) { yuvData = this.player.ai.objectDetector.detect({ width: this.$videoElement.width, height:
this.$videoElement.height, data: msg.output, ts: msg.ts || 0 }); this.prevAiObjectDetectTime = _nowTime; } } // occlusion ai if (this.player.occlusionDetectActive && this.player.ai && this.player.ai.occlusionDetector) { if (this.prevAiOcclusionDetectTime === null) { this.prevAiOcclusionDetectTime = now$2(); } const _nowTime = now$2(); if (_nowTime - this.prevAiOcclusionDetectTime >= this.player._opt.aiOcclusionDetectInterval) { const result = this.player.ai.occlusionDetector.check({ width: this.$videoElement.width, height: this.$videoElement.height, data: msg.output }); this.prevAiOcclusionDetectTime = _nowTime; if (result) { // emit this.player.emit(EVENTS.aiOcclusionDetectResult, { ts: msg.ts || 0 }); } } } if (this.player.imageDetectActive && this.player.ai && this.player.ai.imageDetector) { const result = this.player.ai.imageDetector.check({ width: this.$videoElement.width, height: this.$videoElement.height, data: msg.output }); if (result && result.data) { this.player.emit(EVENTS.aiOcclusionDetectResult, { type: result.type, ts: msg.ts || 0 }); if (this.player._opt.aiImageDetectDrop) { this.player.debug.log('CanvasVideoLoader', `doRender() and ai image detect result type is ${result.type} and drop`); return; } } } if (this.renderType === CANVAS_RENDER_TYPE.webgpu) { try { if (!this.webGPURender) { this.player.debug.warn('CanvasVideoLoader', 'doRender webgpu render is not init'); return; } this.webGPURender.renderYUV(this.$videoElement.width, this.$videoElement.height, yuvData); } catch (e) { this.player.debug.error(`CanvasVideoLoader`, `doRender webgpu render and error: ${e.toString()}`); } } else if (this.renderType === CANVAS_RENDER_TYPE.webgl) { try { this.webglRender.renderYUV(this.$videoElement.width, this.$videoElement.height, yuvData); } catch (e) { this.player.debug.error(`CanvasVideoLoader`, `doRender webgl render context is lost ${this.contextGl && this.contextGl.isContextLost()} and error: ${e.toString()}`); } } break; case CANVAS_RENDER_TYPE.webcodecs: if (this.webglRender) { this.webglRender.render(msg.videoFrame); closeVideoFrame(msg.videoFrame); } else if (this.context2D) { if (isFunction$1(msg.videoFrame.createImageBitmap)) { try { msg.videoFrame.createImageBitmap().then(image => { this.context2D.drawImage(image, 0, 0, this.$videoElement.width, this.$videoElement.height); closeVideoFrame(msg.videoFrame); }); } catch (e) {} } else { this.context2D.drawImage(msg.videoFrame, 0, 0, this.$videoElement.width, this.$videoElement.height); closeVideoFrame(msg.videoFrame); } } else { this.player.debug.warn('CanvasVideoLoader', 'doRender() and webcodecs context is lost'); } break; case CANVAS_RENDER_TYPE.mse: this.context2D.drawImage(msg.$video, 0, 0, this.$videoElement.width, this.$videoElement.height); break; case CANVAS_RENDER_TYPE.hls: this.context2D.drawImage(msg.$video, 0, 0, this.$videoElement.width, this.$videoElement.height); break; case CANVAS_RENDER_TYPE.webrtc: this.context2D.drawImage(msg.$video, 0, 0, this.$videoElement.width, this.$videoElement.height); break; } let currentPts = msg.ts || 0; if (this.renderType === CANVAS_RENDER_TYPE.mse) { currentPts = parseInt(msg.$video.currentTime * 1000, 10) + (this.player.mseDecoder.firstRenderTime || 0); } this.player.updateCurrentPts(currentPts); this.doAddContentToWatermark(); this.doAddAiContentToWatermark(); } // clearView() { super.clearView(); switch (this.renderType) { case CANVAS_RENDER_TYPE.offscreen: createEmptyImageBitmap(this.$videoElement.width, this.$videoElement.height).then(imageBitMap => { 
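/*
  For reference: the AI hooks in doRender() above are throttled by wall-clock intervals
  (aiFaceDetectInterval / aiObjectDetectInterval / aiOcclusionDetectInterval, in ms), so
  detection runs on a subset of frames rather than on every frame. The throttle pattern,
  shown with placeholder names (prevDetectTime, detectIntervalMs, detector, yuvFrame):
    if (now - prevDetectTime >= detectIntervalMs) {
      detector.detect({ width, height, data: yuvFrame, ts });
      prevDetectTime = now;
    }
  The real members are the prevAi*DetectTime fields and the player._opt.ai*DetectInterval
  options used above.
*/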
this.bitmaprenderer.transferFromImageBitmap(imageBitMap); }); break; case CANVAS_RENDER_TYPE.webgl: this.contextGl.clear(this.contextGl.COLOR_BUFFER_BIT); break; case CANVAS_RENDER_TYPE.webgpu: this.webGPURender.clear(); break; case CANVAS_RENDER_TYPE.webcodecs: if (this.contextGl) { this.contextGl.clear(this.contextGl.COLOR_BUFFER_BIT); } else if (this.context2D) { this.context2D.clearRect(0, 0, this.$videoElement.width, this.$videoElement.height); } break; case CANVAS_RENDER_TYPE.mse: this.context2D.clearRect(0, 0, this.$videoElement.width, this.$videoElement.height); break; case CANVAS_RENDER_TYPE.hls: this.context2D.clearRect(0, 0, this.$videoElement.width, this.$videoElement.height); break; case CANVAS_RENDER_TYPE.webrtc: this.context2D.clearRect(0, 0, this.$videoElement.width, this.$videoElement.height); break; } } _initTempTextCanvas() { this.tempTextCanvas = document.createElement('canvas'); this.tempTextCanvasCtx = this.tempTextCanvas.getContext('2d'); this.tempTextCanvas.width = 600; this.tempTextCanvas.height = 20; } // deprecated
doAddContentToCanvas() { if (this.tempContentList.length > 0 && this.context2D) { strokeRectOrTextInCanvas({ ctx: this.context2D, list: this.tempContentList }); } } // deprecated
doAddContentToWebGlCanvas() { if (this.tempContentList.length > 0 && this.contextGl && this.webglRectRender) { this.tempContentList.forEach(item => { const x = item.x; const y = item.y; if (item.type === 'rect') { const width = item.width; const height = item.height; const lineColor = hexToRgba(item.color || '#008000'); const lineWidth = item.lineWidth || 4; if (!width || !height) return; this.webglRectRender.drawBox({ x, y, width, height, lineColor, lineWidth, canvasWidth: this.$videoElement.width, canvasHeight: this.$videoElement.height }); } else if (item.type === 'text') { const text = item.text || ''; if (!text) return; const fontSize = item.fontSize || 20; const textColor = item.color || '#008000'; if (!this.tempTextCanvas) { this._initTempTextCanvas(); } this.tempTextCanvasCtx.clearRect(0, 0, this.tempTextCanvas.width, this.tempTextCanvas.height); this.tempTextCanvasCtx.font = `${fontSize}px Arial`; this.tempTextCanvasCtx.fillStyle = textColor; this.tempTextCanvasCtx.textBaseline = 'top'; this.tempTextCanvasCtx.fillText(text, 0, 0); this.webglRender.drawDom(this.$videoElement.width, this.$videoElement.height, x, y, this.tempTextCanvas); } }); } } } class VideoLoader extends CommonLoader$2 { constructor(player) { super(); this.player = player; this.TAG_NAME = 'Video'; const $videoElement = document.createElement('video'); const $canvasElement = document.createElement('canvas'); $videoElement.muted = true; $videoElement.style.position = "absolute"; $videoElement.style.top = 0; $videoElement.style.left = 0; this._delayPlay = false; player.$container.appendChild($videoElement); this.$videoElement = $videoElement; this.$canvasElement = $canvasElement; this.canvasContext = $canvasElement.getContext('2d'); this.mediaStream = null; this.vwriter = null; if (player.canVideoTrackWritter() && supportMediaStreamTrack() && supportMediaStream()) { this.trackGenerator = new MediaStreamTrackGenerator({ kind: 'video' }); this.mediaStream = new MediaStream([this.trackGenerator]); $videoElement.srcObject = this.mediaStream; this.vwriter = this.trackGenerator.writable.getWriter(); } this.fixChromeVideoFlashBug(); this.fixMobileAutoFullscreen(); this.resize(); this.eventListenList = []; this.isRenderRetryPlaying = false; this.isRenderRetryPlayingTimes = 0; this.isRetryPlaying = false;
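/*
  For reference, a hedged sketch of the insertable-streams pipeline set up in the constructor
  above when MediaStreamTrackGenerator is available (currently a Chromium-only API, hence the
  canVideoTrackWritter()/supportMediaStreamTrack()/supportMediaStream() guards):
    const trackGenerator = new MediaStreamTrackGenerator({ kind: 'video' });
    videoElement.srcObject = new MediaStream([trackGenerator]);
    const writer = trackGenerator.writable.getWriter();
    // later, for each decoded WebCodecs VideoFrame:
    writer.write(videoFrame); // the frame is consumed by the writer and shown by the video element
  videoElement/writer/videoFrame are placeholder names for this note; the bundle stores the
  writer as this.vwriter and feeds decoded frames to it elsewhere.
*/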
this.isRetryPlayingTimes = 0; this.checkVideoCanplayTimeout = null; const isSupportVideoFrameCallback = supportVideoFrameCallback(); this.supportVideoFrameCallbackHandle = null; const { proxy } = this.player.events; const canplayProxyDestroy = proxy(this.$videoElement, 'canplay', () => { this.player.debug.log('Video', 'canplay'); if (this.player.isDestroyedOrClosed()) { return; } if (this._delayPlay) { this.clearCheckVideoCanplayTimeout(); this._play(); // if requestVideoFrameCallback is supported, use videoFrameCallback
if (supportVideoFrameCallback()) { if (!this.supportVideoFrameCallbackHandle) { this.supportVideoFrameCallbackHandle = this.$videoElement.requestVideoFrameCallback(this.videoFrameCallback.bind(this)); } } else { this.player.debug.warn('Video', 'not support requestVideoFrameCallback and use timeupdate event to update stats'); } } }); const waitingProxyDestroy = proxy(this.$videoElement, 'waiting', () => { // this.player.emit(EVENTS.videoWaiting);
this.player.debug.log('Video', 'waiting'); }); const loadedmetadataDestroy = proxy(this.$videoElement, 'loadedmetadata', () => { this.player.debug.log('Video', 'loadedmetadata'); }); // the video progress only advances about every 250ms, so the updates here also happen roughly every 250ms
const timeupdateProxyDestroy = proxy(this.$videoElement, 'timeupdate', event => { // this.player.debug.log('Video', 'timeupdate', event.timeStamp);
if (this.player.isDestroyedOrClosed()) { return; } //
if (isFalse(isSupportVideoFrameCallback)) { // ms
const timeStamp = parseInt(this.getCurrentTime() * 1000, 10); //
if (player.isWebrtcH264() || this.player.isOldHls() || this.player.isAliyunRtc()) { this.player.emit(EVENTS.timeUpdate, timeStamp); player.handleRender(); // player.updateStats({ fps: true, ts: timeStamp, dts: timeStamp });
} } if (this.player.isMseDecoderUseWorker()) { this.player.decoderWorker.updateVideoTimestamp(this.getCurrentTime()); // listen for playback rate changes here.
this._handleUpdatePlaybackRate(); } }); const errorProxyDestroy = proxy(this.$videoElement, 'error', () => { this.player.debug.error('Video', "Error Code " + this.$videoElement.error.code + ' ' + VIDEO_ERROR_CODE_DESC[this.$videoElement.error.code] + "; Details: " + this.$videoElement.error.message); // for mse error
if (this.player.isUseMSE()) { this.$videoElement.error.code; const errorMessage = this.$videoElement.error.message; if (errorMessage.indexOf(VIDEO_ERROR_MESSAGE.videoDecoderInitializationFailed) !== -1) { // needs a special flag here,
this.player.isMSEVideoDecoderInitializationFailedNotSupportHevc = true; } // if (errorMessage.indexOf(VIDEO_ERROR_MESSAGE.audioDecoderError) !== -1) { this.player.isMSEAudioDecoderError = true; }
} // for hls video play
if (this.player.isHlsCanVideoPlay()) ; }); this.eventListenList.push(canplayProxyDestroy, waitingProxyDestroy, timeupdateProxyDestroy, errorProxyDestroy, loadedmetadataDestroy); if (this.player.isMseDecoderUseWorker()) { const ratechangeProxyDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.ratechange, () => { this.player.debug.log(this.TAG_NAME, 'video playback Rate change', this.$videoElement && this.$videoElement.playbackRate); if (this.$videoElement && this.$videoElement.paused) { this.player.debug.log(this.TAG_NAME, 'video is paused and next try to replay'); this.$videoElement.play(); } }); this.eventListenList.push(ratechangeProxyDestroy); } this.player.debug.log('Video', 'init'); } destroy() { super.destroy(); this.clearCheckVideoCanplayTimeout(); this._cancelVideoFrameCallback(); if (this.eventListenList) { this.eventListenList.forEach(item => { item(); }); this.eventListenList = []; } this.isRenderRetryPlaying = false;
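/*
  For reference: the constructor above prefers requestVideoFrameCallback (per-frame callbacks
  with presentation metadata) and falls back to the coarser 'timeupdate' event (~every 250ms)
  for stats, pts and watermark updates. The rVFC loop has to re-arm itself on every callback,
  roughly like this (videoElement/handle/onFrame are placeholder names):
    function onFrame(now, metadata) {
      // metadata.mediaTime / metadata.width / metadata.height, as used by videoFrameCallback()
      handle = videoElement.requestVideoFrameCallback(onFrame);
    }
    let handle = videoElement.requestVideoFrameCallback(onFrame);
    // teardown: videoElement.cancelVideoFrameCallback(handle);
  In the bundle the handle lives in supportVideoFrameCallbackHandle and is cancelled by
  _cancelVideoFrameCallback() during destroy().
*/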
this.isRenderRetryPlayingTimes = 0; this.isRetryPlaying = false; this.isRetryPlayingTimes = 0; if (this.player._opt.videoRenderSupportScale && this._isNeedAddBackDropFilter()) { const $container = this.player.$container; $container.style.backdropFilter = 'none'; $container.style.transform = 'none'; } this.$canvasElement.height = 0; this.$canvasElement.width = 0; this.$canvasElement = null; this.canvasContext = null; if (this.$videoElement) { this.$videoElement.pause(); this.$videoElement.currentTime = 0; if (this.$videoElement.srcObject) { this.$videoElement.srcObject = null; this.$videoElement.removeAttribute('srcObject'); } if (this.$videoElement.src) { this.$videoElement.src = ''; this.$videoElement.removeAttribute('src'); } try { this.$videoElement.load(); } catch (e) { // ignore } this.player.$container.removeChild(this.$videoElement); this.$videoElement = null; } if (this.trackGenerator) { this.trackGenerator.stop(); this.trackGenerator = null; } if (this.vwriter) { this.vwriter.close(); this.vwriter = null; } this._delayPlay = false; if (this.mediaStream) { this.mediaStream.getTracks().forEach(track => track.stop()); this.mediaStream = null; } this.off(); this.player.debug.log('Video', 'destroy'); } videoFrameCallback(nowTime) { let metaData = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; if (this.player.isDestroyedOrClosed()) { this.player.debug.log('Video', 'videoFrameCallback() and isDestroyedOrClosed and return'); return; } // this.player.debug.log('Video', 'videoFrameCallback', nowTime, metaData); /** * metaData: * presentationTime:用户代理提交帧以进行合成的时间。 * expectedDisplayTime:用户代理预计将显示帧的时间。 * width:帧的宽度。 * height:帧的高度。 * mediaTime:帧的媒体时间。它在video.currentTime时间线上的时间戳 * presentedFrames:自用户代理启动以来已提交的帧数。 * processingDuration:用户代理处理帧的时间。 */ this.player.handleRender(); const ts = parseInt(Math.max(metaData.mediaTime, this.getCurrentTime()) * 1000, 10) || 0; if (this.player.isUseHls265UseMse()) { this.player.updateStats({ fps: true, ts: ts }); } else if (this.player.isMseDecoderUseWorker()) { if (!this.player._times.videoStart) { this.player._times.videoStart = now$2(); this.player.handlePlayToRenderTimes(); } const firstRenderTime = this.player._mseWorkerData.firstRenderTime || 0; const newTs = ts + firstRenderTime; this.player.updateStats({ fps: true, dfps: true, ts: newTs, // start with firstRenderTime mseTs: ts // start 0 }); this.player.emit(EVENTS.timeUpdate, newTs); if (isFalse(this.getHasInit()) && metaData.width && metaData.height) { const info = { width: metaData.width, height: metaData.height }; this.updateVideoInfo(info); this.initCanvasViewSize(); } } if (this.player.isWebrtcH264() || this.player.isOldHls() || this.player.isAliyunRtc()) { this.player.emit(EVENTS.timeUpdate, ts); // update width and height if (isFalse(this.getHasInit()) && metaData.width && metaData.height) { const info = { width: metaData.width, height: metaData.height }; // hls 会有encTypeCode if (!this.videoInfo.encTypeCode && !this.player.isOldHls()) { info.encTypeCode = VIDEO_ENC_CODE.h264; } this.updateVideoInfo(info); } // this.player.updateStats({ fps: true, ts: ts, dts: ts }); this.player.updateCurrentPts(ts); this.doAddContentToWatermark(); } else if (isTrue(this.player._opt.useMSE) && isFalse(this.player._opt.mseUseCanvasRender)) { if (this.player.mseDecoder) { let ts = parseInt(Math.max(metaData.mediaTime, this.getCurrentTime()) * 1000, 10) + (this.player.mseDecoder.firstRenderTime || 0); this.player.updateCurrentPts(ts); } else if (this.player._opt.mseDecoderUseWorker) { let 
ts = parseInt(Math.max(metaData.mediaTime, this.getCurrentTime()) * 1000, 10) + (this.player._mseWorkerData.firstRenderTime || 0); this.player.updateCurrentPts(ts); } this.doAddContentToWatermark(); } this.supportVideoFrameCallbackHandle = this.$videoElement.requestVideoFrameCallback(this.videoFrameCallback.bind(this)); } /** * 修复chrome系浏览器,在设置video的objectFit属性后,video标签会无缘无故的闪烁, * 但是添加了backdropFilter属性之后,就不会闪烁了。 */ fixChromeVideoFlashBug() { if (this.player._opt.videoRenderSupportScale) { if (this._isNeedAddBackDropFilter()) { const $container = this.player.$container; $container.style.backdropFilter = 'blur(0px)'; $container.style.transform = 'translateZ(0)'; } } } // 使用video标签在html5中播放视频时,视频自动全屏播放 // ios端video标签必须加webkit-playsinline、playsinline属性 // android端部分视频也会存在自动全屏问题,添加webkit-playsinline属性 fixMobileAutoFullscreen() { const _isIos = isIOS(); const _isAndroid = isAndroid(); if (_isIos || _isAndroid) { this.player.debug.log('Video', `fixMobileAutoFullscreen and isIOS ${_isIos} and isAndroid ${_isAndroid}`); // 解决视频在ios端播放默认全屏问题 this.$videoElement.setAttribute('webkit-playsinline', 'true'); this.$videoElement.setAttribute('playsinline', 'true'); // 解决视频在android端播放默认全屏问题 this.$videoElement.setAttribute('x5-video-player-type', 'h5-page'); } } _isNeedAddBackDropFilter() { const browser = getBrowser(); const type = browser.type.toLowerCase(); if (type === 'chrome' || type === 'edge') { return true; } return false; } isPaused() { let result = true; if (this.$videoElement) { result = this.$videoElement.paused; } return result; } isPause() { return this.isPaused(); } /** * HAVE_NOTHING 0 没有关于音频/视频是否就绪的信息 * HAVE_METADATA 1 音频/视频已初始化 * HAVE_CURRENT_DATA 2 数据已经可以播放 (当前位置已经加载) 但没有数据能播放下一帧的内容 * HAVE_FUTURE_DATA 3 当前及至少下一帧的数据是可用的 (换句话来说至少有两帧的数据) * HAVE_ENOUGH_DATA 4 可用数据足以开始播放 - 如果网速得到保障 那么视频可以一直播放到底 */ _getVideoReadyState() { let result = 0; if (this.$videoElement) { result = this.$videoElement.readyState; } return result; } /** * get video current time * @returns {number} * @private */ _getVideoCurrentTime() { let result = 0; if (this.$videoElement) { result = this.$videoElement.currentTime; } return result; } play() { if (this.$videoElement) { const readyState = this._getVideoReadyState(); this.player.debug.log('Video', `play and readyState: ${readyState}`); if (readyState === 0) { this.player.debug.log('Video', 'readyState is 0 and set _delayPlay to true and listen canplay event to play'); this._delayPlay = true; // 延迟去检查下readyState是否变成了4,如果没有变成canplay状态,则需要手动尝试播放看看,如果播放失败了,则需要做降级处理。 if (this.checkVideoCanplayTimeout === null) { this.checkVideoCanplayTimeout = setTimeout(() => { this.clearCheckVideoCanplayTimeout(); if (this.player.isDestroyedOrClosed()) { return; } // 如果还没有可以播放 if (isFalse(this.isPlaying())) { const bufferStore = this._getBufferStore(); this.player.debug.warn('Video', `checkVideoCanplayTimeout and video is not playing and buffer store is ${bufferStore} and retry play`); this.$videoElement.currentTime = bufferStore; this._replay(); } }, 1 * 1000); } return; } this._play(); } } _play() { this.$videoElement && this.$videoElement.play().then(() => { this._delayPlay = false; this.player.debug.log('Video', '_play success'); if (!this.isPlaying()) { setTimeout(() => { this._replay(); }, 100); } else { // play success this.player.emit(EVENTS.removeLoadingBgImage); // reset retry times this.isRetryPlayingTimes = 0; this.isRetryPlaying = false; } }).catch(e => { if (this.player.isDestroyedOrClosed()) { this.player.debug.log('Video', '_play error and player is isDestroyedOrClosed and 
return'); return; } this.player.debug.error('Video', '_play error', e); this.isRetryPlaying = false; // todo try to play again setTimeout(() => { this._replay(); }, 100); }); } _replay() { if (!this.isPlaying() && isFalse(this.player.isDestroyedOrClosed()) && isFalse(this.isRetryPlaying)) { this.isRetryPlaying = true; if (this.isRetryPlayingTimes >= VIDEO_ELEMENT_RETRY_PLAY_MAX_TIME) { if (this.player.isWebrtcH264()) { this.player.debug.error('Video', `_replay(webrtc H264) then but not playing and retry play times is ${this.isRetryPlayingTimes} and emit error`); this.player.emitError(EVENTS_ERROR.videoElementPlayingFailedForWebrtc); } else { this.player.debug.error('Video', `_replay then but not playing and retry play times is ${this.isRetryPlayingTimes} and emit error to use canvas render`); this.player.emitError(EVENTS_ERROR.videoElementPlayingFailed); } return; } this.player.debug.warn('Video', `_play then but not playing and retry play and isRetryPlayingTimes is ${this.isRetryPlayingTimes}`); this._play(); this.isRetryPlayingTimes++; } else { this.player.debug.log('Video', `_replay() and isPlaying is ${this.isPlaying()} and isRetryPlaying is ${this.isRetryPlaying} and isDestroyedOrClosed is ${this.player.isDestroyedOrClosed()} and return;`); } } pause(isNow) { this.player.debug.log(this.TAG_NAME, 'pause and isNow is ' + isNow); // 预防 // https://developer.chrome.com/blog/play-request-was-interrupted/ // http://alonesuperman.com/?p=23 if (this.isPlaying()) { if (isNow) { this.$videoElement && this.$videoElement.pause(); this._cancelVideoFrameCallback(); } else { setTimeout(() => { this.$videoElement && this.$videoElement.pause(); this._cancelVideoFrameCallback(); }, 100); } } } clearView() { super.clearView(); if (this.$videoElement) { this.$videoElement.pause(); this.$videoElement.currentTime = 0; if (this.$videoElement.src) { this.$videoElement.src = ''; this.$videoElement.removeAttribute('src'); } if (this.$videoElement.srcObject) { this.$videoElement.srcObject = null; this.$videoElement.removeAttribute('srcObject'); } } } screenshot(filename, format, quality, type) { if (!this._canScreenshot()) { this.player.debug.warn('Video', `screenshot failed, video is not ready and stats is ${this._getVideoReadyState()}`); return null; } filename = filename || now$2(); type = type || SCREENSHOT_TYPE.download; let encoderOptions = 0.92; if (!SCREENSHOT_FORMAT_TYPE[format] && SCREENSHOT_TYPE[format]) { type = format; format = 'png'; quality = undefined; } if (typeof quality === "string") { type = quality; quality = undefined; } if (typeof quality !== 'undefined') { encoderOptions = Number(quality); } const $video = this.$videoElement; let canvas = this.$canvasElement; canvas.width = $video.videoWidth; canvas.height = $video.videoHeight; this.canvasContext.drawImage($video, 0, 0, canvas.width, canvas.height); const formatType = SCREENSHOT_FORMAT_TYPE[format] || SCREENSHOT_FORMAT_TYPE.png; const dataURL = canvas.toDataURL(formatType, encoderOptions); // release memory this.canvasContext.clearRect(0, 0, canvas.width, canvas.height); canvas.width = 0; canvas.height = 0; if (type === SCREENSHOT_TYPE.base64) { return dataURL; } else { const file = dataURLToFile(dataURL); if (type === SCREENSHOT_TYPE.blob) { return file; } else if (type === SCREENSHOT_TYPE.download) { const suffix = formatType.split('/')[1]; saveAs(file, filename + '.' 
+ suffix); } } } screenshotWatermark(options) { return new Promise((resolve, reject) => { if (isString(options)) { options = { filename: options }; } if (!this._canScreenshot()) { this.player.debug.warn('Video', 'screenshot failed, video is not ready'); return reject('screenshot failed, video is not ready'); } const $video = this.$videoElement; options = options || {}; options.width = $video.videoWidth; options.height = $video.videoHeight; options.filename = options.filename || now$2(); options.format = options.format ? SCREENSHOT_FORMAT_TYPE[options.format] : SCREENSHOT_FORMAT_TYPE.png; options.quality = Number(options.quality) || 0.92; options.type = options.type || SCREENSHOT_TYPE.download; let canvas = this.$canvasElement; canvas.width = $video.videoWidth; canvas.height = $video.videoHeight; this.canvasContext.drawImage($video, 0, 0, canvas.width, canvas.height); const dataURL = canvas.toDataURL(options.format, options.quality); // release memory this.canvasContext.clearRect(0, 0, canvas.width, canvas.height); canvas.width = 0; canvas.height = 0; createImageWatermark(dataURL, options).then(dataURL2 => { if (options.type === SCREENSHOT_TYPE.base64) { resolve(dataURL); } else { const file = dataURLToFile(dataURL2); if (options.type === SCREENSHOT_TYPE.blob) { resolve(file); } else if (options.type === SCREENSHOT_TYPE.download) { resolve(); const suffix = options.format.split('/')[1]; saveAs(file, options.filename + '.' + suffix); } } }).catch(e => { reject(e); }); }); } initCanvasViewSize() { this.resize(); } clear() { const $video = this.$videoElement; const ranges = $video.buffered; const buffered = ranges.length ? ranges.end(ranges.length - 1) : 0; $video.currentTime = buffered; } // render(msg) { // for wcs or wasm if (this.vwriter) { // if (!this.$videoElement.srcObject) { this.$videoElement.srcObject = this.mediaStream; } if (this.isPaused()) { const readyState = this._getVideoReadyState(); this.player.debug.warn('Video', 'render() error, video is paused and readyState is ' + readyState); if (readyState === 4 && isFalse(this.isRenderRetryPlaying)) { this.isRenderRetryPlaying = true; if (this.isRenderRetryPlayingTimes > VIDEO_ELEMENT_RETRY_PLAY_MAX_TIME) { this.player.debug.error('Video', 'render() error, video is paused and readyState is ' + readyState + ', retry times is ' + this.isRenderRetryPlayingTimes + ', emit error and use canvas render'); this.player.emitError(EVENTS_ERROR.videoElementPlayingFailed); return; } this.$videoElement.play().then(() => { this.isRenderRetryPlayingTimes = 0; this.isRenderRetryPlaying = false; this.player.debug.log('Video', 'render() video is paused and replay success'); }).catch(e => { this.isRenderRetryPlaying = false; this.isRenderRetryPlayingTimes++; this.player.debug.warn('Video', 'render() error, video is paused and replay error ', e); }); } } this.player.updateStats({ fps: true, ts: msg.ts || 0 }); if (msg.videoFrame) { // just for wcs this.vwriter.write(msg.videoFrame); // release memory closeVideoFrame(msg.videoFrame); } else if (msg.output) { // just for wasm let yuvData = msg.output; if (this.player.faceDetectActive && this.player.ai && this.player.ai.faceDetector) { if (this.prevAiFaceDetectTime === null) { this.prevAiFaceDetectTime = now$2(); } const _nowTime = now$2(); if (_nowTime - this.prevAiFaceDetectTime > this.player._opt.aiFaceDetectInterval) { yuvData = this.player.ai.faceDetector.detect({ width: this.videoInfo.width, height: this.videoInfo.height, data: msg.output, ts: msg.ts || 0 }); this.prevAiFaceDetectTime = _nowTime; } } 
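// The object and occlusion detectors below reuse the same interval-based throttle as the face
// detector above: each keeps a prev*DetectTime timestamp and only runs once the configured
// aiObjectDetectInterval / aiOcclusionDetectInterval has elapsed, so AI inference is not done
// on every decoded frame. The image detector further down has no interval and checks every frame.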
if (this.player.objectDetectActive && this.player.ai && this.player.ai.objectDetector) { if (this.prevAiObjectDetectTime === null) { this.prevAiObjectDetectTime = now$2(); } const _nowTime = now$2(); if (_nowTime - this.prevAiObjectDetectTime > this.player._opt.aiObjectDetectInterval) { yuvData = this.player.ai.objectDetector.detect({ width: this.videoInfo.width, height: this.videoInfo.height, data: msg.output, ts: msg.ts || 0 }); this.prevAiObjectDetectTime = _nowTime; } } // occlusion ai if (this.player.occlusionDetectActive && this.player.ai && this.player.ai.occlusionDetector) { if (this.prevAiOcclusionDetectTime === null) { this.prevAiOcclusionDetectTime = now$2(); } const _nowTime = now$2(); if (_nowTime - this.prevAiOcclusionDetectTime >= this.player._opt.aiOcclusionDetectInterval) { const result = this.player.ai.occlusionDetector.check({ width: this.videoInfo.width, height: this.videoInfo.height, data: msg.output, ts: msg.ts || 0 }); this.prevAiOcclusionDetectTime = _nowTime; if (result) { this.player.debug.log('Video', `render() and ai occlusion detect result is true`); // emit this.player.emit(EVENTS.aiOcclusionDetectResult, { ts: msg.ts || 0 }); } } } if (this.player.imageDetectActive && this.player.ai && this.player.ai.imageDetector) { const result = this.player.ai.imageDetector.check({ width: this.videoInfo.width, height: this.videoInfo.height, data: msg.output, ts: msg.ts || 0 }); if (result && result.data) { this.player.emit(EVENTS.aiOcclusionDetectResult, { type: result.type, ts: msg.ts || 0 }); if (this.player._opt.aiImageDetectDrop) { this.player.debug.log('Video', `render() and ai image detect result type is ${result.type} and drop`); return; } } } try { const videoFrame = createVideoFrame(yuvData, { format: 'I420', codedWidth: this.videoInfo.width, codedHeight: this.videoInfo.height, timestamp: msg.ts }); this.vwriter.write(videoFrame); // release memory closeVideoFrame(videoFrame); } catch (e) { this.player.debug.error('Video', 'render error', e); this.player.emitError(EVENTS_ERROR.wasmUseVideoRenderError, e); } } this.player.updateCurrentPts(msg.ts || 0); this.doAddContentToWatermark(); this.doAddAiContentToWatermark(); } else { this.player.debug.warn('Video', 'render and this.vwriter is null'); } } _resize() { this.player.debug.log('Video', '_resize()'); let width = this.player.width; let height = this.player.height; const option = this.player._opt; const rotate = option.rotate; if (option.hasControl && !option.controlAutoHide) { const controlHeight = option.playType === PLAY_TYPE.playbackTF ? 
CONTROL_PLAYBACK_HEIGHT : CONTROL_HEIGHT; if (isMobile() && this.player.fullscreen && option.useWebFullScreen) { width -= controlHeight; } else { height -= controlHeight; } } this.$videoElement.width = width; this.$videoElement.height = height; this.$videoElement.style.width = width + "px"; this.$videoElement.style.height = height + "px"; // rotate if (rotate === 270 || rotate === 90) { this.$videoElement.width = height; this.$videoElement.height = width; this.$videoElement.style.width = height + "px"; this.$videoElement.style.height = width + "px"; } let resizeWidth = this.$videoElement.width; let resizeHeight = this.$videoElement.height; let left = (width - resizeWidth) / 2; let top = (height - resizeHeight) / 2; let objectFill = 'contain'; // 默认是true // 视频画面做等比缩放后,高或宽对齐canvas区域,画面不被拉伸,但有黑边 // 视频画面完全填充canvas区域,画面会被拉伸 if (isFalse(option.isResize)) { objectFill = 'fill'; } // 视频画面做等比缩放后,完全填充canvas区域,画面不被拉伸,没有黑边,但画面显示不全 if (option.isFullResize) { objectFill = 'none'; } let transform = ''; if (option.mirrorRotate === 'none') { if (rotate) { transform += ' rotate(' + rotate + 'deg)'; } } if (option.mirrorRotate === 'level') { transform += ' rotateY(180deg)'; // 水平镜像翻转 } else if (option.mirrorRotate === 'vertical') { transform += ' rotateX(180deg)'; // 垂直镜像翻转 } if (this.player._opt.videoRenderSupportScale) { this.$videoElement.style.objectFit = objectFill; } this.$videoElement.style.transform = transform; this.$videoElement.style.padding = "0"; this.$videoElement.style.left = left + "px"; this.$videoElement.style.top = top + "px"; } getType() { return RENDER_TYPE.video; } getCurrentTime() { return this.$videoElement.currentTime; } // is playing isPlaying() { return this.$videoElement && isFalse(this.$videoElement.paused) && isFalse(this.$videoElement.ended) && this.$videoElement.playbackRate !== 0 && this.$videoElement.readyState !== 0; } _canScreenshot() { return this.$videoElement && this.$videoElement.readyState >= 1; } getPlaybackQuality() { let result = null; if (this.$videoElement) { if (isFunction$1(this.$videoElement.getVideoPlaybackQuality)) { const info = this.$videoElement.getVideoPlaybackQuality(); result = { // 从创建起的已丢弃帧数数量 // 创建起的损坏帧数数量的 unsigned long 值。一个损坏帧可能属于创建帧或丢弃帧。 droppedVideoFrames: info.droppedVideoFrames || info.corruptedVideoFrames, totalVideoFrames: info.totalVideoFrames, creationTime: info.creationTime }; } else { result = { droppedVideoFrames: this.$videoElement.webkitDroppedFrameCount, totalVideoFrames: this.$videoElement.webkitDecodedFrameCount, creationTime: now$2() }; } if (result) { result.renderedVideoFrames = result.totalVideoFrames - result.droppedVideoFrames; } } return result; } setRate(value) { if (this.$videoElement) { this.$videoElement.playbackRate = value; } } get rate() { let result = 1; if (this.$videoElement) { result = this.$videoElement.playbackRate; } return result; } clearCheckVideoCanplayTimeout() { if (this.checkVideoCanplayTimeout) { clearTimeout(this.checkVideoCanplayTimeout); this.checkVideoCanplayTimeout = null; } } _cancelVideoFrameCallback() { if (this.supportVideoFrameCallbackHandle && this.$videoElement) { this.$videoElement.cancelVideoFrameCallback(this.supportVideoFrameCallbackHandle); this.supportVideoFrameCallbackHandle = null; } } _getBufferStore() { const $video = this.$videoElement; let result = 0; if ($video.buffered.length > 0) { result = $video.buffered.start(0); } return result; } // for mse _handleUpdatePlaybackRate() { const $video = this.$videoElement; const videoBuffer = this.player._opt.videoBuffer; const 
videoBufferDelay = this.player._opt.videoBufferDelay; let maxDelay = (videoBuffer + videoBufferDelay) / 1000; const ranges = $video.buffered; // 已缓冲的时间范围 ranges.length ? ranges.start(0) : 0; const buffered = ranges.length ? ranges.end(ranges.length - 1) : 0; let time = $video.currentTime; const buffer = buffered - time; // not less than MSE_MAX_DELAY_TIME const maxDelayTime = Math.max(MSE_MAX_DELAY_TIME, maxDelay + MSE_DELAY_INCREASE_TIME); this.player.updateStats({ mseVideoBufferDelayTime: buffer }); if (buffer > maxDelayTime) { this.player.debug.log(this.TAG_NAME, `handleUpdatePlaybackRate and buffered is ${buffered} and current is ${time} , delay buffer is more than ${maxDelayTime} is ${buffer} and new time is ${buffered}`); $video.currentTime = buffered; time = $video.currentTime; } else if (buffer < 0) { this.player.debug.warn(this.TAG_NAME, `handleUpdatePlaybackRate and delay buffer is ${buffered} - current is ${time} = ${buffer} < 0 and check video is paused : ${$video.paused} `); // buffer if (buffered === 0) { this.player.emit(EVENTS_ERROR.mediaSourceBufferedIsZeroError, 'video.buffered is empty'); return; } // check if the video is paused if ($video.paused) { // if paused, play it $video.play(); } } const rate = this._getPlaybackRate(buffered - time); if ($video.playbackRate !== rate) { this.player.debug.log(this.TAG_NAME, `handleUpdatePlaybackRate and buffered is ${buffered} and current time is ${time} and delay is ${buffered - time} set playbackRate is ${rate} `); $video.playbackRate = rate; } } // for mse _getPlaybackRate(buffer) { const $video = this.$videoElement; let videoBufferDelay = this.player._opt.videoBufferDelay + this.player._opt.videoBuffer; // not less than 1000ms const maxDelay = Math.max(videoBufferDelay, 1000); const minDelay = maxDelay / 2; // s -> ms buffer = buffer * 1000; switch ($video.playbackRate) { case 1: if (buffer > maxDelay) { return 1.2; } return 1; default: if (buffer <= minDelay) { return 1; } return $video.playbackRate; } } getVideoCurrentTime() { let result = 0; if (this.$videoElement) { result = this.$videoElement.currentTime; } return result; } getVideoBufferLastTime() { const $video = this.$videoElement; let result = 0; if ($video) { const ranges = $video.buffered; // 已缓冲的时间范围 ranges.length ? ranges.start(0) : 0; const buffered = ranges.length ? ranges.end(ranges.length - 1) : 0; result = buffered; } return result; } getVideoBufferDelayTime() { const $video = this.$videoElement; const buffered = this.getVideoBufferLastTime(); let time = $video.currentTime; const buffer = buffered - time; return buffer > 0 ? 
buffer : 0; } checkSourceBufferDelay() { const $video = this.$videoElement; let result = 0; let buffered = 0; if ($video.buffered.length > 0) { buffered = $video.buffered.end($video.buffered.length - 1); result = buffered - $video.currentTime; } if (result < 0) { this.player.debug.warn(this.TAG_NAME, `checkVideoSourceBufferDelay ${result} < 0, and buffered is ${buffered} ,currentTime is ${$video.currentTime} , try to seek ${$video.currentTime} to ${buffered}`); $video.currentTime = buffered; result = 0; } return result; } checkSourceBufferStore() { const $video = this.$videoElement; let result = 0; if ($video.buffered.length > 0) { result = $video.currentTime - $video.buffered.start(0); } return result; } getDecodePlaybackRate() { let result = 0; const $video = this.$videoElement; if ($video) { result = $video.playbackRate; } return result; } } class CanvasPlaybackLoader extends CommonCanvasLoader { constructor(player) { super(player); this.controlHeight = CONTROL_PLAYBACK_HEIGHT; // yuvlist this.bufferList = []; // this.playing = false; // this.playInterval = null; // this.fps = 1; // this.preFps = 1; this.streamFps = 0; // this.playbackRate = 1; // this._firstTimestamp = null; // this._renderFps = 0; // this._startfpsTime = null; // this._startFpsTimestamp = null; this._hasCalcFps = false; this.player.on(EVENTS.playbackPause, flag => { if (flag) { this.pause(); if (this.player.playback.isPlaybackPauseClearCache) { this.clear(); } } else { this.resume(); } }); this.player.debug.log(`CanvasPlaybackLoader`, 'init'); } destroy() { this._stopSync(); this._firstTimestamp = null; this.playing = false; this.playbackRate = 1; this.fps = 1; this.preFps = 1; this.bufferList = []; this._renderFps = 0; this._startfpsTime = null; this._startFpsTimestamp = null; this._hasCalcFps = false; super.destroy(); this.player.debug.log(`CanvasPlaybackLoader`, 'destroy'); } _initCanvasRender() { if (this.player._opt.useWCS) { this.renderType = CANVAS_RENDER_TYPE.webcodecs; if (isWebGL2Supported() && this.player._opt.wcsUseWebgl2Render) { this._initContextGl2(); if (this.webglRender) { this.isWcsWebgl2 = true; } } else { this._initContext2D(); } } else { if (this.player._opt.useWebGPU) { this.renderType = CANVAS_RENDER_TYPE.webgpu; this._initContextGPU(); } else { this.renderType = CANVAS_RENDER_TYPE.webgl; this._initContextGl(); } } } _sync() { this._stopSync(); // 第一帧 this._doPlay(); this.playInterval = setInterval(() => { // 后续帧。 this._doPlay(); }, this.fragDuration); } _doPlay() { if (this.bufferList.length > 0 && !this.player.seeking) { const bufferData = this.bufferList.shift(); if (bufferData && bufferData.buffer) { this._doRender(bufferData.buffer); this.player.handleRender(); this.player.playback.updateStats({ ts: bufferData.ts, tfTs: bufferData.tfTs }); } } } _stopSync() { if (this.playInterval) { clearInterval(this.playInterval); this.playInterval = null; } } // render method _doRender(buffer) { if (this.player._opt.useWCS) { if (this.webglRender) { this.webglRender.render(buffer); closeVideoFrame(buffer); } else { if (isFunction$1(buffer.createImageBitmap)) { try { buffer.createImageBitmap().then(image => { this.context2D.drawImage(image, 0, 0, this.$videoElement.width, this.$videoElement.height); closeVideoFrame(buffer); }); } catch (e) {} } else { this.context2D.drawImage(buffer, 0, 0, this.$videoElement.width, this.$videoElement.height); closeVideoFrame(buffer); } } } else { if (this.getCanvasType() === CANVAS_RENDER_TYPE.webgl) { try { this.webglRender.renderYUV(this.$videoElement.width, 
this.$videoElement.height, buffer); } catch (e) { this.player.debug.error(`CanvasPlaybackLoader`, `doRender webgl render context is lost ${this.contextGl && this.contextGl.isContextLost()} and error: ${e.toString()}`); } } else if (this.getCanvasType() === CANVAS_RENDER_TYPE.webgpu) { try { if (!this.webGPURender) { this.player.debug.warn('CanvasVideoLoader', 'doRender webgpu render is not init'); return; } this.webGPURender.renderYUV(this.$videoElement.width, this.$videoElement.height, buffer); } catch (e) { this.player.debug.error(`CanvasPlaybackLoader`, `doRender webgpu render and error: ${e.toString()}`); } } } } get rate() { return this.playbackRate; } get fragDuration() { return Math.ceil(1000 / (this.fps * this.playbackRate)); } get bufferSize() { return this.bufferList.length; } getStreamFps() { return this.streamFps; } initFps() { if (!this._hasCalcFps) { this.preFps = clamp(this.player.playback.fps, 1, 100); this.fps = this.preFps; } else { this.player.debug.log(`CanvasPlaybackLoader`, 'initFps, has calc fps'); } } setFps(value) { if (value !== this.fps) { // 如果fps 超过了,则需要 if (value > 100) { this.player.debug.warn('CanvasPlaybackLoader', 'setFps max', value); } if (value < 0) { this.player.debug.warn('CanvasPlaybackLoader', 'setFps min', value); } this.fps = clamp(value, 1, 100); this.player.debug.log('CanvasPlaybackLoader', `setFps ${this.preFps} -> ${this.fps}`); if (this.player.playback.isUseFpsRender) { this._sync(); } } else { this.player.debug.log(`CanvasPlaybackLoader`, `setFps, same fps ${value}`); } } setStreamFps(fps) { this.player.debug.log(`CanvasPlaybackLoader`, 'setStreamFps', fps); this._hasCalcFps = true; this.streamFps = fps; this.preFps = fps; this.setFps(fps); } setRate(value) { if (value !== this.playbackRate) { // this.player.debug.log(`CanvasPlaybackLoader`, 'setRate', value); this.playbackRate = value; if (this.player.playback.isUseFpsRender) { this._sync(); } } } render$2(msg) { if (this._firstTimestamp === null) { this._firstTimestamp = msg.ts; } const data = { tfTs: msg.ts - this._firstTimestamp, ts: msg.ts }; if (msg.videoFrame) { data.buffer = msg.videoFrame; } else { data.buffer = msg.output; } this.bufferList.push(data); this.startRender(); this.player.handleRender(); this.player.playback.updateStats({ ts: msg.ts, tfTs: data.tfTs }); } startRender() { while (true) { if (this.bufferList.length <= 0) { break; } const yuv = this.bufferList.shift(); this._doRender(yuv.buffer); } } pushData(msg) { if (this._firstTimestamp === null) { this._firstTimestamp = msg.ts; } const data = { tfTs: msg.ts - this._firstTimestamp, ts: msg.ts }; if (msg.videoFrame) { data.buffer = msg.videoFrame; } else { data.buffer = msg.output; } // 是否解码器缓冲数据 const isCacheBeforeDecodeForFpsRender = this.player._opt.playbackConfig.isCacheBeforeDecodeForFpsRender; if (!isCacheBeforeDecodeForFpsRender) { // 如果缓冲数据超过 2s 直接加速播放掉。 if (this.bufferSize > this.fps * this.playbackRate * 2) { this.player.debug.warn('CanvasPlaybackLoader', `buffer size is ${this.bufferSize}`); this._doPlay(); } } this.bufferList.push(data); if (!this._hasCalcFps) { const streamFps = calcStreamFpsByBufferList(this.bufferList); if (streamFps !== null) { if (streamFps !== this.preFps) { this.player.debug.log('CanvasPlaybackLoader', `calc fps is ${streamFps} pre fps is ${this.preFps} and updatePreFps`); this.setStreamFps(streamFps); } } } if (!isCacheBeforeDecodeForFpsRender) { // this.player.debug.log(`CanvasPlaybackLoader`, 'pushData', this.bufferSize); // if buffer length const bufferListLength = 
this.bufferList.length; const fps = this.fps * this.playbackRate; const rate = bufferListLength / fps; this.player.debug.log(`CanvasPlaybackLoader`, 'rate is', rate); if (rate <= 1) { this.setFps(this.preFps); } else { this.setFps(this.fps + Math.floor(rate * this.playbackRate)); this.player.debug.warn('CanvasPlaybackLoader', 'rate is', rate, 'fps is', this.fps, 'bufferListLength is', bufferListLength); } } // check bufferList length is more large max buffer size then drop buffer // if (this.bufferList.length) { // // } } initVideo() { if (this.player.playback && this.player.playback.isUseFpsRender) { this._sync(); } this.playing = true; } initVideoDelay() { const delayTime = this.player._opt.playbackDelayTime; if (delayTime > 0) { this.delayTimeout = setTimeout(() => { this.initVideo(); // console.error('initVideoDelay , bufferList length', this.bufferList.length); }, delayTime); } else { this.initVideo(); } } clearView() { super.clearView(); this.contextGl.clear(this.contextGl.COLOR_BUFFER_BIT); } clear() { if (this.player._opt.useWCS) { this.bufferList.forEach(bufferData => { if (bufferData.buffer) { closeVideoFrame(bufferData.buffer); } }); } this.bufferList = []; } resume() { if (this.player.playback.isUseFpsRender) { this._sync(); } this.playing = true; } pause() { if (this.player.playback.isUseFpsRender) { this._stopSync(); } this.playing = false; } } class Video { constructor(player) { const Loader = Video.getLoaderFactory(player._opt); return new Loader(player); } static getLoaderFactory(opt) { if (opt.useMSE) { if (opt.mseUseCanvasRender) { return CanvasVideoLoader; } else { return VideoLoader; } } else if (opt.isHls && isFalse(opt.supportHls265)) { if (opt.useCanvasRender) { return CanvasVideoLoader; } else { return VideoLoader; } } else if (opt.isWebrtc && isFalse(opt.isWebrtcH265)) { if (opt.useCanvasRender) { return CanvasVideoLoader; } else { return VideoLoader; } } else if (opt.isAliyunRtc) { return VideoLoader; } else if (opt.useWCS) { // playback if (opt.playType === PLAY_TYPE.playbackTF) { return CanvasPlaybackLoader; } else { if (!opt.useOffscreen && opt.wcsUseVideoRender) { return VideoLoader; } else { return CanvasVideoLoader; } } } else { // wasm if (opt.playType === PLAY_TYPE.playbackTF) { return CanvasPlaybackLoader; } else { if (opt.wasmUseVideoRender && !opt.useOffscreen) { return VideoLoader; } else { return CanvasVideoLoader; } } } } } class CommonContextLoader extends Emitter { constructor(player) { super(); this.bufferList = []; this.player = player; this.$audio = null; this.scriptNode = null; this.workletProcessorNode = null; this.hasInitScriptNode = false; this.audioContext = new (window.AudioContext || window.webkitAudioContext)({ sampleRate: 48000 }); // this.gainNode = this.audioContext.createGain(); // Get an AudioBufferSourceNode. 
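// (Creating and immediately starting a one-sample silent buffer at construction time is the
// usual trick to "unlock" audio output on browsers with strict autoplay policies, e.g. iOS Safari.)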
// This is the AudioNode to use when we want to play an AudioBuffer const source = this.audioContext.createBufferSource(); // set the buffer in the AudioBufferSourceNode source.buffer = this.audioContext.createBuffer(1, 1, 22050); // connect the AudioBufferSourceNode to the // destination so we can hear the sound source.connect(this.audioContext.destination); // noteOn as start // start the source playing if (source.noteOn) { source.noteOn(0); } else { source.start(0); } this.audioBufferSourceNode = source; // this.mediaStreamAudioDestinationNode = this.audioContext.createMediaStreamDestination(); // // default setting 0 this.gainNode.gain.value = 0; this.playing = false; this.audioInfo = { encTypeCode: '', encType: '', channels: '', sampleRate: '', depth: '' }; this.init = false; this.hasAudio = false; this.audioResumeStateTimeout = null; } destroy() { this.closeAudio(); this.resetInit(); if (this.audioContext) { this.audioContext.close(); this.audioContext = null; } this.gainNode = null; this.hasAudio = false; this.playing = false; if (this.scriptNode) { this.scriptNode.onaudioprocess = noop$3; this.scriptNode = null; } if (this.workletProcessorNode) { this.workletProcessorNode.port.onmessage = noop$3; this.workletProcessorNode = null; } this.clearAudioResumeStateTimeout(); this.audioBufferSourceNode = null; this.mediaStreamAudioDestinationNode = null; this.hasInitScriptNode = false; this.off(); } resetInit() { this.audioInfo = { encTypeCode: '', encType: '', channels: '', sampleRate: '', depth: '' }; this.init = false; } getAudioInfo() { return this.audioInfo; } updateAudioInfo(data) { if (data.encTypeCode) { this.audioInfo.encTypeCode = data.encTypeCode; this.audioInfo.encType = AUDIO_ENC_TYPE[data.encTypeCode]; } if (data.channels) { this.audioInfo.channels = data.channels; } if (data.sampleRate) { this.audioInfo.sampleRate = data.sampleRate; } // if (data.depth) { this.audioInfo.depth = data.depth; } // audio 基本信息 if (this.audioInfo.sampleRate && this.audioInfo.channels && this.audioInfo.encType && !this.init) { this.player.emit(EVENTS.audioInfo, this.audioInfo); this.init = true; } } // get isPlaying() { return this.playing; } get isMute() { return this.gainNode.gain.value === 0; } get volume() { return this.gainNode.gain.value; } get bufferSize() { return this.bufferList.length; } get audioContextState() { let result = null; if (this.audioContext) { result = this.audioContext.state; } return result; } initScriptNode() {} initMobileScriptNode() {} initWorkletScriptNode() {} getEngineType() { return ''; } mute(flag) { if (flag) { if (!this.isMute) { this.player.emit(EVENTS.mute, flag); } this.setVolume(0); this.clear(); } else { if (this.isMute) { this.player.emit(EVENTS.mute, flag); } this.setVolume(this.player.lastVolume || 0.5); } } setVolume(volume) { volume = parseFloat(volume).toFixed(2); if (isNaN(volume)) { return; } this.audioEnabled(true); volume = clamp(volume, 0, 1); this.gainNode.gain.value = volume; // this.gainNode.gain.setValueAtTime(volume, this.audioContext.currentTime); this.player.emit(EVENTS.volumechange, this.player.volume); } closeAudio() { if (this.hasInitScriptNode) { this.scriptNode && this.scriptNode.disconnect(this.gainNode); this.workletProcessorNode && this.workletProcessorNode.disconnect(this.gainNode); if (this.gainNode) { this.gainNode.disconnect(this.mediaStreamAudioDestinationNode); if (!this.$audio) { this.gainNode.disconnect(this.audioContext.destination); } } } this.clear(); } // 是否播放。。。 audioEnabled(flag) { if (flag) { if 
(this.isStateSuspended()) { // resume this.audioContext.resume().then(() => { this.player.emit(EVENTS.audioResumeState, { state: this.audioContextState, isRunning: this.isStateRunning() }); }); this.audioResumeStateTimeout = setTimeout(() => { this.clearAudioResumeStateTimeout(); if (this.isStateSuspended()) { this.player.emit(EVENTS.audioResumeState, { state: this.audioContextState, isRunning: this.isStateRunning() }); } }, 1000); } } else { if (this.isStateRunning()) { // suspend this.audioContext.suspend(); } } } isStateRunning() { return this.audioContextState === 'running'; } isStateSuspended() { return this.audioContextState === 'suspended'; } clearAudioResumeStateTimeout() { if (this.audioResumeStateTimeout) { clearTimeout(this.audioResumeStateTimeout); this.audioResumeStateTimeout = null; } } clear() { this.bufferList = []; } play(buffer, ts) { // empty } pause() { this.playing = false; // this.clear(); } resume() { this.playing = true; } setRate(value) { // empty } getAudioBufferSize() { return 0; } } class Processor { constructor(player, audio, channel, bufferSize) { this.player = player; this.audio = audio; this.channel = channel; this.bufferSize = bufferSize; } // extract(target, numFrames) { let data = this.provide(numFrames); for (let i = 0; i < data.size; i++) { target[i * 2] = data.left[i]; target[i * 2 + 1] = data.right[i]; } this.audio.tempAudioTimestamp = data.ts; return data.size; } provide(sourceSize) { let left = new Float32Array(sourceSize); let right = new Float32Array(sourceSize); let size = 0; let ts = 0; let sourcePosition = 0; let num = sourceSize / this.bufferSize; const bufferList = this.audio.bufferList; if (num && bufferList.length >= num) { try { for (let i = 0; i < num; i++) { const bufferItem = bufferList.shift(); if (this.channel === 2) { left.set(bufferItem.buffer[0], sourcePosition); right.set(bufferItem.buffer[1], sourcePosition); } else { left.set(bufferItem.buffer[0], sourcePosition); right.set(bufferItem.buffer[0], sourcePosition); } sourcePosition += this.bufferSize; ts = bufferItem.ts; } } catch (e) { this.player.debug.warn('Processor', 'provide()', e); left = new Float32Array(0); right = new Float32Array(0); } size = left.length; } return { size, ts, left, right }; } destroy() { this.buffer = null; this.channel = null; } } /** * sound rate */ class FifoSampleBuffer { constructor() { this._vector = new Float32Array(); this._position = 0; this._frameCount = 0; } get vector() { return this._vector; } get position() { return this._position; } get startIndex() { return this._position * 2; } get frameCount() { return this._frameCount; } get endIndex() { return (this._position + this._frameCount) * 2; } clear() { this.receive(this._frameCount); this.rewind(); } put(numFrames) { this._frameCount += numFrames; } putSamples(samples, position) { let numFrames = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; position = position || 0; const sourceOffset = position * 2; if (!(numFrames >= 0)) { numFrames = (samples.length - sourceOffset) / 2; } const numSamples = numFrames * 2; this.ensureCapacity(numFrames + this._frameCount); const destOffset = this.endIndex; this.vector.set(samples.subarray(sourceOffset, sourceOffset + numSamples), destOffset); this._frameCount += numFrames; } putBuffer(buffer, position) { let numFrames = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : 0; position = position || 0; if (!(numFrames >= 0)) { numFrames = buffer.frameCount - position; } this.putSamples(buffer.vector, buffer.position + position, numFrames); } receive(numFrames) { if (!(numFrames >= 0) || numFrames > this._frameCount) { numFrames = this.frameCount; } this._frameCount -= numFrames; this._position += numFrames; } receiveSamples(output) { let numFrames = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; const numSamples = numFrames * 2; const sourceOffset = this.startIndex; output.set(this._vector.subarray(sourceOffset, sourceOffset + numSamples)); this.receive(numFrames); } extract(output) { let position = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; let numFrames = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; const sourceOffset = this.startIndex + position * 2; const numSamples = numFrames * 2; output.set(this._vector.subarray(sourceOffset, sourceOffset + numSamples)); } ensureCapacity() { let numFrames = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; const minLength = parseInt(numFrames * 2); if (this._vector.length < minLength) { const newVector = new Float32Array(minLength); newVector.set(this._vector.subarray(this.startIndex, this.endIndex)); this._vector = newVector; this._position = 0; } else { this.rewind(); } } ensureAdditionalCapacity() { let numFrames = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; this.ensureCapacity(this._frameCount + numFrames); } rewind() { if (this._position > 0) { this._vector.set(this._vector.subarray(this.startIndex, this.endIndex)); this._position = 0; } } } class AbstractFifoSamplePipe { constructor(createBuffers) { if (createBuffers) { this._inputBuffer = new FifoSampleBuffer(); this._outputBuffer = new FifoSampleBuffer(); } else { this._inputBuffer = this._outputBuffer = null; } } get inputBuffer() { return this._inputBuffer; } set inputBuffer(inputBuffer) { this._inputBuffer = inputBuffer; } get outputBuffer() { return this._outputBuffer; } set outputBuffer(outputBuffer) { this._outputBuffer = outputBuffer; } clear() { this._inputBuffer.clear(); this._outputBuffer.clear(); } } class RateTransposer extends AbstractFifoSamplePipe { constructor(createBuffers) { super(createBuffers); this.reset(); this._rate = 1; } set rate(rate) { this._rate = rate; } reset() { this.slopeCount = 0; this.prevSampleL = 0; this.prevSampleR = 0; } clone() { const result = new RateTransposer(); result.rate = this._rate; return result; } process() { const numFrames = this._inputBuffer.frameCount; this._outputBuffer.ensureAdditionalCapacity(numFrames / this._rate + 1); const numFramesOutput = this.transpose(numFrames); this._inputBuffer.receive(); this._outputBuffer.put(numFramesOutput); } transpose() { let numFrames = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : 0; if (numFrames === 0) { return 0; } const src = this._inputBuffer.vector; const srcOffset = this._inputBuffer.startIndex; const dest = this._outputBuffer.vector; const destOffset = this._outputBuffer.endIndex; let used = 0; let i = 0; while (this.slopeCount < 1.0) { dest[destOffset + 2 * i] = (1.0 - this.slopeCount) * this.prevSampleL + this.slopeCount * src[srcOffset]; dest[destOffset + 2 * i + 1] = (1.0 - this.slopeCount) * this.prevSampleR + this.slopeCount * src[srcOffset + 1]; i = i + 1; this.slopeCount += this._rate; } this.slopeCount -= 1.0; if (numFrames !== 1) { out: while (true) { while (this.slopeCount > 1.0) { this.slopeCount -= 1.0; used = used + 1; if (used >= numFrames - 1) { break out; } } const srcIndex = srcOffset + 2 * used; dest[destOffset + 2 * i] = (1.0 - this.slopeCount) * src[srcIndex] + this.slopeCount * src[srcIndex + 2]; dest[destOffset + 2 * i + 1] = (1.0 - this.slopeCount) * src[srcIndex + 1] + this.slopeCount * src[srcIndex + 3]; i = i + 1; this.slopeCount += this._rate; } } this.prevSampleL = src[srcOffset + 2 * numFrames - 2]; this.prevSampleR = src[srcOffset + 2 * numFrames - 1]; return i; } } class FilterSupport { constructor(pipe) { this._pipe = pipe; } get pipe() { return this._pipe; } get inputBuffer() { return this._pipe.inputBuffer; } get outputBuffer() { return this._pipe.outputBuffer; } fillInputBuffer() { throw new Error('fillInputBuffer() not overridden'); } fillOutputBuffer() { let numFrames = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; while (this.outputBuffer.frameCount < numFrames) { const numInputFrames = 8192 * 2 - this.inputBuffer.frameCount; this.fillInputBuffer(numInputFrames); if (this.inputBuffer.frameCount < 8192 * 2) { break; } this._pipe.process(); } } clear() { this._pipe.clear(); } } const noop$2 = function () { return; }; class SimpleFilter extends FilterSupport { constructor(sourceSound, pipe) { let callback = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : noop$2; super(pipe); this.callback = callback; this.sourceSound = sourceSound; this.historyBufferSize = 22050; this._sourcePosition = 0; this.outputBufferPosition = 0; this._position = 0; } get position() { return this._position; } set position(position) { if (position > this._position) { throw new RangeError('New position may not be greater than current position'); } const newOutputBufferPosition = this.outputBufferPosition - (this._position - position); if (newOutputBufferPosition < 0) { throw new RangeError('New position falls outside of history buffer'); } this.outputBufferPosition = newOutputBufferPosition; this._position = position; } get sourcePosition() { return this._sourcePosition; } set sourcePosition(sourcePosition) { this.clear(); this._sourcePosition = sourcePosition; } onEnd() { this.callback(); } fillInputBuffer() { let numFrames = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; const samples = new Float32Array(numFrames * 2); const numFramesExtracted = this.sourceSound.extract(samples, numFrames, this._sourcePosition); this._sourcePosition += numFramesExtracted; this.inputBuffer.putSamples(samples, 0, numFramesExtracted); } extract(target) { let numFrames = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : 0; this.fillOutputBuffer(this.outputBufferPosition + numFrames); const numFramesExtracted = Math.min(numFrames, this.outputBuffer.frameCount - this.outputBufferPosition); this.outputBuffer.extract(target, this.outputBufferPosition, numFramesExtracted); const currentFrames = this.outputBufferPosition + numFramesExtracted; this.outputBufferPosition = Math.min(this.historyBufferSize, currentFrames); this.outputBuffer.receive(Math.max(currentFrames - this.historyBufferSize, 0)); this._position += numFramesExtracted; return numFramesExtracted; } handleSampleData(event) { this.extract(event.data, 4096); } clear() { super.clear(); this.outputBufferPosition = 0; } } const USE_AUTO_SEQUENCE_LEN = 0; const DEFAULT_SEQUENCE_MS = USE_AUTO_SEQUENCE_LEN; const USE_AUTO_SEEKWINDOW_LEN = 0; const DEFAULT_SEEKWINDOW_MS = USE_AUTO_SEEKWINDOW_LEN; const DEFAULT_OVERLAP_MS = 8; const _SCAN_OFFSETS = [[124, 186, 248, 310, 372, 434, 496, 558, 620, 682, 744, 806, 868, 930, 992, 1054, 1116, 1178, 1240, 1302, 1364, 1426, 1488, 0], [-100, -75, -50, -25, 25, 50, 75, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [-20, -15, -10, -5, 5, 10, 15, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [-4, -3, -2, -1, 1, 2, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]; const AUTOSEQ_TEMPO_LOW = 0.5; const AUTOSEQ_TEMPO_TOP = 2.0; const AUTOSEQ_AT_MIN = 125.0; const AUTOSEQ_AT_MAX = 50.0; const AUTOSEQ_K = (AUTOSEQ_AT_MAX - AUTOSEQ_AT_MIN) / (AUTOSEQ_TEMPO_TOP - AUTOSEQ_TEMPO_LOW); const AUTOSEQ_C = AUTOSEQ_AT_MIN - AUTOSEQ_K * AUTOSEQ_TEMPO_LOW; const AUTOSEEK_AT_MIN = 25.0; const AUTOSEEK_AT_MAX = 15.0; const AUTOSEEK_K = (AUTOSEEK_AT_MAX - AUTOSEEK_AT_MIN) / (AUTOSEQ_TEMPO_TOP - AUTOSEQ_TEMPO_LOW); const AUTOSEEK_C = AUTOSEEK_AT_MIN - AUTOSEEK_K * AUTOSEQ_TEMPO_LOW; class Stretch extends AbstractFifoSamplePipe { constructor(createBuffers) { super(createBuffers); this._quickSeek = true; this.midBufferDirty = false; this.midBuffer = null; this.overlapLength = 0; this.autoSeqSetting = true; this.autoSeekSetting = true; this._tempo = 1; this.setParameters(44100, DEFAULT_SEQUENCE_MS, DEFAULT_SEEKWINDOW_MS, DEFAULT_OVERLAP_MS); } clear() { super.clear(); this.clearMidBuffer(); } clearMidBuffer() { if (this.midBufferDirty) { this.midBufferDirty = false; this.midBuffer = null; } } setParameters(sampleRate, sequenceMs, seekWindowMs, overlapMs) { if (sampleRate > 0) { this.sampleRate = sampleRate; } if (overlapMs > 0) { this.overlapMs = overlapMs; } if (sequenceMs > 0) { this.sequenceMs = sequenceMs; this.autoSeqSetting = false; } else { this.autoSeqSetting = true; } if (seekWindowMs > 0) { this.seekWindowMs = seekWindowMs; this.autoSeekSetting = false; } else { this.autoSeekSetting = true; } this.calculateSequenceParameters(); this.calculateOverlapLength(this.overlapMs); this.tempo = this._tempo; } set tempo(newTempo) { let intskip; this._tempo = newTempo; this.calculateSequenceParameters(); this.nominalSkip = this._tempo * (this.seekWindowLength - this.overlapLength); this.skipFract = 0; intskip = Math.floor(this.nominalSkip + 0.5); this.sampleReq = Math.max(intskip + this.overlapLength, this.seekWindowLength) + this.seekLength; } get tempo() { return this._tempo; } get inputChunkSize() { return this.sampleReq; } get outputChunkSize() { return this.overlapLength + Math.max(0, this.seekWindowLength - 2 * this.overlapLength); } calculateOverlapLength() { let overlapInMsec = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : 0; let newOvl; newOvl = this.sampleRate * overlapInMsec / 1000; newOvl = newOvl < 16 ? 16 : newOvl; newOvl -= newOvl % 8; this.overlapLength = newOvl; this.refMidBuffer = new Float32Array(this.overlapLength * 2); this.midBuffer = new Float32Array(this.overlapLength * 2); } checkLimits(x, mi, ma) { return x < mi ? mi : x > ma ? ma : x; } calculateSequenceParameters() { let seq; let seek; if (this.autoSeqSetting) { seq = AUTOSEQ_C + AUTOSEQ_K * this._tempo; seq = this.checkLimits(seq, AUTOSEQ_AT_MAX, AUTOSEQ_AT_MIN); this.sequenceMs = Math.floor(seq + 0.5); } if (this.autoSeekSetting) { seek = AUTOSEEK_C + AUTOSEEK_K * this._tempo; seek = this.checkLimits(seek, AUTOSEEK_AT_MAX, AUTOSEEK_AT_MIN); this.seekWindowMs = Math.floor(seek + 0.5); } this.seekWindowLength = Math.floor(this.sampleRate * this.sequenceMs / 1000); this.seekLength = Math.floor(this.sampleRate * this.seekWindowMs / 1000); } set quickSeek(enable) { this._quickSeek = enable; } clone() { const result = new Stretch(); result.tempo = this._tempo; result.setParameters(this.sampleRate, this.sequenceMs, this.seekWindowMs, this.overlapMs); return result; } seekBestOverlapPosition() { return this._quickSeek ? this.seekBestOverlapPositionStereoQuick() : this.seekBestOverlapPositionStereo(); } seekBestOverlapPositionStereo() { let bestOffset; let bestCorrelation; let correlation; let i = 0; this.preCalculateCorrelationReferenceStereo(); bestOffset = 0; bestCorrelation = Number.MIN_VALUE; for (; i < this.seekLength; i = i + 1) { correlation = this.calculateCrossCorrelationStereo(2 * i, this.refMidBuffer); if (correlation > bestCorrelation) { bestCorrelation = correlation; bestOffset = i; } } return bestOffset; } seekBestOverlapPositionStereoQuick() { let bestOffset; let bestCorrelation; let correlation; let scanCount = 0; let correlationOffset; let tempOffset; this.preCalculateCorrelationReferenceStereo(); bestCorrelation = Number.MIN_VALUE; bestOffset = 0; correlationOffset = 0; tempOffset = 0; for (; scanCount < 4; scanCount = scanCount + 1) { let j = 0; while (_SCAN_OFFSETS[scanCount][j]) { tempOffset = correlationOffset + _SCAN_OFFSETS[scanCount][j]; if (tempOffset >= this.seekLength) { break; } correlation = this.calculateCrossCorrelationStereo(2 * tempOffset, this.refMidBuffer); if (correlation > bestCorrelation) { bestCorrelation = correlation; bestOffset = tempOffset; } j = j + 1; } correlationOffset = bestOffset; } return bestOffset; } preCalculateCorrelationReferenceStereo() { let i = 0; let context; let temp; for (; i < this.overlapLength; i = i + 1) { temp = i * (this.overlapLength - i); context = i * 2; this.refMidBuffer[context] = this.midBuffer[context] * temp; this.refMidBuffer[context + 1] = this.midBuffer[context + 1] * temp; } } calculateCrossCorrelationStereo(mixingPosition, compare) { const mixing = this._inputBuffer.vector; mixingPosition += this._inputBuffer.startIndex; let correlation = 0; let i = 2; const calcLength = 2 * this.overlapLength; let mixingOffset; for (; i < calcLength; i = i + 2) { mixingOffset = i + mixingPosition; correlation += mixing[mixingOffset] * compare[i] + mixing[mixingOffset + 1] * compare[i + 1]; } return correlation; } overlap(overlapPosition) { this.overlapStereo(2 * overlapPosition); } overlapStereo(inputPosition) { const input = this._inputBuffer.vector; inputPosition += this._inputBuffer.startIndex; const output = this._outputBuffer.vector; const outputPosition = this._outputBuffer.endIndex; let i = 0; let context; let tempFrame; const frameScale = 1 / 
this.overlapLength; let fi; let inputOffset; let outputOffset; for (; i < this.overlapLength; i = i + 1) { tempFrame = (this.overlapLength - i) * frameScale; fi = i * frameScale; context = 2 * i; inputOffset = context + inputPosition; outputOffset = context + outputPosition; output[outputOffset + 0] = input[inputOffset + 0] * fi + this.midBuffer[context + 0] * tempFrame; output[outputOffset + 1] = input[inputOffset + 1] * fi + this.midBuffer[context + 1] * tempFrame; } } process() { let offset; let temp; let overlapSkip; if (this.midBuffer === null) { if (this._inputBuffer.frameCount < this.overlapLength) { return; } this.midBuffer = new Float32Array(this.overlapLength * 2); this._inputBuffer.receiveSamples(this.midBuffer, this.overlapLength); } while (this._inputBuffer.frameCount >= this.sampleReq) { offset = this.seekBestOverlapPosition(); this._outputBuffer.ensureAdditionalCapacity(this.overlapLength); this.overlap(Math.floor(offset)); this._outputBuffer.put(this.overlapLength); temp = this.seekWindowLength - 2 * this.overlapLength; if (temp > 0) { this._outputBuffer.putBuffer(this._inputBuffer, offset + this.overlapLength, temp); } const start = this._inputBuffer.startIndex + 2 * (offset + this.seekWindowLength - this.overlapLength); this.midBuffer.set(this._inputBuffer.vector.subarray(start, start + 2 * this.overlapLength)); this.skipFract += this.nominalSkip; overlapSkip = Math.floor(this.skipFract); this.skipFract -= overlapSkip; this._inputBuffer.receive(overlapSkip); } } } const testFloatEqual = function (a, b) { return (a > b ? a - b : b - a) > 1e-10; }; class SoundTouch { constructor() { this.transposer = new RateTransposer(false); this.stretch = new Stretch(false); this._inputBuffer = new FifoSampleBuffer(); this._intermediateBuffer = new FifoSampleBuffer(); this._outputBuffer = new FifoSampleBuffer(); this._rate = 0; this._tempo = 0; this.virtualPitch = 1.0; this.virtualRate = 1.0; this.virtualTempo = 1.0; this.calculateEffectiveRateAndTempo(); } clear() { this.transposer.clear(); this.stretch.clear(); } clone() { const result = new SoundTouch(); result.rate = this.rate; result.tempo = this.tempo; return result; } get rate() { return this._rate; } set rate(rate) { this.virtualRate = rate; this.calculateEffectiveRateAndTempo(); } set rateChange(rateChange) { this._rate = 1.0 + 0.01 * rateChange; } get tempo() { return this._tempo; } set tempo(tempo) { this.virtualTempo = tempo; this.calculateEffectiveRateAndTempo(); } set tempoChange(tempoChange) { this.tempo = 1.0 + 0.01 * tempoChange; } set pitch(pitch) { this.virtualPitch = pitch; this.calculateEffectiveRateAndTempo(); } set pitchOctaves(pitchOctaves) { this.pitch = Math.exp(0.69314718056 * pitchOctaves); this.calculateEffectiveRateAndTempo(); } set pitchSemitones(pitchSemitones) { this.pitchOctaves = pitchSemitones / 12.0; } get inputBuffer() { return this._inputBuffer; } get outputBuffer() { return this._outputBuffer; } calculateEffectiveRateAndTempo() { const previousTempo = this._tempo; const previousRate = this._rate; this._tempo = this.virtualTempo / this.virtualPitch; this._rate = this.virtualRate * this.virtualPitch; if (testFloatEqual(this._tempo, previousTempo)) { this.stretch.tempo = this._tempo; } if (testFloatEqual(this._rate, previousRate)) { this.transposer.rate = this._rate; } if (this._rate > 1.0) { if (this._outputBuffer != this.transposer.outputBuffer) { this.stretch.inputBuffer = this._inputBuffer; this.stretch.outputBuffer = this._intermediateBuffer; this.transposer.inputBuffer = 
this._intermediateBuffer; this.transposer.outputBuffer = this._outputBuffer; } } else { if (this._outputBuffer != this.stretch.outputBuffer) { this.transposer.inputBuffer = this._inputBuffer; this.transposer.outputBuffer = this._intermediateBuffer; this.stretch.inputBuffer = this._intermediateBuffer; this.stretch.outputBuffer = this._outputBuffer; } } } process() { if (this._rate > 1.0) { this.stretch.process(); this.transposer.process(); } else { this.transposer.process(); this.stretch.process(); } } } class RateProcessor { constructor(player, audio, source) { this.player = player; this.audio = audio; this.soundTouch = new SoundTouch(); this.soundTouch.tempo = 1; this.soundTouch.rate = 1; this.filter = new SimpleFilter(source, this.soundTouch); } setRate(value) { if (value !== this.soundTouch.rate) { this.soundTouch.tempo = value; } } provide(size) { let target = new Float32Array(size * 2); let framesExtracted = this.filter.extract(target, size); let left = new Float32Array(framesExtracted); let right = new Float32Array(framesExtracted); // 缩小 for (let i = 0; i < framesExtracted; i++) { left[i] = target[i * 2]; right[i] = target[i * 2 + 1]; } // return { size: framesExtracted, left, right, ts: this.audio.tempAudioTimestamp || 0 }; } destroy() { if (this.soundTouch) { this.soundTouch.clear(); this.soundTouch = null; } if (this.filter) { this.filter = null; } } } class AudioContextLoader extends CommonContextLoader { constructor(player) { super(player); // default is 1 this.defaultPlaybackRate = 1; this.playbackRate = 1; this.rateProcessor = null; this.processor = null; this.scriptNodeInterval = null; this.engineType = this.getAutoAudioEngineType(); this.audioBufferSize = this.getAudioBufferSizeByType(); this.$audio = null; this._delayPlay = false; this.eventListenList = []; this.workletUrl = null; this.clearWorkletUrlTimeout = null; // support mobile(ipad) lock screen play // https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/play if (this.player._opt.supportLockScreenPlayAudio && isIOS()) { this.$audio = document.createElement("audio"); Object.assign(this.$audio.style, { position: "absolute", left: "-100%", top: "-100%" }); if (player.$container) { player.$container.appendChild(this.$audio); } else { document.body.appendChild(this.$audio); } this._bindAudioProxy(); this.player.debug.log('AudioContext', `create audio element`); } this.scriptStartTime = 0; this.player.debug.log('AudioContext', 'init', `engineType: ${this.engineType}, audioBufferSize: ${this.audioBufferSize}`); } // destroy() { super.destroy(); if (this.workletUrl) { URL.revokeObjectURL(this.workletUrl); this.workletUrl = null; } if (this.clearWorkletUrlTimeout) { clearTimeout(this.clearWorkletUrlTimeout); this.clearWorkletUrlTimeout = null; } if (this.eventListenList) { this.eventListenList.forEach(item => { item(); }); this.eventListenList = []; } if (this.$audio) { this.$audio.pause(); this.$audio.srcObject = null; if (this.$audio.parentNode) { this.$audio.parentNode.removeChild(this.$audio); } this.$audio = null; } if (this.processor) { this.processor.destroy(); this.processor = null; } if (this.rateProcessor) { this.rateProcessor.destroy(); this.rateProcessor = null; } if (this.scriptNodeInterval) { clearInterval(this.scriptNodeInterval); this.scriptNodeInterval = null; } this.defaultPlaybackRate = 1; this.playbackRate = 1; this.scriptStartTime = 0; this.audioBufferSize = 0; this.engineType = AUDIO_ENGINE_TYPE.script; this.player.debug.log('AudioContext', 'destroy'); } // is playing isAudioPlaying() { 
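// Same check as the <video> isPlaying() above: the backing <audio> element only counts as playing
// when it is not paused, not ended, has a non-zero playbackRate and a readyState above HAVE_NOTHING.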
return this.$audio && isFalse(this.$audio.paused) && isFalse(this.$audio.ended) && this.$audio.playbackRate !== 0 && this.$audio.readyState !== 0; } _bindAudioProxy() { const { proxy } = this.player.events; const canplayProxyDestroy = proxy(this.$audio, 'canplay', () => { this.player.debug.log('AudioContext', 'canplay'); if (this._delayPlay) { this._audioElementPlay(); } }); this.eventListenList.push(canplayProxyDestroy); } _getAudioElementReadyState() { let result = 0; if (this.$audio) { result = this.$audio.readyState; } return result; } audioElementPlay() { if (this.$audio) { const readyState = this._getAudioElementReadyState(); this.player.debug.log('AudioContext', `play and readyState: ${readyState}`); // wechat in ios not emit canplay 事件 if (readyState === 0 && !isWeChatInIOS()) { this.player.debug.warn('AudioContext', 'readyState is 0 and set _delayPlay to true'); this._delayPlay = true; return; } this._audioElementPlay(); } } _audioElementPlay() { this.$audio && this.$audio.play().then(() => { this._delayPlay = false; this.player.debug.log('AudioContext', '_audioElementPlay success'); setTimeout(() => { if (!this.isAudioPlaying()) { this.player.debug.warn('AudioContext', `play failed and retry play`); this._audioElementPlay(); } }, 100); if (this.isAudioPlaying()) { this.player.debug.log('AudioContext', `play success and remove document click event listener`); document.removeEventListener('click', this._audioElementPlay.bind(this)); } }).catch(e => { this.player.debug.error('AudioContext', '_audioElementPlay error', e); document.addEventListener('click', this._audioElementPlay.bind(this)); }); } getAudioBufferSize() { return this.audioBufferSize; } get oneBufferDuration() { return this.audioBufferSize / this.audioContext.sampleRate * 1000; // ms } get isActiveEngineType() { return this.engineType === AUDIO_ENGINE_TYPE.active; } initProcessor() { this.processor = new Processor(this.player, this, this.audioInfo.channels, this.audioBufferSize); this.rateProcessor = new RateProcessor(this.player, this, this.processor); } getAutoAudioEngineType() { let result = this.player._opt.audioEngine || AUDIO_ENGINE_TYPE.script; const _autoCalcAudioEngine = () => { if (isWeChatInAndroid()) { // wechat android result = AUDIO_ENGINE_TYPE.active; } else if (isIOS() && this.player._opt.supportLockScreenPlayAudio) { result = AUDIO_ENGINE_TYPE.script; } else if (isInHttps() && this.supportAudioWorklet()) { result = AUDIO_ENGINE_TYPE.worklet; } else { result = AUDIO_ENGINE_TYPE.script; } }; if (this.player._opt.audioEngine) { if (this.player._opt.audioEngine === AUDIO_ENGINE_TYPE.worklet && isInHttps()) { result = AUDIO_ENGINE_TYPE.worklet; } else if (this.player._opt.audioEngine === AUDIO_ENGINE_TYPE.active) { result = AUDIO_ENGINE_TYPE.active; } else if (this.player._opt.audioEngine === AUDIO_ENGINE_TYPE.script) { result = AUDIO_ENGINE_TYPE.script; } else { // auto _autoCalcAudioEngine(); } } else { _autoCalcAudioEngine(); } return result; } getAudioBufferSizeByType() { const engineType = this.engineType; this.player._opt.hasVideo; const weiXinInAndroidAudioBufferSize = this.player._opt.weiXinInAndroidAudioBufferSize; if (engineType === AUDIO_ENGINE_TYPE.worklet) { return 1024; } else if (engineType === AUDIO_ENGINE_TYPE.active) { return weiXinInAndroidAudioBufferSize || 4800; } else if (engineType === AUDIO_ENGINE_TYPE.script) { return 1024; } else { return 1024; } } supportAudioWorklet() { return this.audioContext && this.audioContext.audioWorklet; } // initScriptNode() { this.playing = true; if 
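/*
 * initScriptNode() is the single entry point that wires the audio output path. It returns early if
 * a node already exists, builds the Processor/RateProcessor pair, then picks one of three engines:
 * AudioWorklet ("worklet"), a setInterval-driven AudioBufferSourceNode scheduler ("active", used
 * for WeChat on Android with a large 4800-sample buffer), or a ScriptProcessorNode ("script", the
 * general fallback). Finally it starts the hidden proxy <audio> element, if one was created, so
 * audio keeps playing on iOS behind a locked screen. Which engine is used can be forced through the
 * player's audioEngine option (read above in getAutoAudioEngineType()); how that option reaches the
 * public constructor is assumed here rather than shown at this point in the bundle.
 */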
(this.hasInitScriptNode) { return; } this.initProcessor(); if (this.engineType === AUDIO_ENGINE_TYPE.worklet) { this.initWorkletScriptNode(); } else if (this.engineType === AUDIO_ENGINE_TYPE.active) { this.initIntervalScriptNode(); } else if (this.engineType === AUDIO_ENGINE_TYPE.script) { this.initProcessScriptNode(); } this.audioElementPlay(); } getEngineType() { return this.engineType; } isPlaybackRateSpeed() { return this.playbackRate > this.defaultPlaybackRate; } initProcessScriptNode() { const scriptNode = this.audioContext.createScriptProcessor(this.audioBufferSize, 0, this.audioInfo.channels); // tips: if audio isStateSuspended onaudioprocess method not working scriptNode.onaudioprocess = audioProcessingEvent => { const outputBuffer = audioProcessingEvent.outputBuffer; this.handleScriptNodeCallback(outputBuffer); }; scriptNode.connect(this.gainNode); this.scriptNode = scriptNode; this.gainNode.connect(this.mediaStreamAudioDestinationNode); if (this.$audio) { this.$audio.srcObject = this.mediaStreamAudioDestinationNode.stream; } else { this.gainNode.connect(this.audioContext.destination); } this.hasInitScriptNode = true; } initIntervalScriptNode() { this.scriptStartTime = 0; // 1000 * 4800 / 48000 = 100ms // 1000 * 1600 / 1600 = 100ms const intervalTime = 1000 * this.audioBufferSize / this.audioContext.sampleRate; this.scriptNodeInterval = setInterval(() => { if (this.bufferList.length === 0 || isFalse(this.playing) || this.isMute) { if (this.playing && isFalse(this.isMute)) { this.player.debug.log('AudioContext', `interval script node and bufferList is ${this.bufferList.length} or playing is ${this.playing}`); } return; } const audioSource = this.audioContext.createBufferSource(); const outputBuffer = this.audioContext.createBuffer(this.audioInfo.channels, this.audioBufferSize, this.audioContext.sampleRate); this.handleScriptNodeCallback(outputBuffer, () => { if (this.scriptStartTime < this.audioContext.currentTime) { this.player.debug.log('AudioContext', `script start time ${this.scriptStartTime} is less than current time ${this.audioContext.currentTime}`); this.scriptStartTime = this.audioContext.currentTime; } audioSource.buffer = outputBuffer; audioSource.connect(this.gainNode); audioSource.start(this.scriptStartTime); this.scriptStartTime += outputBuffer.duration; }); }, intervalTime); this.gainNode.connect(this.mediaStreamAudioDestinationNode); if (this.$audio) { this.$audio.srcObject = this.mediaStreamAudioDestinationNode.stream; } else { this.gainNode.connect(this.audioContext.destination); } this.hasInitScriptNode = true; } initWorkletScriptNode() { function audiWorkletProcessor() { class WorkletProcessor extends AudioWorkletProcessor { constructor() { super(); this.audioBufferSize = 1024; this.start = false; this.channels = 1; this.samplesArray = []; this.offset = 0; this.state = 0; this.port.onmessage = e => { // console.log('WorkletProcessor onmessage', e.data); if (e.data.message === "init") { this.audioBufferSize = e.data.audioBufferSize; this.start = e.data.start; this.channels = e.data.channels; this.state = 0; this.offset = 0; this.samplesArray = []; } else if (e.data.message === "stop") { this.state = 0; this.start = false; this.offset = 0; this.samplesArray = []; } else if (e.data.message === "data") { this.samplesArray.push(e.data.buffer); } else if (e.data.message === "zero") { this.samplesArray.push({ left: new Float32Array(this.audioBufferSize).fill(0), right: new Float32Array(this.audioBufferSize).fill(0) }); } }; } process(inputs, outputs, parameters) { 
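/*
 * process() runs once per 128-frame render quantum. It is a small state machine:
 *   state 0: starved, output stays silent;
 *   state 1: buffering, wait until at least 4 chunks have arrived from the main thread;
 *   state 2: playing, copy from the head chunk at the current offset.
 * The offset advances by 128 frames per call; when it reaches audioBufferSize the consumed chunk is
 * shifted off the queue, and if the queue is then empty the processor drops back to state 0.
 * Whenever the offset is back at 0 it posts a "beep" message, which the main thread treats as a
 * request to push the next chunk (see the port.onmessage handler installed further down).
 */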
const outputLeft = outputs[0][0]; const outputRight = outputs[0][1]; if (this.offset === 0) { this.port.postMessage({ message: "beep" }); } if (this.state === 0) { this.state = 1; } else if (this.state === 1 && this.samplesArray.length >= 4) { this.state = 2; } else if (this.state === 2) { const bufferItem = this.samplesArray[0]; for (let i = 0; i < outputLeft.length; i++) { if (this.channels === 1) { outputLeft[i] = bufferItem.left[i + this.offset]; } else if (this.channels === 2) { outputLeft[i] = bufferItem.left[i + this.offset]; if (outputRight) { outputRight[i] = bufferItem.right[i + this.offset]; } } } } else { if (this.channels === 1) { outputLeft.fill(0); } else if (this.channels === 2) { outputLeft.fill(0); if (outputRight) { outputRight.fill(0); } } } this.offset += 128; if (this.offset === this.audioBufferSize) { this.offset = 0; if (this.state === 2) { this.samplesArray.shift(); } if (this.samplesArray.length === 0) { this.state = 0; } } return this.start; } } registerProcessor('worklet-processor', WorkletProcessor); } let workletUrl = createWorkletModuleUrl(audiWorkletProcessor); this.workletUrl = workletUrl; this.audioContext.audioWorklet.addModule(workletUrl).then(() => { if (this.player.isDestroyedOrClosed()) { this.player.debug.log('AudioContext', 'initWorkletScriptNode() player is destroyed'); return; } if (!this.audioContext) { this.player.debug.warn('AudioContext', 'initWorkletScriptNode audioContext is null'); return; } let outputChannelCount = [1]; if (this.audioInfo.channels === 2) { outputChannelCount = [1, 1]; } try { this.workletProcessorNode = new AudioWorkletNode(this.audioContext, "worklet-processor", { numberOfOutputs: this.audioInfo.channels, outputChannelCount }); } catch (e) { // DOMException: Failed to construct 'AudioWorkletNode': AudioWorkletNode cannot be created: // The node name 'worklet-processor' is not defined in AudioWorkletGlobalScope. this.player.debug.error('AudioContext', 'initWorkletScriptNode error', e); this.workletProcessorNode = null; this.tierDownToProcessScript(); } if (!this.workletProcessorNode) { return; } this.workletProcessorNode.connect(this.gainNode); this.gainNode.connect(this.mediaStreamAudioDestinationNode); if (this.$audio) { this.$audio.srcObject = this.mediaStreamAudioDestinationNode.stream; } else { this.gainNode.connect(this.audioContext.destination); } this.hasInitScriptNode = true; this.workletProcessorNode.port.postMessage({ message: "init", audioBufferSize: this.audioBufferSize, start: true, channels: this.audioInfo.channels }); this.workletProcessorNode.port.onmessage = e => { // console.log('workletProcessorNode onmessage', e.data); if (this.workletProcessorNode) { if (this.audioContext) { this.handleScriptNodeCallback(this.workletProcessorNode, null, true); } else { this.workletProcessorNode.port.postMessage({ message: "zero" }); } } else { this.player.debug.error('AudioContext', 'workletProcessorNode is null'); } }; }); this.clearWorkletUrlTimeout = setTimeout(() => { URL.revokeObjectURL(this.workletUrl); this.workletUrl = null; this.clearWorkletUrlTimeout = null; }, URL_OBJECT_CLEAR_TIME); } tierDownToProcessScript() { this.player.debug.log('AudioContext', 'tierDownToProcessScript'); this.engineType = AUDIO_ENGINE_TYPE.script; this.audioBufferSize = this.getAudioBufferSizeByType(); this.initProcessScriptNode(); this.audioElementPlay(); } handleScriptNodeCallback(outputBuffer, cb) { let isWorklet = arguments.length > 2 && arguments[2] !== undefined ? 
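/*
 * handleScriptNodeCallback(outputBuffer, cb, isWorklet) serves every engine: for the script and
 * interval engines outputBuffer is a real AudioBuffer that is filled in place, while for the
 * worklet engine the first argument is actually the AudioWorkletNode and data is posted to its
 * port instead ("data" with a {left, right} chunk, or "zero" for silence). Before pulling samples
 * it applies the audio/video sync gate: when audio runs ahead of video by more than
 * syncAudioAndVideoDiff it outputs silence for this quantum so video can catch up, and when the
 * buffer is drained it likewise outputs silence rather than stalling the audio graph.
 */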
arguments[2] : false; cb = cb || noop$3; let workletProcessorNode; let outputBufferLength = outputBuffer.length; if (isWorklet) { workletProcessorNode = outputBuffer; outputBufferLength = this.audioBufferSize; } const channels = this.audioInfo.channels; if (this.bufferList.length && this.playing) { this.player._opt; // 窗口隐藏,不触发音视频同步逻辑。 if (this.player.openSyncAudioAndVideo() && isTrue(this.player.visibility)) { this.calcPlaybackRateBySync(); const diff = this.player.getAudioSyncVideoDiff(); // audio > video then wait video if (diff > this.player._opt.syncAudioAndVideoDiff) { this.player.debug.warn('AudioContext', `audioSyncVideoOption more than diff :${diff}, waiting and bufferList is ${this.bufferList.length}`); // empty audio if (isWorklet) { workletProcessorNode.port.postMessage({ message: "zero" }); } else { this.fillScriptNodeOutputBuffer(outputBuffer, channels); } cb(); return; } } let bufferItem = this._provide(outputBufferLength); if (bufferItem.size === 0) { // this.player.debug.warn('AudioContext', `bufferList size is ${this.bufferList.length} outputBufferLength is ${outputBufferLength},and bufferItem.size is 0`) this.player.debug.warn('AudioContext', `bufferList size is ${this.bufferList.length} outputBufferLength is ${outputBufferLength},and bufferItem.size is 0`); // empty audio if (isWorklet) { workletProcessorNode.port.postMessage({ message: "zero" }); } else { this.fillScriptNodeOutputBuffer(outputBuffer, channels); } cb(); return; } // update audio time stamp if (bufferItem && bufferItem.ts) { this.player.audioTimestamp = bufferItem.ts; } if (isWorklet) { workletProcessorNode.port.postMessage({ message: "data", buffer: bufferItem }); } else { this.fillScriptNodeOutputBuffer(outputBuffer, channels, bufferItem); } cb(); } else { if (this.bufferList.length === 0 && this.playing && isFalse(this.isMute)) { this.player.debug.warn('AudioContext', `bufferList size is 0 and outputBufferLength is ${outputBufferLength}`); } if (isWorklet) { workletProcessorNode.port.postMessage({ message: "zero" }); } else { this.fillScriptNodeOutputBuffer(outputBuffer, channels); } cb(); } } fillScriptNodeOutputBuffer(outputBuffer, channels, bufferItem) { if (channels === 1) { const leftBuffer = outputBuffer.getChannelData(0); if (bufferItem) { if (bufferItem.size === 0) { leftBuffer.fill(0); } else { leftBuffer.set(bufferItem.left); } } else { leftBuffer.fill(0); } } else if (channels === 2) { const leftBuffer = outputBuffer.getChannelData(0); const rightBuffer = outputBuffer.getChannelData(1); if (bufferItem) { if (bufferItem.size === 0) { leftBuffer.fill(0); rightBuffer.fill(0); } else { leftBuffer.set(bufferItem.left); rightBuffer.set(bufferItem.right); } } else { leftBuffer.fill(0); rightBuffer.fill(0); } } } // play(buffer, ts) { // if is mute if (this.isMute) { return; } if (!this.hasInitScriptNode) { this.player.debug.warn('AudioContext', 'play has not init script node'); return; } this.hasAudio = true; this.player.latestAudioTimestamp = ts; this.bufferList.push({ buffer, ts }); // player audio if (isFalse(this.player.openSyncAudioAndVideo())) { this.calcPlaybackRateByBuffer(); } } // 音视频同步 calcPlaybackRateBySync() { if (this.isMute) { return; } if (!this.playing) { return; } const audioMaxVideoDiff = AIDIO_MAX_VIDEO_DIFF; const dropMaxSize = Math.floor(audioMaxVideoDiff / this.oneBufferDuration); // more than max size and drop if (this.bufferList.length > dropMaxSize) { this.player.debug.warn('AudioContext', `bufferList length ${this.bufferList.length} more than ${dropMaxSize}, and drop`); 
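/*
 * Hitting this drop branch means the queued audio already represents more playback time than
 * AIDIO_MAX_VIDEO_DIFF allows, so the whole queue is flushed instead of being sped up.
 * dropMaxSize is a count of buffers: floor(maxDiffMs / oneBufferDuration), where oneBufferDuration
 * is audioBufferSize / sampleRate * 1000. For illustration, with the 1024-sample worklet/script
 * buffer on a 48 kHz AudioContext one buffer is about 21.3 ms, so a 5000 ms cap (the value the
 * buffer-based variant below uses) works out to floor(5000 / 21.3) = 234 queued buffers.
 */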
this.clear(); return; } const diff = this.player.getAudioSyncVideoDiff(); if (this.getEngineType() === AUDIO_ENGINE_TYPE.active) { // audio less than video (syncAudioAndVideoDiff) and if (diff < -this.player._opt.syncAudioAndVideoDiff) { this.player.debug.warn('AudioContext', `engine active , audioSyncVideoOption ${-this.player._opt.syncAudioAndVideoDiff} less than diff :${diff}, and bufferlist is ${this.bufferList.length}`); const currentVideoTimestamp = this.player.getRenderCurrentPts(); while (this.bufferList.length > 0) { const bufferItem = this.bufferList[0]; const diff = bufferItem.ts - currentVideoTimestamp; if (diff > -this.player._opt.syncAudioAndVideoDiff / 2) { this.player.audioTimestamp = bufferItem.ts; this.player.debug.log('AudioContext', `engine active , audioSyncVideoOption item.ts is ${bufferItem.ts} and currentVideoTimestamp is ${currentVideoTimestamp}, diff is ${diff} > -${this.player._opt.syncAudioAndVideoDiff / 2} and end`); break; } this.bufferList.shift(); this.player.audioTimestamp = bufferItem.ts; } } } else { // audio 的速率小于 video // 持续丢掉音频数据,让音频数据赶上视频。 let playbackRate = this.playbackRate; if (diff < -this.player._opt.syncAudioAndVideoDiff) { if (playbackRate === this.defaultPlaybackRate) { this.player.debug.log('AudioContext', `audioSyncVideoOption ${-this.player._opt.syncAudioAndVideoDiff} less than diff :${diff}, speed up, playbackRate is ${playbackRate}, and bufferList is ${this.bufferList.length}`); playbackRate = this.defaultPlaybackRate + 0.2; } } else if (diff > -this.player._opt.syncAudioAndVideoDiff / 2) { if (playbackRate !== this.defaultPlaybackRate) { this.player.debug.log('AudioContext', `diff is ${diff} > -${this.player._opt.syncAudioAndVideoDiff / 2} and speed to 1`); playbackRate = this.defaultPlaybackRate; } } this.updatePlaybackRate(playbackRate); } } // 缓冲区。 calcPlaybackRateByBuffer() { if (this.isMute) { return; } if (!this.playing) { return; } let playbackRate = this.playbackRate; let audioSyncVideoDiff = 1 * 1000; // 1s let audioMaxVideoDiff = 5 * 1000; // 5s if (this.isAudioPlayer) { audioSyncVideoDiff = this.player._opt.videoBufferDelay; audioMaxVideoDiff = this.player._opt.videoBufferMax; } const maxSize = Math.floor(audioSyncVideoDiff / this.oneBufferDuration); const dropMaxSize = Math.floor(audioMaxVideoDiff / this.oneBufferDuration); if (this.bufferList.length > dropMaxSize) { this.player.debug.warn('AudioContext', `bufferList length ${this.bufferList.length} more than ${dropMaxSize}, and drop`); this.clear(); return; } // active 没法实现倍率播放。 if (this.getEngineType() === AUDIO_ENGINE_TYPE.active) { return; } // out of memory if (this.bufferList.length > maxSize) { playbackRate = this.defaultPlaybackRate + 0.2; this.player.debug.log('AudioContext', `bufferList length ${this.bufferList.length} more than ${maxSize}, speed up, playbackRate is ${playbackRate}`); } else if (this.bufferList.length < maxSize / 2) { playbackRate = this.defaultPlaybackRate; } this.updatePlaybackRate(playbackRate); } updatePlaybackRate(playbackRate) { if (this.rateProcessor) { this.playbackRate = playbackRate; this.rateProcessor.setRate(this.playbackRate); } } _provide(size) { let provider = this.playbackRate === 1 ? 
this.processor : this.rateProcessor; let audioData = provider.provide(size); return audioData; } } class AudioLoader extends Emitter { constructor(player) { super(); this.player = player; this.$video = player.video.$videoElement; this.init = false; // if (this.player._opt.hlsUseCanvasRender) { this.$video = this.player.hlsDecoder.$videoElement; } // if (this.player._opt.webrtcUseCanvasRender) { this.$video = this.player.webrtc.$videoElement; } this.audioInfo = { encTypeCode: '', encType: '', channels: '', sampleRate: '', depth: '' }; this.player.debug.log('Audio', 'init'); } destroy() { this.resetInit(); this.off(); this.player.debug.log('Audio', 'destroy'); } resetInit() { this.init = false; this.audioInfo = { encTypeCode: '', encType: '', channels: '', sampleRate: '', depth: '' }; } getAudioInfo() { return this.audioInfo; } updateAudioInfo(data) { if (data.encTypeCode) { this.audioInfo.encTypeCode = data.encTypeCode; this.audioInfo.encType = AUDIO_ENC_TYPE[data.encTypeCode]; } if (data.encType) { this.audioInfo.encType = data.encType; } if (data.channels) { this.audioInfo.channels = data.channels; } if (data.sampleRate) { this.audioInfo.sampleRate = data.sampleRate; } if (data.depth) { this.audioInfo.depth = data.depth; } // audio 基本信息 if (this.audioInfo.sampleRate && this.audioInfo.channels && this.audioInfo.encType && !this.init) { this.player.debug.log('Audio', 'audioInfo', JSON.stringify(this.audioInfo)); this.player.emit(EVENTS.audioInfo, this.audioInfo); this.init = true; } } get isPlaying() { return true; } get volume() { return isTrue(this.$video.muted) ? 0 : this.$video.volume; } get isMute() { return this.$video.volume === 0 || isTrue(this.$video.muted); } mute(muted) { this.setVolume(muted ? 0 : this.player.lastVolume || 0.5); } setVolume(volume) { volume = parseFloat(volume); if (isNaN(volume)) { return; } volume = clamp(volume, 0, 1); // 值从0.0(静音)到1.0(最大音量)。 if (this.$video.muted) { this.$video.muted = false; } // ios 上面通过设置 volume 为 0 是无效果的。只能通过 muted 来设置静音 if (isIOS()) { this.$video.muted = volume === 0; } if (this.player.isAliyunRtc()) { if (this.player.aliyunRtcDecoder && this.player.aliyunRtcDecoder.aliyunRtcRemoteStream) { this.player.aliyunRtcDecoder.aliyunRtcRemoteStream.muted = volume === 0; } } this.$video.volume = volume; this.player.emit(EVENTS.volumechange, this.player.volume); } clear() {} play() {} pause() {} resume() {} getEngineType() { return 'audio'; } isPlaybackRateSpeed() { return false; } getAudioBufferSize() { return 0; } setRate() { // 空的。 } initScriptNode() { // empty } initScriptNodeDelay() { // empty } } class AudioPlaybackLoader extends AudioContextLoader { constructor(player) { super(player); this.delayTimeout = null; this.player.on(EVENTS.playbackPause, flag => { this.listenPlaybackPause(flag); }); this.player.debug.log('AudioPlaybackContext', 'init'); } destroy() { if (this.delayTimeout) { clearTimeout(this.delayTimeout); this.delayTimeout = null; } super.destroy(); this.player.debug.log(`AudioPlaybackLoader`, 'destroy'); } listenPlaybackPause(flag) { if (flag) { this.pause(); if (this.player.playback.isPlaybackPauseClearCache) { this.clear(); } } else { this.resume(); } } // initScriptNodeDelay() { const delayTime = this.player._opt.playbackDelayTime; if (delayTime > 0) { this.delayTimeout = setTimeout(() => { this.initScriptNode(); }, delayTime); } else { this.initScriptNode(); } } setRate(value) { if (value !== this.defaultPlaybackRate && this.rateProcessor) { this.player.debug.log('AudioPlaybackContext', 'setRate', value); 
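/*
 * For TF-card playback the requested speed becomes the new baseline: defaultPlaybackRate is updated
 * below and handed to the RateProcessor, so the sync logic that temporarily nudges playback by +0.2
 * measures against this baseline instead of 1.0. A hedged sketch (setRate() as defined in this
 * class; how it is reached from the public playback-rate API is assumed):
 *   // audioPlaybackLoader.setRate(2); // decode and play TF recordings at 2x speed
 */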
this.defaultPlaybackRate = value; this.updatePlaybackRate(value); } } } class AudioPlayerLoader extends AudioContextLoader { constructor(player) { super(player); this.TAG_NAME = 'AudioPlayerLoader'; this.isAudioPlayer = true; this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); this.player.debug.log(this.TAG_NAME, 'destroy'); } play(buffer, ts) { if (isFalse(this.playing)) { return; } super.play(buffer, ts); } pause() { this.player.debug.log(this.TAG_NAME, 'pause'); this.playing = false; this.clear(); } resume() { this.player.debug.log(this.TAG_NAME, 'resume'); this.playing = true; } } class Audio { constructor(player) { const Loader = Audio.getLoaderFactory(player._opt); return new Loader(player); } static getLoaderFactory(opt) { if (opt.playType === PLAY_TYPE.playbackTF) { if (opt.useMSE && opt.mseDecodeAudio) { return AudioLoader; } return AudioPlaybackLoader; } else if (opt.playType === PLAY_TYPE.playerAudio) { return AudioPlayerLoader; } else { if (opt.isHls && isFalse(opt.supportHls265) || opt.isWebrtc && isFalse(opt.isWebrtcH265) || opt.useMSE && opt.mseDecodeAudio || opt.isAliyunRtc) { return AudioLoader; } return AudioContextLoader; } } } class FetchLoader$2 extends Emitter { constructor(player) { super(); this.player = player; this.playing = false; this._requestAbort = false; this._status = LOADER_STATUS.idle; this.writableStream = null; this.abortController = new AbortController(); // this.streamRate = calculationRate(rate => { player.emit(EVENTS.kBps, (rate / 1024).toFixed(2)); }); this.streamRateInterval = null; player.debug.log('FetchStream', 'init'); } destroy() { this.abort(); if (this.writableStream && isFalse(this.writableStream.locked)) { this.writableStream.close().catch(e => { // ignore // The stream you are trying to close is locked. this.player.debug.log('FetchStream', `destroy and writableStream.close()`, e); }); } this.writableStream = null; this.off(); this._status = LOADER_STATUS.idle; this.streamRate = null; this.stopStreamRateInterval(); this.player.debug.log('FetchStream', 'destroy'); } startStreamRateInterval() { this.stopStreamRateInterval(); this.streamRateInterval = setInterval(() => { this.streamRate && this.streamRate(0); }, 1000); } stopStreamRateInterval() { if (this.streamRateInterval) { clearInterval(this.streamRateInterval); this.streamRateInterval = null; } } /** * * @param url * @param options */ fetchStream(url) { let options = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; const { demux } = this.player; this.player._times.streamStart = now$2(); const fetchOptions = Object.assign({ signal: this.abortController.signal }, { headers: options.headers || {} }); fetch(url, fetchOptions).then(res => { if (this._requestAbort) { this._status = LOADER_STATUS.idle; res.body.cancel(); return; } if (!isFetchSuccess(res)) { this.player.debug.error('FetchStream', `fetch response status is ${res.status} and ok is ${res.ok} and emit error and next abort()`); this.abort(); // / 这边会报用户 aborted a request 错误。 this.emit(EVENTS_ERROR.fetchError, `fetch response status is ${res.status} and ok is ${res.ok}`); return; } this.emit(EVENTS.streamSuccess); this.startStreamRateInterval(); if (supportWritableStream()) { this.player.debug.log('FetchStream', 'use WritableStream() to read stream'); this.writableStream = new WritableStream({ write: value => { if (this.abortController && this.abortController.signal && this.abortController.signal.aborted) { this.player.debug.log('FetchStream', 'writableStream.write() and this.abortController.signal.aborted so return'); this._status = LOADER_STATUS.complete; return; } if (isTrue(this._requestAbort)) { this.player.debug.log('FetchStream', 'writableStream.write() and this._requestAbort is true so return'); this._status = LOADER_STATUS.complete; return; } this._status = LOADER_STATUS.buffering; this.streamRate && this.streamRate(value.byteLength); return demux.dispatch(value); }, close: () => { this._status = LOADER_STATUS.complete; demux.close(); this.emit(EVENTS.streamEnd, 'fetch done'); }, abort: e => { if (this.abortController && this.abortController.signal && this.abortController.signal.aborted) { this.player.debug.log('FetchStream', 'writableStream.abort() and this.abortController.signal.aborted so return'); this._status = LOADER_STATUS.complete; return; } demux.close(); const errorString = e.toString(); // aborted a request 。 if (errorString.indexOf(FETCH_ERROR.abortError) !== -1) { return; } if (errorString.indexOf(FETCH_ERROR.abortError2) !== -1) { return; } if (e.name === FETCH_ERROR.abort) { return; } this.abort(); // / 这边会报用户 aborted a request 错误。 this.emit(EVENTS_ERROR.fetchError, e); } }); res.body.pipeTo(this.writableStream); } else { this.player.debug.log('FetchStream', 'not support WritableStream and use getReader() to read stream'); const reader = res.body.getReader(); const fetchNext = () => { reader.read().then(_ref => { let { done, value } = _ref; if (done) { this._status = LOADER_STATUS.complete; demux.close(); this.emit(EVENTS.streamEnd, 'fetch done'); return; } if (this.abortController && this.abortController.signal && this.abortController.signal.aborted) { this.player.debug.log('FetchStream', 'reader.read() and this.abortController.signal.aborted so return'); this._status = LOADER_STATUS.complete; return; } if (isTrue(this._requestAbort)) { this.player.debug.log('FetchStream', 'reader.read() and this._requestAbort is true so return'); this._status = LOADER_STATUS.complete; return; } this._status = LOADER_STATUS.buffering; this.streamRate && this.streamRate(value.byteLength); demux.dispatch(value); fetchNext(); }).catch(e => { if (this.abortController && this.abortController.signal && this.abortController.signal.aborted) { this.player.debug.log('FetchStream', 'reader.read().catch() and this.abortController.signal.aborted so return'); this._status = LOADER_STATUS.complete; return; } demux.close(); const errorString = e.toString(); // aborted a request 。 if (errorString.indexOf(FETCH_ERROR.abortError) !== -1) 
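// Abort-style rejections (AbortController fired, the player was torn down, the page navigated away)
// are deliberately swallowed by the checks around this point: they are part of normal teardown and
// must not be re-emitted as EVENTS_ERROR.fetchError, which the player treats as a stream failure.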
{ return; } if (errorString.indexOf(FETCH_ERROR.abortError2) !== -1) { return; } if (e.name === FETCH_ERROR.abort) { return; } this.abort(); // / 这边会报用户 aborted a request 错误。 this.emit(EVENTS_ERROR.fetchError, e); }); }; fetchNext(); } }).catch(e => { if (this.abortController && this.abortController.signal && this.abortController.signal.aborted) { return; } if (e.name === 'AbortError') { return; } demux.close(); this.abort(); this.emit(EVENTS_ERROR.fetchError, e); }); } abort() { this._requestAbort = true; const _isChrome = isChrome(); if (this._status !== LOADER_STATUS.buffering || isFalse(_isChrome)) { // Chrome may throw Exception-like things here, avoid using if is buffering if (this.abortController) { try { this.abortController.abort(); } catch (e) { // ignore } this.abortController = null; } } else { this.abortController = null; this.player.debug.log('FetchStream', `abort() and not abortController.abort() _status is ${this._status} and _isChrome is ${_isChrome}`); } } getStreamType() { return PLAYER_STREAM_TYPE.fetch; } } class FetchWorkerLoader extends Emitter { constructor(player) { super(); this.TAG_NAME = 'FetchWorkerLoader'; this.player = player; this.playing = false; this.fetchWorker = null; this.workerClearTimeout = null; this.workerUrl = null; this.destroyResolve = null; this.decoderWorkerCloseTimeout = null; this.abortController = new AbortController(); // this.streamRate = calculationRate(rate => { player.emit(EVENTS.kBps, (rate / 1024).toFixed(2)); }); this.streamRateInterval = null; this._initFetchWorker(); player.debug.log(this.TAG_NAME, 'init'); } destroy() { return new Promise((resolve, reject) => { if (this.fetchWorker) { this.player.debug.log(this.TAG_NAME, 'send destroy'); // fetch maybe not abort this.fetchWorker.postMessage({ cmd: WORKER_FETCH_CMD_TYPE.destroy }); this.destroyResolve = resolve; this.decoderWorkerCloseTimeout = setTimeout(() => { this.player.debug.warn(this.TAG_NAME, 'send close but not response and destroy directly'); if (this.decoderWorkerCloseTimeout) { clearTimeout(this.decoderWorkerCloseTimeout); this.decoderWorkerCloseTimeout = null; } this._destroy(); setTimeout(() => { resolve(); }, 0); }, 2 * 1000); } else { this._destroy(); setTimeout(() => { resolve(); }, 0); } }); } _destroy() { this.off(); if (this.decoderWorkerCloseTimeout) { clearTimeout(this.decoderWorkerCloseTimeout); this.decoderWorkerCloseTimeout = null; } // todo: if (this.workerUrl) { window.URL.revokeObjectURL(this.workerUrl); this.workerUrl = null; } if (this.workerClearTimeout) { clearTimeout(this.workerClearTimeout); this.workerClearTimeout = null; } this._stopStreamRateInterval(); this.streamRate = null; if (this.fetchWorker) { this.fetchWorker.terminate(); this.fetchWorker.onmessage = null; this.fetchWorker = null; } if (this.destroyResolve) { this.destroyResolve(); this.destroyResolve = null; } this.player.debug.log(this.TAG_NAME, 'destroy'); } _initFetchWorker() { // fetch in worker // worker fun function FetchWorker() { function isTrue(value) { return value === true || value === 'true'; } function isFalse(value) { return value === false || value === 'false'; } const FETCH_ERROR = { abortError: 'The user aborted a request', abortError2: 'AbortError', abort: 'AbortError' }; const WORKER_FETCH_CMD_TYPE = { fetch: 'fetch', destroy: 'destroy', destroyEnd: 'destroyEnd', buffer: 'buffer', fetchError: 'fetchError', fetchClose: 'fetchClose', fetchSuccess: 'fetchSuccess' }; const LOADER_STATUS = { idle: 'idle', // connecting: 'connecting', // buffering: 'buffering', // error: 
'error', // complete: 'complete' // }; function isFetchSuccess(res) { return res.ok && res.status >= 200 && res.status <= 299; } function supportWritableStream() { return typeof WritableStream !== 'undefined'; } class FetchLoader { constructor() { this._requestAbort = false; this._status = LOADER_STATUS.idle; this.writableStream = null; this.isChrome = false; this.abortController = new AbortController(); } destroy() { this.abort(); if (this.writableStream && isFalse(this.writableStream.locked)) { this.writableStream.close().catch(e => { // ignore // The stream you are trying to close is locked. }); } this.writableStream = null; this._status = LOADER_STATUS.idle; } /** * * @param url * @param options */ fetchStream(url) { let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; const fetchOptions = Object.assign({ signal: this.abortController.signal }, { headers: options.headers || {} }); fetch(url, fetchOptions).then(res => { if (this._requestAbort) { this._status = LOADER_STATUS.idle; res.body.cancel(); return; } if (!isFetchSuccess(res)) { this.abort(); // / 这边会报用户 aborted a request 错误。 postMessage({ cmd: WORKER_FETCH_CMD_TYPE.fetchError, message: `fetch response status is ${res.status} and ok is ${res.ok}` }); return; } postMessage({ cmd: WORKER_FETCH_CMD_TYPE.fetchSuccess }); if (supportWritableStream()) { this.writableStream = new WritableStream({ write: value => { if (this.abortController && this.abortController.signal && this.abortController.signal.aborted) { this._status = LOADER_STATUS.complete; return; } if (isTrue(this._requestAbort)) { this._status = LOADER_STATUS.complete; return; } this._status = LOADER_STATUS.buffering; postMessage({ cmd: WORKER_FETCH_CMD_TYPE.buffer, buffer: value }, [value.buffer]); return; }, close: () => { this._status = LOADER_STATUS.complete; postMessage({ cmd: WORKER_FETCH_CMD_TYPE.fetchClose }); }, abort: e => { if (this.abortController && this.abortController.signal && this.abortController.signal.aborted) { this._status = LOADER_STATUS.complete; return; } const errorString = e.toString(); // aborted a request 。 if (errorString.indexOf(FETCH_ERROR.abortError) !== -1) { return; } if (errorString.indexOf(FETCH_ERROR.abortError2) !== -1) { return; } if (e.name === FETCH_ERROR.abort) { return; } this.abort(); // / 这边会报用户 aborted a request 错误。 postMessage({ cmd: WORKER_FETCH_CMD_TYPE.fetchError, message: e.toString() }); } }); res.body.pipeTo(this.writableStream); } else { const reader = res.body.getReader(); const fetchNext = () => { reader.read().then(_ref => { let { done, value } = _ref; if (done) { this._status = LOADER_STATUS.complete; postMessage({ cmd: WORKER_FETCH_CMD_TYPE.fetchClose }); return; } if (this.abortController && this.abortController.signal && this.abortController.signal.aborted) { this._status = LOADER_STATUS.complete; return; } if (isTrue(this._requestAbort)) { this._status = LOADER_STATUS.complete; return; } this._status = LOADER_STATUS.buffering; postMessage({ cmd: WORKER_FETCH_CMD_TYPE.buffer, buffer: value }, [value.buffer]); fetchNext(); }).catch(e => { if (this.abortController && this.abortController.signal && this.abortController.signal.aborted) { this._status = LOADER_STATUS.complete; return; } const errorString = e.toString(); // aborted a request 。 if (errorString.indexOf(FETCH_ERROR.abortError) !== -1) { return; } if (errorString.indexOf(FETCH_ERROR.abortError2) !== -1) { return; } if (e.name === FETCH_ERROR.abort) { return; } this.abort(); // / 这边会报用户 aborted a request 错误。 postMessage({ cmd: 
WORKER_FETCH_CMD_TYPE.fetchError, message: e.toString() }); }); }; fetchNext(); } }).catch(e => { if (this.abortController && this.abortController.signal && this.abortController.signal.aborted) { return; } if (e.name === 'AbortError') { return; } this.abort(); postMessage({ cmd: WORKER_FETCH_CMD_TYPE.fetchError, message: e.toString() }); }); } abort() { this._requestAbort = true; if (this._status !== LOADER_STATUS.buffering || isFalse(fetchLeader.isChrome)) { if (this.abortController) { try { this.abortController.abort(); } catch (e) {} this.abortController = null; } } else { this.abortController = null; } } } let fetchLeader = new FetchLoader(); self.onmessage = e => { const msg = e.data; switch (msg.cmd) { case WORKER_FETCH_CMD_TYPE.fetch: fetchLeader.isChrome = isTrue(msg.isChrome); fetchLeader.fetchStream(msg.url, JSON.parse(msg.options)); break; case WORKER_FETCH_CMD_TYPE.destroy: fetchLeader.destroy(); fetchLeader = null; postMessage({ cmd: WORKER_FETCH_CMD_TYPE.destroyEnd }); break; } }; } const FetchWorkerString = function2String(FetchWorker.toString()); const blob = new Blob([FetchWorkerString], { type: "text/javascript" }); let workerUrl = URL.createObjectURL(blob); const fetchWorker = new Worker(workerUrl); this.workerUrl = workerUrl; // 必须要释放,不然每次调用内存都明显泄露内存 // chrome 83 file协议下如果直接释放,将会使WebWorker无法启动 this.workerClearTimeout = setTimeout(() => { window.URL.revokeObjectURL(this.workerUrl); this.workerUrl = null; this.workerClearTimeout = null; }, URL_OBJECT_CLEAR_TIME); fetchWorker.onmessage = event => { const { demux } = this.player; const msg = event.data; switch (msg.cmd) { case WORKER_FETCH_CMD_TYPE.buffer: this.streamRate && this.streamRate(msg.buffer.byteLength); demux.dispatch(msg.buffer); break; case WORKER_FETCH_CMD_TYPE.fetchSuccess: this.emit(EVENTS.streamSuccess); this._startStreamRateInterval(); break; case WORKER_FETCH_CMD_TYPE.fetchClose: demux.close(); this.emit(EVENTS.streamEnd, 'fetch done'); break; case WORKER_FETCH_CMD_TYPE.fetchError: demux.close(); // / 这边会报用户 aborted a request 错误。 this.emit(EVENTS_ERROR.fetchError, msg.message); break; case WORKER_FETCH_CMD_TYPE.destroyEnd: this._destroy(); break; } }; this.fetchWorker = fetchWorker; } _startStreamRateInterval() { this._stopStreamRateInterval(); this.streamRateInterval = setInterval(() => { this.streamRate && this.streamRate(0); }, 1000); } _stopStreamRateInterval() { if (this.streamRateInterval) { clearInterval(this.streamRateInterval); this.streamRateInterval = null; } } fetchStream(url) { let options = arguments.length > 1 && arguments[1] !== undefined ? 
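/*
 * fetchStream() does not fetch on the main thread at all: it forwards the request to the inline
 * worker built in _initFetchWorker(), which streams chunks back as transferable ArrayBuffers
 * ("buffer" messages) that are dispatched straight into the demuxer. The options object is
 * serialized with JSON.stringify before crossing the worker boundary. The posted message has the
 * shape used throughout this file:
 *   // { cmd: 'fetch', url, isChrome: isChrome(), options: JSON.stringify({ headers: { ... } }) }
 */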
arguments[1] : {}; this.player._times.streamStart = now$2(); this.fetchWorker.postMessage({ cmd: WORKER_FETCH_CMD_TYPE.fetch, url, isChrome: isChrome(), options: JSON.stringify(options) }); } getStreamType() { return PLAYER_STREAM_TYPE.fetch; } } class WebsocketLoader extends Emitter { constructor(player) { super(); this.player = player; this.socket = null; this.socketStatus = WEBSOCKET_STATUS.notConnect; this.wsUrl = null; this.requestAbort = false; this.socketDestroyFnList = []; // this.streamRate = calculationRate(rate => { player.emit(EVENTS.kBps, (rate / 1024).toFixed(2)); }); this.streamRateInterval = null; player.debug.log('WebsocketStream', 'init'); } destroy() { this._closeWebSocket(); this.stopStreamRateInterval(); this.wsUrl = null; this.off(); this.player.debug.log('WebsocketStream', 'destroy'); } startStreamRateInterval() { this.stopStreamRateInterval(); this.streamRateInterval = setInterval(() => { this.streamRate && this.streamRate(0); }, 1000); } stopStreamRateInterval() { if (this.streamRateInterval) { clearInterval(this.streamRateInterval); this.streamRateInterval = null; } } _createWebSocket() { const player = this.player; const { debug, events: { proxy }, demux } = player; this.socket = new WebSocket(this.wsUrl); this.socket.binaryType = 'arraybuffer'; const socketOpenDestroy = proxy(this.socket, 'open', () => { debug.log('WebsocketStream', 'socket open'); this.socketStatus = WEBSOCKET_STATUS.open; this.emit(EVENTS.streamSuccess); this.player.emit(EVENTS.websocketOpen); this.startStreamRateInterval(); }); const socketMessageDestroy = proxy(this.socket, 'message', event => { this.streamRate && this.streamRate(event.data.byteLength); this._handleMessage(event.data); }); const socketCloseDestroy = proxy(this.socket, 'close', event => { debug.log('WebsocketStream', `socket close and code is ${event.code}`); if (event.code === 1006) { debug.error('WebsocketStream', `socket close abnormally and code is ${event.code}`); } if (isTrue(this.requestAbort)) { this.requestAbort = false; debug.log('WebsocketStream', `socket close and requestAbort is true`); return; } demux.close(); this.socketStatus = WEBSOCKET_STATUS.close; this.player.emit(EVENTS.websocketClose, event.code); this.emit(EVENTS.streamEnd, event.code); }); const socketErrorDestroy = proxy(this.socket, 'error', error => { debug.error('WebsocketStream', 'socket error', error); this.socketStatus = WEBSOCKET_STATUS.error; this.emit(EVENTS_ERROR.websocketError, error); demux.close(); debug.log('WebsocketStream', `socket error:`, error.isTrusted ? 
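// In the close handler above, code 1006 marks an abnormal disconnect (the connection dropped
// without a close frame); here error.isTrusted distinguishes a browser-generated error event,
// logged as "websocket user aborted", from any other failure.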
'websocket user aborted' : 'websocket error'); }); this.socketDestroyFnList.push(socketOpenDestroy, socketMessageDestroy, socketCloseDestroy, socketErrorDestroy); } _closeWebSocket() { this.socketDestroyFnList.forEach(event => event()); if (this.socket && (this.socket.readyState === 0 || this.socket.readyState === 1)) { this.requestAbort = true; // CONNECTING || OPEN this.socket.close(1000, 'Client disconnecting'); } else { if (this.socket) { this.player.debug.log('WebsocketStream', `_closeWebSocket() socket is null or socket status is ${this.socket && this.socket.readyState}`); } } this.socket = null; this.socketStatus = WEBSOCKET_STATUS.notConnect; this.streamRate = null; } // _handleMessage(message) { const { demux } = this.player; if (!demux) { this.player.debug.warn('WebsocketStream', 'websocket handle message demux is null'); return; } demux.dispatch(message); } /** * * @param url * @param options */ fetchStream(url, options) { this.player._times.streamStart = now$2(); this.wsUrl = url; this._createWebSocket(); } sendMessage(msg) { if (this.socket) { if (this.socketStatus === WEBSOCKET_STATUS.open) { this.socket.send(msg); } else { this.player.debug.error('WebsocketStream', `websocket send message error and socket status is ${this.socketStatus}`); } } else { this.player.debug.error('WebsocketStream', 'websocket send message socket is null'); } } /** * */ resetFetchStream() { this._closeWebSocket(); this._createWebSocket(); } getStreamType() { return PLAYER_STREAM_TYPE.websocket; } } class HlsLoader$1 extends Emitter { constructor(player) { super(); this.player = player; player.debug.log('HlsStream', 'init'); } destroy() { this.off(); this.player.debug.log('HlsStream', 'destroy'); } fetchStream(url) { const { hlsDecoder, debug } = this.player; this.player._times.streamStart = now$2(); hlsDecoder.loadSource(url).then(() => { this.player.debug.log('HlsStream', 'loadSource success'); this.emit(EVENTS.streamSuccess); }).catch(error => { this.emit(EVENTS_ERROR.hlsError, error); }); } getStreamType() { return PLAYER_STREAM_TYPE.hls; } } class WebrtcLoader extends Emitter { constructor(player) { super(); this.player = player; this.webrctUrl = null; player.debug.log('WebrtcStream', 'init'); } destroy() { this.webrctUrl = null; this.off(); this.player.debug.log('WebrtcStream', 'destroy'); } /** * webrtc://host:port/webrtc/play/[streamPath] * @param url */ fetchStream(url) { let options = arguments.length > 1 && arguments[1] !== undefined ? 
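/*
 * fetchStream() below rewrites the custom scheme before signalling: "webrtc://host/live/stream"
 * becomes "<page protocol>//host/live/stream", and for an M7S server whose URL does not already
 * contain "/webrtc/play" that prefix is added to the path, so the result looks like
 * "https://host/webrtc/play/live/stream". The SDP exchange itself is delegated to
 * player.webrtc.loadSource(url, options).
 */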
arguments[1] : {}; const { webrtc, debug } = this.player; this.player._times.streamStart = now$2(); this.webrctUrl = url.replace('webrtc:', window.location.protocol); // if (this.webrctUrl.indexOf('/webrtc/play') === -1 && this.player.isWebrtcForM7S()) { const webrtcUrlObj = new URL(this.webrctUrl); const path = '/webrtc/play' + webrtcUrlObj.pathname; this.webrctUrl = webrtcUrlObj.origin + path + webrtcUrlObj.search; this.player.debug.log('WebrtcStream', `original url is ${url}, and new url is: ${this.webrctUrl}`); } webrtc.loadSource(this.webrctUrl, options).then(() => { this.player.debug.log('WebrtcStream', 'loadSource success'); this.emit(EVENTS.streamSuccess); }).catch(error => { this.player.debug.error('WebrtcStream', 'loadSource error', error); this.emit(EVENTS_ERROR.webrtcError, error); }); } getStreamType() { return PLAYER_STREAM_TYPE.webrtc; } } class WebTransportLoader$1 extends Emitter { constructor(player) { super(); this.player = player; this.transport = null; this.wtUrl = null; // this.streamRate = calculationRate(rate => { player.emit(EVENTS.kBps, (rate / 1024).toFixed(2)); }); this.streamRateInterval = null; player.debug.log('WebTransportLoader', 'init'); } destroy() { this.abort(); this.off(); this.player.debug.log('WebTransportLoader', 'destroy'); } startStreamRateInterval() { this.stopStreamRateInterval(); this.streamRateInterval = setInterval(() => { this.streamRate && this.streamRate(0); }, 1000); } stopStreamRateInterval() { if (this.streamRateInterval) { clearInterval(this.streamRateInterval); this.streamRateInterval = null; } } _createWebTransport() { const player = this.player; const { debug, events: { proxy }, demux } = player; try { this.transport = new WebTransport(this.wtUrl); this.transport.ready.then(() => { this.emit(EVENTS.streamSuccess); this.startStreamRateInterval(); this.transport.createBidirectionalStream().then(stream => { stream.readable.pipeTo(new WritableStream(demux.input)); }); }).catch(e => { this.player.debug.warn('WebTransportLoader', '_createWebTransport-ready', e); }); } catch (e) { this.player.debug.warn('WebTransportLoader', '_createWebTransport', e); } } fetchStream(url) { this.player._times.streamStart = now$2(); this.wtUrl = url.replace(/^wt:/, 'https:'); this._createWebTransport(); } abort() { if (this.transport) { try { this.transport.close(); this.transport = null; } catch (e) { this.transport = null; } } } getStreamType() { return PLAYER_STREAM_TYPE.webTransport; } } class WorkerLoader extends Emitter { constructor(player) { super(); this.player = player; this.workUrl = null; player.debug.log('WorkerStream', 'init'); } destroy() { this.workUrl = null; this.off(); this.player.debug.log('WorkerStream', 'destroy'); } sendMessage(message) { this.player.decoderWorker.workerSendMessage(message); } fetchStream(url) { this.workUrl = url; this.player._times.streamStart = now$2(); this.player.decoderWorker.workerFetchStream(url); } getStreamType() { const protocol = this.player._opt.protocol; const streamType = protocol === PLAYER_PLAY_PROTOCOL.fetch ? 
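/*
 * Stream$1.getLoaderFactory() below maps protocol and decode options to a concrete loader class:
 *   fetch      -> WorkerLoader when demuxing/wasm decoding lives in a worker, otherwise
 *                 FetchWorkerLoader (fetch moved into a worker) or FetchLoader$2 (main thread);
 *   websocket  -> WorkerLoader or WebsocketLoader by the same worker-vs-main-thread rules;
 *   hls / webrtc / webTransport / aliyunRtc -> their dedicated loaders.
 * Because the Stream$1 constructor returns "new Loader(player)", new Stream$1(player) yields an
 * instance of the selected loader rather than of Stream$1 itself.
 */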
PLAYER_STREAM_TYPE.fetch : PLAYER_STREAM_TYPE.websocket; return PLAYER_STREAM_TYPE.worker + ' ' + streamType; } } class AliyunRtcLoader extends Emitter { constructor(player) { super(); this.TAG_NAME = 'AliyunRtcLoader'; this.player = player; player.debug.log(this.TAG_NAME, 'init'); } destroy() { this.off(); this.player.debug.log(this.TAG_NAME, 'destroy'); } fetchStream(url) { const { aliyunRtcDecoder } = this.player; this.player._times.streamStart = now$2(); aliyunRtcDecoder.loadSource(url).then(() => { this.player.debug.log(this.TAG_NAME, 'loadSource success'); this.emit(EVENTS.streamSuccess); }).catch(error => { this.player.debug.error(this.TAG_NAME, 'loadSource error', error); this.emit(EVENTS_ERROR.aliyunRtcError, error); }); } getStreamType() { return PLAYER_STREAM_TYPE.aliyunRtc; } } class Stream$1 { constructor(player) { const Loader = Stream$1.getLoaderFactory(player._opt); return new Loader(player); } static getLoaderFactory(opt) { const { protocol, useWasm, playType, useWCS, useMSE, demuxUseWorker, mainThreadFetchUseWorker } = opt; if (protocol === PLAYER_PLAY_PROTOCOL.fetch) { if (playType === PLAY_TYPE.playerAudio) { return WorkerLoader; } else if (playType === PLAY_TYPE.player) { // wasm 默认走的worker // 排除掉mse 和wcs 单独解码的情况 if (useWasm && !onlyMseOrWcsVideo(opt)) { return WorkerLoader; } else { if (demuxUseWorker) { return WorkerLoader; } else { if (mainThreadFetchUseWorker) { return FetchWorkerLoader; } else { return FetchLoader$2; } } } } else { // playback if (useWCS || useMSE) { // return FetchLoader; if (demuxUseWorker) { return WorkerLoader; } else { if (mainThreadFetchUseWorker) { return FetchWorkerLoader; } else { return FetchLoader$2; } } } else { return WorkerLoader; } } } else if (protocol === PLAYER_PLAY_PROTOCOL.websocket) { if (playType === PLAY_TYPE.playerAudio) { return WorkerLoader; } else if (playType === PLAY_TYPE.player) { if (useWasm && !onlyMseOrWcsVideo(opt)) { return WorkerLoader; } else { if (demuxUseWorker) { return WorkerLoader; } else { return WebsocketLoader; } } } else { // playback if (useWCS || useMSE) { if (demuxUseWorker) { return WorkerLoader; } else { return WebsocketLoader; } } else { return WorkerLoader; } } } else if (protocol === PLAYER_PLAY_PROTOCOL.hls) { return HlsLoader$1; } else if (protocol === PLAYER_PLAY_PROTOCOL.webrtc) { return WebrtcLoader; } else if (protocol === PLAYER_PLAY_PROTOCOL.webTransport) { return WebTransportLoader$1; } else if (protocol === PLAYER_PLAY_PROTOCOL.aliyunRtc) { return AliyunRtcLoader; } } } var RecordRTC_1 = createCommonjsModule(function (module) { // Last time updated: 2021-03-09 3:20:22 AM UTC // ________________ // RecordRTC v5.6.2 // Open-Sourced: https://github.com/muaz-khan/RecordRTC // -------------------------------------------------- // Muaz Khan - www.MuazKhan.com // MIT License - www.WebRTC-Experiment.com/licence // -------------------------------------------------- // ____________ // RecordRTC.js /** * {@link https://github.com/muaz-khan/RecordRTC|RecordRTC} is a WebRTC JavaScript library for audio/video as well as screen activity recording. It supports Chrome, Firefox, Opera, Android, and Microsoft Edge. Platforms: Linux, Mac and Windows. * @summary Record audio, video or screen inside the browser. 
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef RecordRTC * @class * @example * var recorder = RecordRTC(mediaStream or [arrayOfMediaStream], { * type: 'video', // audio or video or gif or canvas * recorderType: MediaStreamRecorder || CanvasRecorder || StereoAudioRecorder || Etc * }); * recorder.startRecording(); * @see For further information: * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc. * @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, desiredSampRate: 16000, video: HTMLVideoElement, etc.} */ function RecordRTC(mediaStream, config) { if (!mediaStream) { throw 'First parameter is required.'; } config = config || { type: 'video' }; config = new RecordRTCConfiguration(mediaStream, config); // a reference to user's recordRTC object var self = this; function startRecording(config2) { if (!config.disableLogs) { console.log('RecordRTC version: ', self.version); } if (!!config2) { // allow users to set options using startRecording method // config2 is similar to main "config" object (second parameter over RecordRTC constructor) config = new RecordRTCConfiguration(mediaStream, config2); } if (!config.disableLogs) { console.log('started recording ' + config.type + ' stream.'); } if (mediaRecorder) { mediaRecorder.clearRecordedData(); mediaRecorder.record(); setState('recording'); if (self.recordingDuration) { handleRecordingDuration(); } return self; } initRecorder(function() { if (self.recordingDuration) { handleRecordingDuration(); } }); return self; } function initRecorder(initCallback) { if (initCallback) { config.initCallback = function() { initCallback(); initCallback = config.initCallback = null; // recorder.initRecorder should be call-backed once. 
}; } var Recorder = new GetRecorderType(mediaStream, config); mediaRecorder = new Recorder(mediaStream, config); mediaRecorder.record(); setState('recording'); if (!config.disableLogs) { console.log('Initialized recorderType:', mediaRecorder.constructor.name, 'for output-type:', config.type); } } function stopRecording(callback) { callback = callback || function() {}; if (!mediaRecorder) { warningLog(); return; } if (self.state === 'paused') { self.resumeRecording(); setTimeout(function() { stopRecording(callback); }, 1); return; } if (self.state !== 'recording' && !config.disableLogs) { console.warn('Recording state should be: "recording", however current state is: ', self.state); } if (!config.disableLogs) { console.log('Stopped recording ' + config.type + ' stream.'); } if (config.type !== 'gif') { mediaRecorder.stop(_callback); } else { mediaRecorder.stop(); _callback(); } setState('stopped'); function _callback(__blob) { if (!mediaRecorder) { if (typeof callback.call === 'function') { callback.call(self, ''); } else { callback(''); } return; } Object.keys(mediaRecorder).forEach(function(key) { if (typeof mediaRecorder[key] === 'function') { return; } self[key] = mediaRecorder[key]; }); var blob = mediaRecorder.blob; if (!blob) { if (__blob) { mediaRecorder.blob = blob = __blob; } else { throw 'Recording failed.'; } } if (blob && !config.disableLogs) { console.log(blob.type, '->', bytesToSize(blob.size)); } if (callback) { var url; try { url = URL.createObjectURL(blob); } catch (e) {} if (typeof callback.call === 'function') { callback.call(self, url); } else { callback(url); } } if (!config.autoWriteToDisk) { return; } getDataURL(function(dataURL) { var parameter = {}; parameter[config.type + 'Blob'] = dataURL; DiskStorage.Store(parameter); }); } } function pauseRecording() { if (!mediaRecorder) { warningLog(); return; } if (self.state !== 'recording') { if (!config.disableLogs) { console.warn('Unable to pause the recording. Recording state: ', self.state); } return; } setState('paused'); mediaRecorder.pause(); if (!config.disableLogs) { console.log('Paused recording.'); } } function resumeRecording() { if (!mediaRecorder) { warningLog(); return; } if (self.state !== 'paused') { if (!config.disableLogs) { console.warn('Unable to resume the recording. Recording state: ', self.state); } return; } setState('recording'); // not all libs have this method yet mediaRecorder.resume(); if (!config.disableLogs) { console.log('Resumed recording.'); } } function readFile(_blob) { postMessage(new FileReaderSync().readAsDataURL(_blob)); } function getDataURL(callback, _mediaRecorder) { if (!callback) { throw 'Pass a callback function over getDataURL.'; } var blob = _mediaRecorder ? 
_mediaRecorder.blob : (mediaRecorder || {}).blob; if (!blob) { if (!config.disableLogs) { console.warn('Blob encoder did not finish its job yet.'); } setTimeout(function() { getDataURL(callback, _mediaRecorder); }, 1000); return; } if (typeof Worker !== 'undefined' && !navigator.mozGetUserMedia) { var webWorker = processInWebWorker(readFile); webWorker.onmessage = function(event) { callback(event.data); }; webWorker.postMessage(blob); } else { var reader = new FileReader(); reader.readAsDataURL(blob); reader.onload = function(event) { callback(event.target.result); }; } function processInWebWorker(_function) { try { var blob = URL.createObjectURL(new Blob([_function.toString(), 'this.onmessage = function (eee) {' + _function.name + '(eee.data);}' ], { type: 'application/javascript' })); var worker = new Worker(blob); URL.revokeObjectURL(blob); return worker; } catch (e) {} } } function handleRecordingDuration(counter) { counter = counter || 0; if (self.state === 'paused') { setTimeout(function() { handleRecordingDuration(counter); }, 1000); return; } if (self.state === 'stopped') { return; } if (counter >= self.recordingDuration) { stopRecording(self.onRecordingStopped); return; } counter += 1000; // 1-second setTimeout(function() { handleRecordingDuration(counter); }, 1000); } function setState(state) { if (!self) { return; } self.state = state; if (typeof self.onStateChanged.call === 'function') { self.onStateChanged.call(self, state); } else { self.onStateChanged(state); } } var WARNING = 'It seems that recorder is destroyed or "startRecording" is not invoked for ' + config.type + ' recorder.'; function warningLog() { if (config.disableLogs === true) { return; } console.warn(WARNING); } var mediaRecorder; var returnObject = { /** * This method starts the recording. * @method * @memberof RecordRTC * @instance * @example * var recorder = RecordRTC(mediaStream, { * type: 'video' * }); * recorder.startRecording(); */ startRecording: startRecording, /** * This method stops the recording. It is strongly recommended to get "blob" or "URI" inside the callback to make sure all recorders finished their job. * @param {function} callback - Callback to get the recorded blob. * @method * @memberof RecordRTC * @instance * @example * recorder.stopRecording(function() { * // use either "this" or "recorder" object; both are identical * video.src = this.toURL(); * var blob = this.getBlob(); * }); */ stopRecording: stopRecording, /** * This method pauses the recording. You can resume recording using "resumeRecording" method. * @method * @memberof RecordRTC * @instance * @todo Firefox is unable to pause the recording. Fix it. * @example * recorder.pauseRecording(); // pause the recording * recorder.resumeRecording(); // resume again */ pauseRecording: pauseRecording, /** * This method resumes the recording. * @method * @memberof RecordRTC * @instance * @example * recorder.pauseRecording(); // first of all, pause the recording * recorder.resumeRecording(); // now resume it */ resumeRecording: resumeRecording, /** * This method initializes the recording. * @method * @memberof RecordRTC * @instance * @todo This method should be deprecated. * @example * recorder.initRecorder(); */ initRecorder: initRecorder, /** * Ask RecordRTC to auto-stop the recording after 5 minutes. 
* @method * @memberof RecordRTC * @instance * @example * var fiveMinutes = 5 * 1000 * 60; * recorder.setRecordingDuration(fiveMinutes, function() { * var blob = this.getBlob(); * video.src = this.toURL(); * }); * * // or otherwise * recorder.setRecordingDuration(fiveMinutes).onRecordingStopped(function() { * var blob = this.getBlob(); * video.src = this.toURL(); * }); */ setRecordingDuration: function(recordingDuration, callback) { if (typeof recordingDuration === 'undefined') { throw 'recordingDuration is required.'; } if (typeof recordingDuration !== 'number') { throw 'recordingDuration must be a number.'; } self.recordingDuration = recordingDuration; self.onRecordingStopped = callback || function() {}; return { onRecordingStopped: function(callback) { self.onRecordingStopped = callback; } }; }, /** * This method can be used to clear/reset all the recorded data. * @method * @memberof RecordRTC * @instance * @todo Figure out the difference between "reset" and "clearRecordedData" methods. * @example * recorder.clearRecordedData(); */ clearRecordedData: function() { if (!mediaRecorder) { warningLog(); return; } mediaRecorder.clearRecordedData(); if (!config.disableLogs) { console.log('Cleared old recorded data.'); } }, /** * Get the recorded blob. Use this method inside the "stopRecording" callback. * @method * @memberof RecordRTC * @instance * @example * recorder.stopRecording(function() { * var blob = this.getBlob(); * * var file = new File([blob], 'filename.webm', { * type: 'video/webm' * }); * * var formData = new FormData(); * formData.append('file', file); // upload "File" object rather than a "Blob" * uploadToServer(formData); * }); * @returns {Blob} Returns recorded data as "Blob" object. */ getBlob: function() { if (!mediaRecorder) { warningLog(); return; } return mediaRecorder.blob; }, /** * Get data-URI instead of Blob. * @param {function} callback - Callback to get the Data-URI. * @method * @memberof RecordRTC * @instance * @example * recorder.stopRecording(function() { * recorder.getDataURL(function(dataURI) { * video.src = dataURI; * }); * }); */ getDataURL: getDataURL, /** * Get virtual/temporary URL. Usage of this URL is limited to current tab. * @method * @memberof RecordRTC * @instance * @example * recorder.stopRecording(function() { * video.src = this.toURL(); * }); * @returns {String} Returns a virtual/temporary URL for the recorded "Blob". */ toURL: function() { if (!mediaRecorder) { warningLog(); return; } return URL.createObjectURL(mediaRecorder.blob); }, /** * Get internal recording object (i.e. internal module) e.g. MutliStreamRecorder, MediaStreamRecorder, StereoAudioRecorder or WhammyRecorder etc. * @method * @memberof RecordRTC * @instance * @example * var internalRecorder = recorder.getInternalRecorder(); * if(internalRecorder instanceof MultiStreamRecorder) { * internalRecorder.addStreams([newAudioStream]); * internalRecorder.resetVideoStreams([screenStream]); * } * @returns {Object} Returns internal recording object. */ getInternalRecorder: function() { return mediaRecorder; }, /** * Invoke save-as dialog to save the recorded blob into your disk. * @param {string} fileName - Set your own file name. 
* @method * @memberof RecordRTC * @instance * @example * recorder.stopRecording(function() { * this.save('file-name'); * * // or manually: * invokeSaveAsDialog(this.getBlob(), 'filename.webm'); * }); */ save: function(fileName) { if (!mediaRecorder) { warningLog(); return; } invokeSaveAsDialog(mediaRecorder.blob, fileName); }, /** * This method gets a blob from indexed-DB storage. * @param {function} callback - Callback to get the recorded blob. * @method * @memberof RecordRTC * @instance * @example * recorder.getFromDisk(function(dataURL) { * video.src = dataURL; * }); */ getFromDisk: function(callback) { if (!mediaRecorder) { warningLog(); return; } RecordRTC.getFromDisk(config.type, callback); }, /** * This method appends an array of webp images to the recorded video-blob. It takes an "array" object. * @type {Array.} * @param {Array} arrayOfWebPImages - Array of webp images. * @method * @memberof RecordRTC * @instance * @todo This method should be deprecated. * @example * var arrayOfWebPImages = []; * arrayOfWebPImages.push({ * duration: index, * image: 'data:image/webp;base64,...' * }); * recorder.setAdvertisementArray(arrayOfWebPImages); */ setAdvertisementArray: function(arrayOfWebPImages) { config.advertisement = []; var length = arrayOfWebPImages.length; for (var i = 0; i < length; i++) { config.advertisement.push({ duration: i, image: arrayOfWebPImages[i] }); } }, /** * It is equivalent to "recorder.getBlob()" method. Usage of "getBlob" is recommended, though. * @property {Blob} blob - Recorded Blob can be accessed using this property. * @memberof RecordRTC * @instance * @readonly * @example * recorder.stopRecording(function() { * var blob = this.blob; * * // below one is recommended * var blob = this.getBlob(); * }); */ blob: null, /** * This works only with {recorderType:StereoAudioRecorder}. Use this property on "stopRecording" to verify the encoder's sample-rates. * @property {number} bufferSize - Buffer-size used to encode the WAV container * @memberof RecordRTC * @instance * @readonly * @example * recorder.stopRecording(function() { * alert('Recorder used this buffer-size: ' + this.bufferSize); * }); */ bufferSize: 0, /** * This works only with {recorderType:StereoAudioRecorder}. Use this property on "stopRecording" to verify the encoder's sample-rates. * @property {number} sampleRate - Sample-rates used to encode the WAV container * @memberof RecordRTC * @instance * @readonly * @example * recorder.stopRecording(function() { * alert('Recorder used these sample-rates: ' + this.sampleRate); * }); */ sampleRate: 0, /** * {recorderType:StereoAudioRecorder} returns ArrayBuffer object. * @property {ArrayBuffer} buffer - Audio ArrayBuffer, supported only in Chrome. * @memberof RecordRTC * @instance * @readonly * @example * recorder.stopRecording(function() { * var arrayBuffer = this.buffer; * alert(arrayBuffer.byteLength); * }); */ buffer: null, /** * This method resets the recorder. So that you can reuse single recorder instance many times. * @method * @memberof RecordRTC * @instance * @example * recorder.reset(); * recorder.startRecording(); */ reset: function() { if (self.state === 'recording' && !config.disableLogs) { console.warn('Stop an active recorder.'); } if (mediaRecorder && typeof mediaRecorder.clearRecordedData === 'function') { mediaRecorder.clearRecordedData(); } mediaRecorder = null; setState('inactive'); self.blob = null; }, /** * This method is called whenever recorder's state changes. Use this as an "event". 
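     * The value passed in matches the "state" property documented below: 'inactive', 'recording',
     * 'paused', 'stopped', and, once destroy() has run, 'destroyed'. A small sketch (names as in the
     * surrounding examples) reacting to the recorder being stopped:
     * @example
     * recorder.onStateChanged = function(state) {
     *     if (state === 'stopped') {
     *         console.log('recording stopped');
     *     }
     * };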
* @property {String} state - A recorder's state can be: recording, paused, stopped or inactive. * @method * @memberof RecordRTC * @instance * @example * recorder.onStateChanged = function(state) { * console.log('Recorder state: ', state); * }; */ onStateChanged: function(state) { if (!config.disableLogs) { console.log('Recorder state changed:', state); } }, /** * A recorder can have inactive, recording, paused or stopped states. * @property {String} state - A recorder's state can be: recording, paused, stopped or inactive. * @memberof RecordRTC * @static * @readonly * @example * // this looper function will keep you updated about the recorder's states. * (function looper() { * document.querySelector('h1').innerHTML = 'Recorder\'s state is: ' + recorder.state; * if(recorder.state === 'stopped') return; // ignore+stop * setTimeout(looper, 1000); // update after every 3-seconds * })(); * recorder.startRecording(); */ state: 'inactive', /** * Get recorder's readonly state. * @method * @memberof RecordRTC * @example * var state = recorder.getState(); * @returns {String} Returns recording state. */ getState: function() { return self.state; }, /** * Destroy RecordRTC instance. Clear all recorders and objects. * @method * @memberof RecordRTC * @example * recorder.destroy(); */ destroy: function() { var disableLogsCache = config.disableLogs; config = { disableLogs: true }; self.reset(); setState('destroyed'); returnObject = self = null; if (Storage.AudioContextConstructor) { Storage.AudioContextConstructor.close(); Storage.AudioContextConstructor = null; } config.disableLogs = disableLogsCache; if (!config.disableLogs) { console.log('RecordRTC is destroyed.'); } }, /** * RecordRTC version number * @property {String} version - Release version number. * @memberof RecordRTC * @static * @readonly * @example * alert(recorder.version); */ version: '5.6.2' }; if (!this) { self = returnObject; return returnObject; } // if someone wants to use RecordRTC with the "new" keyword. for (var prop in returnObject) { this[prop] = returnObject[prop]; } self = this; return returnObject; } RecordRTC.version = '5.6.2'; { module.exports = RecordRTC; } RecordRTC.getFromDisk = function(type, callback) { if (!callback) { throw 'callback is mandatory.'; } console.log('Getting recorded ' + (type === 'all' ? 'blobs' : type + ' blob ') + ' from disk!'); DiskStorage.Fetch(function(dataURL, _type) { if (type !== 'all' && _type === type + 'Blob' && callback) { callback(dataURL); } if (type === 'all' && callback) { callback(dataURL, _type.replace('Blob', '')); } }); }; /** * This method can be used to store recorded blobs into IndexedDB storage. 
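     * Note: the implementation below calls "getDataURL()" on each value, so the entries are expected
     * to be recorder objects exposing that method (e.g. RecordRTC instances) rather than raw Blob
     * objects. A minimal sketch, assuming "audioRecorder" and "videoRecorder" are two already-stopped
     * RecordRTC instances:
     * @example
     * audioRecorder.stopRecording(function() {
     *     videoRecorder.stopRecording(function() {
     *         RecordRTC.writeToDisk({
     *             audio: audioRecorder,
     *             video: videoRecorder
     *         });
     *     });
     * });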
* @param {object} options - {audio: Blob, video: Blob, gif: Blob} * @method * @memberof RecordRTC * @example * RecordRTC.writeToDisk({ * audio: audioBlob, * video: videoBlob, * gif : gifBlob * }); */ RecordRTC.writeToDisk = function(options) { console.log('Writing recorded blob(s) to disk!'); options = options || {}; if (options.audio && options.video && options.gif) { options.audio.getDataURL(function(audioDataURL) { options.video.getDataURL(function(videoDataURL) { options.gif.getDataURL(function(gifDataURL) { DiskStorage.Store({ audioBlob: audioDataURL, videoBlob: videoDataURL, gifBlob: gifDataURL }); }); }); }); } else if (options.audio && options.video) { options.audio.getDataURL(function(audioDataURL) { options.video.getDataURL(function(videoDataURL) { DiskStorage.Store({ audioBlob: audioDataURL, videoBlob: videoDataURL }); }); }); } else if (options.audio && options.gif) { options.audio.getDataURL(function(audioDataURL) { options.gif.getDataURL(function(gifDataURL) { DiskStorage.Store({ audioBlob: audioDataURL, gifBlob: gifDataURL }); }); }); } else if (options.video && options.gif) { options.video.getDataURL(function(videoDataURL) { options.gif.getDataURL(function(gifDataURL) { DiskStorage.Store({ videoBlob: videoDataURL, gifBlob: gifDataURL }); }); }); } else if (options.audio) { options.audio.getDataURL(function(audioDataURL) { DiskStorage.Store({ audioBlob: audioDataURL }); }); } else if (options.video) { options.video.getDataURL(function(videoDataURL) { DiskStorage.Store({ videoBlob: videoDataURL }); }); } else if (options.gif) { options.gif.getDataURL(function(gifDataURL) { DiskStorage.Store({ gifBlob: gifDataURL }); }); } }; // __________________________ // RecordRTC-Configuration.js /** * {@link RecordRTCConfiguration} is an inner/private helper for {@link RecordRTC}. * @summary It configures the 2nd parameter passed over {@link RecordRTC} and returns a valid "config" object. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef RecordRTCConfiguration * @class * @example * var options = RecordRTCConfiguration(mediaStream, options); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API. 
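 * A small sketch of the type inference performed below (the "stream" name is illustrative): with both
 * audio and video requested the type settles on 'video', with audio only it settles on 'audio', and
 * when only a mimeType is given the type is recovered from its "type/subtype" prefix.
 * @example
 * var cfg = RecordRTCConfiguration(stream, { audio: true, video: true });
 * // cfg.type === 'video'
 * var audioCfg = RecordRTCConfiguration(stream, { mimeType: 'audio/webm' });
 * // audioCfg.type === 'audio'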
* @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, getNativeBlob:true, etc.} */ function RecordRTCConfiguration(mediaStream, config) { if (!config.recorderType && !config.type) { if (!!config.audio && !!config.video) { config.type = 'video'; } else if (!!config.audio && !config.video) { config.type = 'audio'; } } if (config.recorderType && !config.type) { if (config.recorderType === WhammyRecorder || config.recorderType === CanvasRecorder || (typeof WebAssemblyRecorder !== 'undefined' && config.recorderType === WebAssemblyRecorder)) { config.type = 'video'; } else if (config.recorderType === GifRecorder) { config.type = 'gif'; } else if (config.recorderType === StereoAudioRecorder) { config.type = 'audio'; } else if (config.recorderType === MediaStreamRecorder) { if (getTracks(mediaStream, 'audio').length && getTracks(mediaStream, 'video').length) { config.type = 'video'; } else if (!getTracks(mediaStream, 'audio').length && getTracks(mediaStream, 'video').length) { config.type = 'video'; } else if (getTracks(mediaStream, 'audio').length && !getTracks(mediaStream, 'video').length) { config.type = 'audio'; } else ; } } if (typeof MediaStreamRecorder !== 'undefined' && typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype) { if (!config.mimeType) { config.mimeType = 'video/webm'; } if (!config.type) { config.type = config.mimeType.split('/')[0]; } if (!config.bitsPerSecond) ; } // consider default type=audio if (!config.type) { if (config.mimeType) { config.type = config.mimeType.split('/')[0]; } if (!config.type) { config.type = 'audio'; } } return config; } // __________________ // GetRecorderType.js /** * {@link GetRecorderType} is an inner/private helper for {@link RecordRTC}. * @summary It returns best recorder-type available for your browser. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef GetRecorderType * @class * @example * var RecorderType = GetRecorderType(options); * var recorder = new RecorderType(options); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API. * @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.} */ function GetRecorderType(mediaStream, config) { var recorder; // StereoAudioRecorder can work with all three: Edge, Firefox and Chrome // todo: detect if it is Edge, then auto use: StereoAudioRecorder if (isChrome || isEdge || isOpera) { // Media Stream Recording API has not been implemented in chrome yet; // That's why using WebAudio API to record stereo audio in WAV format recorder = StereoAudioRecorder; } if (typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype && !isChrome) { recorder = MediaStreamRecorder; } // video recorder (in WebM format) if (config.type === 'video' && (isChrome || isOpera)) { recorder = WhammyRecorder; if (typeof WebAssemblyRecorder !== 'undefined' && typeof ReadableStream !== 'undefined') { recorder = WebAssemblyRecorder; } } // video recorder (in Gif format) if (config.type === 'gif') { recorder = GifRecorder; } // html2canvas recording! 
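/*
 * Rough shape of the selection in this function (a sketch; the final result depends on the browser
 * checks above and below): Chrome/Edge/Opera start from StereoAudioRecorder; non-Chrome browsers with
 * a full MediaRecorder get MediaStreamRecorder; type:'video' on Chrome/Opera is first pointed at
 * WhammyRecorder or WebAssemblyRecorder; type:'gif' maps to GifRecorder and type:'canvas' to
 * CanvasRecorder; the MediaRecorder-compatibility block below then upgrades audio/video to
 * MediaStreamRecorder when the relevant webm mimeType is supported; an array of streams forces
 * MultiStreamRecorder; Safari falls back to MediaStreamRecorder; and an explicit config.recorderType
 * always wins. For example, an audio-only capture on a recent Chrome typically resolves to
 * MediaStreamRecorder because MediaRecorder.isTypeSupported('audio/webm') returns true there
 * ("audioOnlyStream" below is just a placeholder for a getUserMedia audio stream):
 *
 *     var RecorderType = GetRecorderType(audioOnlyStream, { type: 'audio' });
 *     var recorder = new RecorderType(audioOnlyStream, { type: 'audio' });
 */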
if (config.type === 'canvas') { recorder = CanvasRecorder; } if (isMediaRecorderCompatible() && recorder !== CanvasRecorder && recorder !== GifRecorder && typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype) { if (getTracks(mediaStream, 'video').length || getTracks(mediaStream, 'audio').length) { // audio-only recording if (config.type === 'audio') { if (typeof MediaRecorder.isTypeSupported === 'function' && MediaRecorder.isTypeSupported('audio/webm')) { recorder = MediaStreamRecorder; } // else recorder = StereoAudioRecorder; } else { // video or screen tracks if (typeof MediaRecorder.isTypeSupported === 'function' && MediaRecorder.isTypeSupported('video/webm')) { recorder = MediaStreamRecorder; } } } } if (mediaStream instanceof Array && mediaStream.length) { recorder = MultiStreamRecorder; } if (config.recorderType) { recorder = config.recorderType; } if (!config.disableLogs && !!recorder && !!recorder.name) { console.log('Using recorderType:', recorder.name || recorder.constructor.name); } if (!recorder && isSafari) { recorder = MediaStreamRecorder; } return recorder; } // _____________ // MRecordRTC.js /** * MRecordRTC runs on top of {@link RecordRTC} to bring multiple recordings in a single place, by providing simple API. * @summary MRecordRTC stands for "Multiple-RecordRTC". * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef MRecordRTC * @class * @example * var recorder = new MRecordRTC(); * recorder.addStream(MediaStream); * recorder.mediaType = { * audio: true, // or StereoAudioRecorder or MediaStreamRecorder * video: true, // or WhammyRecorder or MediaStreamRecorder or WebAssemblyRecorder or CanvasRecorder * gif: true // or GifRecorder * }; * // mimeType is optional and should be set only in advance cases. * recorder.mimeType = { * audio: 'audio/wav', * video: 'video/webm', * gif: 'image/gif' * }; * recorder.startRecording(); * @see For further information: * @see {@link https://github.com/muaz-khan/RecordRTC/tree/master/MRecordRTC|MRecordRTC Source Code} * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API. * @requires {@link RecordRTC} */ function MRecordRTC(mediaStream) { /** * This method attaches MediaStream object to {@link MRecordRTC}. * @param {MediaStream} mediaStream - A MediaStream object, either fetched using getUserMedia API, or generated using captureStreamUntilEnded or WebAudio API. * @method * @memberof MRecordRTC * @example * recorder.addStream(MediaStream); */ this.addStream = function(_mediaStream) { if (_mediaStream) { mediaStream = _mediaStream; } }; /** * This property can be used to set the recording type e.g. audio, or video, or gif, or canvas. * @property {object} mediaType - {audio: true, video: true, gif: true} * @memberof MRecordRTC * @example * var recorder = new MRecordRTC(); * recorder.mediaType = { * audio: true, // TRUE or StereoAudioRecorder or MediaStreamRecorder * video: true, // TRUE or WhammyRecorder or MediaStreamRecorder or WebAssemblyRecorder or CanvasRecorder * gif : true // TRUE or GifRecorder * }; */ this.mediaType = { audio: true, video: true }; /** * This method starts recording. 
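     * A slightly fuller sketch (assuming "stream" is a getUserMedia MediaStream carrying both audio
     * and video tracks):
     * @example
     * var recorder = new MRecordRTC();
     * recorder.addStream(stream);
     * recorder.mediaType = { audio: true, video: true };
     * recorder.startRecording();
     * // ... some time later ...
     * recorder.stopRecording(function(blobURL, type) {
     *     console.log('finished recording', type, blobURL);
     * });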
* @method * @memberof MRecordRTC * @example * recorder.startRecording(); */ this.startRecording = function() { var mediaType = this.mediaType; var recorderType; var mimeType = this.mimeType || { audio: null, video: null, gif: null }; if (typeof mediaType.audio !== 'function' && isMediaRecorderCompatible() && !getTracks(mediaStream, 'audio').length) { mediaType.audio = false; } if (typeof mediaType.video !== 'function' && isMediaRecorderCompatible() && !getTracks(mediaStream, 'video').length) { mediaType.video = false; } if (typeof mediaType.gif !== 'function' && isMediaRecorderCompatible() && !getTracks(mediaStream, 'video').length) { mediaType.gif = false; } if (!mediaType.audio && !mediaType.video && !mediaType.gif) { throw 'MediaStream must have either audio or video tracks.'; } if (!!mediaType.audio) { recorderType = null; if (typeof mediaType.audio === 'function') { recorderType = mediaType.audio; } this.audioRecorder = new RecordRTC(mediaStream, { type: 'audio', bufferSize: this.bufferSize, sampleRate: this.sampleRate, numberOfAudioChannels: this.numberOfAudioChannels || 2, disableLogs: this.disableLogs, recorderType: recorderType, mimeType: mimeType.audio, timeSlice: this.timeSlice, onTimeStamp: this.onTimeStamp }); if (!mediaType.video) { this.audioRecorder.startRecording(); } } if (!!mediaType.video) { recorderType = null; if (typeof mediaType.video === 'function') { recorderType = mediaType.video; } var newStream = mediaStream; if (isMediaRecorderCompatible() && !!mediaType.audio && typeof mediaType.audio === 'function') { var videoTrack = getTracks(mediaStream, 'video')[0]; if (isFirefox) { newStream = new MediaStream(); newStream.addTrack(videoTrack); if (recorderType && recorderType === WhammyRecorder) { // Firefox does NOT supports webp-encoding yet // But Firefox do supports WebAssemblyRecorder recorderType = MediaStreamRecorder; } } else { newStream = new MediaStream(); newStream.addTrack(videoTrack); } } this.videoRecorder = new RecordRTC(newStream, { type: 'video', video: this.video, canvas: this.canvas, frameInterval: this.frameInterval || 10, disableLogs: this.disableLogs, recorderType: recorderType, mimeType: mimeType.video, timeSlice: this.timeSlice, onTimeStamp: this.onTimeStamp, workerPath: this.workerPath, webAssemblyPath: this.webAssemblyPath, frameRate: this.frameRate, // used by WebAssemblyRecorder; values: usually 30; accepts any. 
bitrate: this.bitrate // used by WebAssemblyRecorder; values: 0 to 1000+ }); if (!mediaType.audio) { this.videoRecorder.startRecording(); } } if (!!mediaType.audio && !!mediaType.video) { var self = this; var isSingleRecorder = isMediaRecorderCompatible() === true; if (mediaType.audio instanceof StereoAudioRecorder && !!mediaType.video) { isSingleRecorder = false; } else if (mediaType.audio !== true && mediaType.video !== true && mediaType.audio !== mediaType.video) { isSingleRecorder = false; } if (isSingleRecorder === true) { self.audioRecorder = null; self.videoRecorder.startRecording(); } else { self.videoRecorder.initRecorder(function() { self.audioRecorder.initRecorder(function() { // Both recorders are ready to record things accurately self.videoRecorder.startRecording(); self.audioRecorder.startRecording(); }); }); } } if (!!mediaType.gif) { recorderType = null; if (typeof mediaType.gif === 'function') { recorderType = mediaType.gif; } this.gifRecorder = new RecordRTC(mediaStream, { type: 'gif', frameRate: this.frameRate || 200, quality: this.quality || 10, disableLogs: this.disableLogs, recorderType: recorderType, mimeType: mimeType.gif }); this.gifRecorder.startRecording(); } }; /** * This method stops recording. * @param {function} callback - Callback function is invoked when all encoders finished their jobs. * @method * @memberof MRecordRTC * @example * recorder.stopRecording(function(recording){ * var audioBlob = recording.audio; * var videoBlob = recording.video; * var gifBlob = recording.gif; * }); */ this.stopRecording = function(callback) { callback = callback || function() {}; if (this.audioRecorder) { this.audioRecorder.stopRecording(function(blobURL) { callback(blobURL, 'audio'); }); } if (this.videoRecorder) { this.videoRecorder.stopRecording(function(blobURL) { callback(blobURL, 'video'); }); } if (this.gifRecorder) { this.gifRecorder.stopRecording(function(blobURL) { callback(blobURL, 'gif'); }); } }; /** * This method pauses recording. * @method * @memberof MRecordRTC * @example * recorder.pauseRecording(); */ this.pauseRecording = function() { if (this.audioRecorder) { this.audioRecorder.pauseRecording(); } if (this.videoRecorder) { this.videoRecorder.pauseRecording(); } if (this.gifRecorder) { this.gifRecorder.pauseRecording(); } }; /** * This method resumes recording. * @method * @memberof MRecordRTC * @example * recorder.resumeRecording(); */ this.resumeRecording = function() { if (this.audioRecorder) { this.audioRecorder.resumeRecording(); } if (this.videoRecorder) { this.videoRecorder.resumeRecording(); } if (this.gifRecorder) { this.gifRecorder.resumeRecording(); } }; /** * This method can be used to manually get all recorded blobs. * @param {function} callback - All recorded blobs are passed back to the "callback" function. * @method * @memberof MRecordRTC * @example * recorder.getBlob(function(recording){ * var audioBlob = recording.audio; * var videoBlob = recording.video; * var gifBlob = recording.gif; * }); * // or * var audioBlob = recorder.getBlob().audio; * var videoBlob = recorder.getBlob().video; */ this.getBlob = function(callback) { var output = {}; if (this.audioRecorder) { output.audio = this.audioRecorder.getBlob(); } if (this.videoRecorder) { output.video = this.videoRecorder.getBlob(); } if (this.gifRecorder) { output.gif = this.gifRecorder.getBlob(); } if (callback) { callback(output); } return output; }; /** * Destroy all recorder instances. 
* @method * @memberof MRecordRTC * @example * recorder.destroy(); */ this.destroy = function() { if (this.audioRecorder) { this.audioRecorder.destroy(); this.audioRecorder = null; } if (this.videoRecorder) { this.videoRecorder.destroy(); this.videoRecorder = null; } if (this.gifRecorder) { this.gifRecorder.destroy(); this.gifRecorder = null; } }; /** * This method can be used to manually get all recorded blobs' DataURLs. * @param {function} callback - All recorded blobs' DataURLs are passed back to the "callback" function. * @method * @memberof MRecordRTC * @example * recorder.getDataURL(function(recording){ * var audioDataURL = recording.audio; * var videoDataURL = recording.video; * var gifDataURL = recording.gif; * }); */ this.getDataURL = function(callback) { this.getBlob(function(blob) { if (blob.audio && blob.video) { getDataURL(blob.audio, function(_audioDataURL) { getDataURL(blob.video, function(_videoDataURL) { callback({ audio: _audioDataURL, video: _videoDataURL }); }); }); } else if (blob.audio) { getDataURL(blob.audio, function(_audioDataURL) { callback({ audio: _audioDataURL }); }); } else if (blob.video) { getDataURL(blob.video, function(_videoDataURL) { callback({ video: _videoDataURL }); }); } }); function getDataURL(blob, callback00) { if (typeof Worker !== 'undefined') { var webWorker = processInWebWorker(function readFile(_blob) { postMessage(new FileReaderSync().readAsDataURL(_blob)); }); webWorker.onmessage = function(event) { callback00(event.data); }; webWorker.postMessage(blob); } else { var reader = new FileReader(); reader.readAsDataURL(blob); reader.onload = function(event) { callback00(event.target.result); }; } } function processInWebWorker(_function) { var blob = URL.createObjectURL(new Blob([_function.toString(), 'this.onmessage = function (eee) {' + _function.name + '(eee.data);}' ], { type: 'application/javascript' })); var worker = new Worker(blob); var url; if (typeof URL !== 'undefined') { url = URL; } else if (typeof webkitURL !== 'undefined') { url = webkitURL; } else { throw 'Neither URL nor webkitURL detected.'; } url.revokeObjectURL(blob); return worker; } }; /** * This method can be used to ask {@link MRecordRTC} to write all recorded blobs into IndexedDB storage. * @method * @memberof MRecordRTC * @example * recorder.writeToDisk(); */ this.writeToDisk = function() { RecordRTC.writeToDisk({ audio: this.audioRecorder, video: this.videoRecorder, gif: this.gifRecorder }); }; /** * This method can be used to invoke a save-as dialog for all recorded blobs. * @param {object} args - {audio: 'audio-name', video: 'video-name', gif: 'gif-name'} * @method * @memberof MRecordRTC * @example * recorder.save({ * audio: 'audio-file-name', * video: 'video-file-name', * gif : 'gif-file-name' * }); */ this.save = function(args) { args = args || { audio: true, video: true, gif: true }; if (!!args.audio && this.audioRecorder) { this.audioRecorder.save(typeof args.audio === 'string' ? args.audio : ''); } if (!!args.video && this.videoRecorder) { this.videoRecorder.save(typeof args.video === 'string' ? args.video : ''); } if (!!args.gif && this.gifRecorder) { this.gifRecorder.save(typeof args.gif === 'string' ? args.gif : ''); } }; } /** * This method can be used to get all recorded blobs from IndexedDB storage. * @param {string} type - 'all' or 'audio' or 'video' or 'gif' * @param {function} callback - Callback function to get all stored blobs. 
* @method * @memberof MRecordRTC * @example * MRecordRTC.getFromDisk('all', function(dataURL, type){ * if(type === 'audio') { } * if(type === 'video') { } * if(type === 'gif') { } * }); */ MRecordRTC.getFromDisk = RecordRTC.getFromDisk; /** * This method can be used to store recorded blobs into IndexedDB storage. * @param {object} options - {audio: Blob, video: Blob, gif: Blob} * @method * @memberof MRecordRTC * @example * MRecordRTC.writeToDisk({ * audio: audioBlob, * video: videoBlob, * gif : gifBlob * }); */ MRecordRTC.writeToDisk = RecordRTC.writeToDisk; if (typeof RecordRTC !== 'undefined') { RecordRTC.MRecordRTC = MRecordRTC; } var browserFakeUserAgent = 'Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45'; (function(that) { if (!that) { return; } if (typeof window !== 'undefined') { return; } if (typeof commonjsGlobal === 'undefined') { return; } commonjsGlobal.navigator = { userAgent: browserFakeUserAgent, getUserMedia: function() {} }; if (!commonjsGlobal.console) { commonjsGlobal.console = {}; } if (typeof commonjsGlobal.console.log === 'undefined' || typeof commonjsGlobal.console.error === 'undefined') { commonjsGlobal.console.error = commonjsGlobal.console.log = commonjsGlobal.console.log || function() { console.log(arguments); }; } if (typeof document === 'undefined') { /*global document:true */ that.document = { documentElement: { appendChild: function() { return ''; } } }; document.createElement = document.captureStream = document.mozCaptureStream = function() { var obj = { getContext: function() { return obj; }, play: function() {}, pause: function() {}, drawImage: function() {}, toDataURL: function() { return ''; }, style: {} }; return obj; }; that.HTMLVideoElement = function() {}; } if (typeof location === 'undefined') { /*global location:true */ that.location = { protocol: 'file:', href: '', hash: '' }; } if (typeof screen === 'undefined') { /*global screen:true */ that.screen = { width: 0, height: 0 }; } if (typeof URL === 'undefined') { /*global screen:true */ that.URL = { createObjectURL: function() { return ''; }, revokeObjectURL: function() { return ''; } }; } /*global window:true */ that.window = commonjsGlobal; })(typeof commonjsGlobal !== 'undefined' ? 
commonjsGlobal : null); // _____________________________ // Cross-Browser-Declarations.js // animation-frame used in WebM recording /*jshint -W079 */ var requestAnimationFrame = window.requestAnimationFrame; if (typeof requestAnimationFrame === 'undefined') { if (typeof webkitRequestAnimationFrame !== 'undefined') { /*global requestAnimationFrame:true */ requestAnimationFrame = webkitRequestAnimationFrame; } else if (typeof mozRequestAnimationFrame !== 'undefined') { /*global requestAnimationFrame:true */ requestAnimationFrame = mozRequestAnimationFrame; } else if (typeof msRequestAnimationFrame !== 'undefined') { /*global requestAnimationFrame:true */ requestAnimationFrame = msRequestAnimationFrame; } else if (typeof requestAnimationFrame === 'undefined') { // via: https://gist.github.com/paulirish/1579671 var lastTime = 0; /*global requestAnimationFrame:true */ requestAnimationFrame = function(callback, element) { var currTime = new Date().getTime(); var timeToCall = Math.max(0, 16 - (currTime - lastTime)); var id = setTimeout(function() { callback(currTime + timeToCall); }, timeToCall); lastTime = currTime + timeToCall; return id; }; } } /*jshint -W079 */ var cancelAnimationFrame = window.cancelAnimationFrame; if (typeof cancelAnimationFrame === 'undefined') { if (typeof webkitCancelAnimationFrame !== 'undefined') { /*global cancelAnimationFrame:true */ cancelAnimationFrame = webkitCancelAnimationFrame; } else if (typeof mozCancelAnimationFrame !== 'undefined') { /*global cancelAnimationFrame:true */ cancelAnimationFrame = mozCancelAnimationFrame; } else if (typeof msCancelAnimationFrame !== 'undefined') { /*global cancelAnimationFrame:true */ cancelAnimationFrame = msCancelAnimationFrame; } else if (typeof cancelAnimationFrame === 'undefined') { /*global cancelAnimationFrame:true */ cancelAnimationFrame = function(id) { clearTimeout(id); }; } } // WebAudio API representer var AudioContext = window.AudioContext; if (typeof AudioContext === 'undefined') { if (typeof webkitAudioContext !== 'undefined') { /*global AudioContext:true */ AudioContext = webkitAudioContext; } if (typeof mozAudioContext !== 'undefined') { /*global AudioContext:true */ AudioContext = mozAudioContext; } } /*jshint -W079 */ var URL = window.URL; if (typeof URL === 'undefined' && typeof webkitURL !== 'undefined') { /*global URL:true */ URL = webkitURL; } if (typeof navigator !== 'undefined' && typeof navigator.getUserMedia === 'undefined') { // maybe window.navigator? 
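/*
 * The assignments below only shim the legacy, prefixed navigator.getUserMedia entry points that this
 * file feature-detects. On current browsers a capture stream is normally obtained via the
 * promise-based API instead, e.g. (a sketch, not something this file calls itself):
 *
 *     navigator.mediaDevices.getUserMedia({ audio: true, video: true })
 *         .then(function(stream) {
 *             var recorder = RecordRTC(stream, { type: 'video' });
 *             recorder.startRecording();
 *         })
 *         .catch(function(error) { console.error('getUserMedia failed:', error); });
 */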
if (typeof navigator.webkitGetUserMedia !== 'undefined') { navigator.getUserMedia = navigator.webkitGetUserMedia; } if (typeof navigator.mozGetUserMedia !== 'undefined') { navigator.getUserMedia = navigator.mozGetUserMedia; } } var isEdge = navigator.userAgent.indexOf('Edge') !== -1 && (!!navigator.msSaveBlob || !!navigator.msSaveOrOpenBlob); var isOpera = !!window.opera || navigator.userAgent.indexOf('OPR/') !== -1; var isFirefox = navigator.userAgent.toLowerCase().indexOf('firefox') > -1 && ('netscape' in window) && / rv:/.test(navigator.userAgent); var isChrome = (!isOpera && !isEdge && !!navigator.webkitGetUserMedia) || isElectron() || navigator.userAgent.toLowerCase().indexOf('chrome/') !== -1; var isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent); if (isSafari && !isChrome && navigator.userAgent.indexOf('CriOS') !== -1) { isSafari = false; isChrome = true; } var MediaStream = window.MediaStream; if (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') { MediaStream = webkitMediaStream; } /*global MediaStream:true */ if (typeof MediaStream !== 'undefined') { // override "stop" method for all browsers
if (typeof MediaStream.prototype.stop === 'undefined') { MediaStream.prototype.stop = function() { this.getTracks().forEach(function(track) { track.stop(); }); }; } } // below function via: http://goo.gl/B3ae8c
/** * Return human-readable file size. * @param {number} bytes - Pass bytes and get formatted string. * @returns {string} - formatted string * @example * bytesToSize(1024*1024*5) === '5.24 MB' * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} */ function bytesToSize(bytes) { var k = 1000; var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']; if (bytes === 0) { return '0 Bytes'; } var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10); return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i]; }
/** * @param {Blob} file - File or Blob object. This parameter is required. * @param {string} fileName - Optional file name e.g. "Recorded-Video.webm" * @example * invokeSaveAsDialog(blob or file, [optional] fileName); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} */ function invokeSaveAsDialog(file, fileName) { if (!file) { throw 'Blob object is required.'; } if (!file.type) { try { file.type = 'video/webm'; } catch (e) {} } var fileExtension = (file.type || 'video/webm').split('/')[1]; if (fileExtension.indexOf(';') !== -1) { // extended mimetype, e.g. 'video/webm;codecs=vp8,opus'
fileExtension = fileExtension.split(';')[0]; } if (fileName && fileName.indexOf('.') !== -1) { var splitted = fileName.split('.'); fileName = splitted[0]; fileExtension = splitted[1]; } var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.'
+ fileExtension; if (typeof navigator.msSaveOrOpenBlob !== 'undefined') { return navigator.msSaveOrOpenBlob(file, fileFullName); } else if (typeof navigator.msSaveBlob !== 'undefined') { return navigator.msSaveBlob(file, fileFullName); } var hyperlink = document.createElement('a'); hyperlink.href = URL.createObjectURL(file); hyperlink.download = fileFullName; hyperlink.style = 'display:none;opacity:0;color:transparent;'; (document.body || document.documentElement).appendChild(hyperlink); if (typeof hyperlink.click === 'function') { hyperlink.click(); } else { hyperlink.target = '_blank'; hyperlink.dispatchEvent(new MouseEvent('click', { view: window, bubbles: true, cancelable: true })); } URL.revokeObjectURL(hyperlink.href); } /** * from: https://github.com/cheton/is-electron/blob/master/index.js **/ function isElectron() { // Renderer process if (typeof window !== 'undefined' && typeof window.process === 'object' && window.process.type === 'renderer') { return true; } // Main process if (typeof process !== 'undefined' && typeof process.versions === 'object' && !!process.versions.electron) { return true; } // Detect the user agent when the `nodeIntegration` option is set to true if (typeof navigator === 'object' && typeof navigator.userAgent === 'string' && navigator.userAgent.indexOf('Electron') >= 0) { return true; } return false; } function getTracks(stream, kind) { if (!stream || !stream.getTracks) { return []; } return stream.getTracks().filter(function(t) { return t.kind === (kind || 'audio'); }); } function setSrcObject(stream, element) { if ('srcObject' in element) { element.srcObject = stream; } else if ('mozSrcObject' in element) { element.mozSrcObject = stream; } else { element.srcObject = stream; } } /** * @param {Blob} file - File or Blob object. * @param {function} callback - Callback function. * @example * getSeekableBlob(blob or file, callback); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} */ function getSeekableBlob(inputBlob, callback) { // EBML.js copyrights goes to: https://github.com/legokichi/ts-ebml if (typeof EBML === 'undefined') { throw new Error('Please link: https://www.webrtc-experiment.com/EBML.js'); } var reader = new EBML.Reader(); var decoder = new EBML.Decoder(); var tools = EBML.tools; var fileReader = new FileReader(); fileReader.onload = function(e) { var ebmlElms = decoder.decode(this.result); ebmlElms.forEach(function(element) { reader.read(element); }); reader.stop(); var refinedMetadataBuf = tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues); var body = this.result.slice(reader.metadataSize); var newBlob = new Blob([refinedMetadataBuf, body], { type: 'video/webm' }); callback(newBlob); }; fileReader.readAsArrayBuffer(inputBlob); } if (typeof RecordRTC !== 'undefined') { RecordRTC.invokeSaveAsDialog = invokeSaveAsDialog; RecordRTC.getTracks = getTracks; RecordRTC.getSeekableBlob = getSeekableBlob; RecordRTC.bytesToSize = bytesToSize; RecordRTC.isElectron = isElectron; } // __________ (used to handle stuff like http://goo.gl/xmE5eg) issue #129 // Storage.js /** * Storage is a standalone object used by {@link RecordRTC} to store reusable objects e.g. "new AudioContext". * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @example * Storage.AudioContext === webkitAudioContext * @property {webkitAudioContext} AudioContext - Keeps a reference to AudioContext object. 
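 * The StereoAudioRecorder further below reuses a single shared AudioContext through this object,
 * roughly along these lines:
 * @example
 * if (!Storage.AudioContextConstructor) {
 *     Storage.AudioContextConstructor = new Storage.AudioContext();
 * }
 * var context = Storage.AudioContextConstructor;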
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} */ var Storage = {}; if (typeof AudioContext !== 'undefined') { Storage.AudioContext = AudioContext; } else if (typeof webkitAudioContext !== 'undefined') { Storage.AudioContext = webkitAudioContext; } if (typeof RecordRTC !== 'undefined') { RecordRTC.Storage = Storage; } function isMediaRecorderCompatible() { if (isFirefox || isSafari || isEdge) { return true; } var nAgt = navigator.userAgent; var fullVersion = '' + parseFloat(navigator.appVersion); var majorVersion = parseInt(navigator.appVersion, 10); var verOffset, ix; if (isChrome || isOpera) { verOffset = nAgt.indexOf('Chrome'); fullVersion = nAgt.substring(verOffset + 7); } // trim the fullVersion string at semicolon/space if present
if ((ix = fullVersion.indexOf(';')) !== -1) { fullVersion = fullVersion.substring(0, ix); } if ((ix = fullVersion.indexOf(' ')) !== -1) { fullVersion = fullVersion.substring(0, ix); } majorVersion = parseInt('' + fullVersion, 10); if (isNaN(majorVersion)) { fullVersion = '' + parseFloat(navigator.appVersion); majorVersion = parseInt(navigator.appVersion, 10); } return majorVersion >= 49; } // ______________________ // MediaStreamRecorder.js
/** * MediaStreamRecorder is an abstraction layer for {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}. It is used by {@link RecordRTC} to record MediaStream(s) in both Chrome and Firefox. * @summary Runs on top of the {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://github.com/muaz-khan|Muaz Khan} * @typedef MediaStreamRecorder * @class * @example * var config = { * mimeType: 'video/webm', // vp8, vp9, h264, mkv, opus/vorbis * audioBitsPerSecond : 256 * 8 * 1024, * videoBitsPerSecond : 256 * 8 * 1024, * bitsPerSecond: 256 * 8 * 1024, // if this is provided, skip above two * checkForInactiveTracks: true, * timeSlice: 1000, // concatenate intervals based blobs * ondataavailable: function() {} // get intervals based blobs * } * var recorder = new MediaStreamRecorder(mediaStream, config); * recorder.record(); * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * * // or * var blob = recorder.blob; * }); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API. * @param {object} config - {disableLogs:true, initCallback: function, mimeType: "video/webm", timeSlice: 1000} * @throws Will throw an error if the first argument "MediaStream" is missing. Also throws an error if the "MediaRecorder API" is not supported by the browser. */ function MediaStreamRecorder(mediaStream, config) { var self = this; if (typeof mediaStream === 'undefined') { throw 'First argument "MediaStream" is required.'; } if (typeof MediaRecorder === 'undefined') { throw 'Your browser does not support the Media Recorder API. Please try other modules e.g. WhammyRecorder or StereoAudioRecorder.'; } config = config || { // bitsPerSecond: 256 * 8 * 1024,
mimeType: 'video/webm' }; if (config.type === 'audio') { if (getTracks(mediaStream, 'video').length && getTracks(mediaStream, 'audio').length) { var stream; if (!!navigator.mozGetUserMedia) { stream = new MediaStream(); stream.addTrack(getTracks(mediaStream, 'audio')[0]); } else { // webkitMediaStream
stream = new MediaStream(getTracks(mediaStream, 'audio')); } mediaStream = stream; } if (!config.mimeType || config.mimeType.toString().toLowerCase().indexOf('audio') === -1) { config.mimeType = isChrome ? 'audio/webm' : 'audio/ogg'; } if (config.mimeType && config.mimeType.toString().toLowerCase() !== 'audio/ogg' && !!navigator.mozGetUserMedia) { // forcing better codecs on Firefox (via #166)
config.mimeType = 'audio/ogg'; } } var arrayOfBlobs = [];
/** * This method returns an array of blobs. Use only with "timeSlice". It's useful for previewing the recording at any time, without using the "stop" method. * @method * @memberof MediaStreamRecorder * @example * var arrayOfBlobs = recorder.getArrayOfBlobs(); * @returns {Array} Returns array of recorded blobs. */ this.getArrayOfBlobs = function() { return arrayOfBlobs; };
/** * This method records MediaStream. * @method * @memberof MediaStreamRecorder * @example * recorder.record(); */ this.record = function() { // set defaults
self.blob = null; self.clearRecordedData(); self.timestamps = []; allStates = []; arrayOfBlobs = []; var recorderHints = config; if (!config.disableLogs) { console.log('Passing following config over MediaRecorder API.', recorderHints); } if (mediaRecorder) { // mandatory to make sure Firefox doesn't fail to record streams 3-4 times without reloading the page.
mediaRecorder = null; } if (isChrome && !isMediaRecorderCompatible()) { // to support video-only recording on stable
recorderHints = 'video/vp8'; } if (typeof MediaRecorder.isTypeSupported === 'function' && recorderHints.mimeType) { if (!MediaRecorder.isTypeSupported(recorderHints.mimeType)) { if (!config.disableLogs) { console.warn('MediaRecorder API seems unable to record mimeType:', recorderHints.mimeType); } recorderHints.mimeType = config.type === 'audio' ? 'audio/webm' : 'video/webm'; } } // using MediaRecorder API here
try { mediaRecorder = new MediaRecorder(mediaStream, recorderHints); // reset
config.mimeType = recorderHints.mimeType; } catch (e) { // chrome-based fallback
mediaRecorder = new MediaRecorder(mediaStream); } // old hack?
if (recorderHints.mimeType && !MediaRecorder.isTypeSupported && 'canRecordMimeType' in mediaRecorder && mediaRecorder.canRecordMimeType(recorderHints.mimeType) === false) { if (!config.disableLogs) { console.warn('MediaRecorder API seems unable to record mimeType:', recorderHints.mimeType); } } // Dispatching OnDataAvailable Handler
mediaRecorder.ondataavailable = function(e) { if (e.data) { allStates.push('ondataavailable: ' + bytesToSize(e.data.size)); } if (typeof config.timeSlice === 'number') { if (e.data && e.data.size) { arrayOfBlobs.push(e.data); updateTimeStamp(); if (typeof config.ondataavailable === 'function') { // intervals based blobs
var blob = config.getNativeBlob ?
e.data : new Blob([e.data], { type: getMimeType(recorderHints) }); config.ondataavailable(blob); } } return; } if (!e.data || !e.data.size || e.data.size < 100 || self.blob) { // make sure that stopRecording always gets fired, even if there is invalid data
if (self.recordingCallback) { self.recordingCallback(new Blob([], { type: getMimeType(recorderHints) })); self.recordingCallback = null; } return; } self.blob = config.getNativeBlob ? e.data : new Blob([e.data], { type: getMimeType(recorderHints) }); if (self.recordingCallback) { self.recordingCallback(self.blob); self.recordingCallback = null; } }; mediaRecorder.onstart = function() { allStates.push('started'); }; mediaRecorder.onpause = function() { allStates.push('paused'); }; mediaRecorder.onresume = function() { allStates.push('resumed'); }; mediaRecorder.onstop = function() { allStates.push('stopped'); }; mediaRecorder.onerror = function(error) { if (!error) { return; } if (!error.name) { error.name = 'UnknownError'; } allStates.push('error: ' + error); if (!config.disableLogs) { // via: https://w3c.github.io/mediacapture-record/MediaRecorder.html#exception-summary
if (error.name.toString().toLowerCase().indexOf('invalidstate') !== -1) { console.error('The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.', error); } else if (error.name.toString().toLowerCase().indexOf('notsupported') !== -1) { console.error('MIME type (', recorderHints.mimeType, ') is not supported.', error); } else if (error.name.toString().toLowerCase().indexOf('security') !== -1) { console.error('MediaRecorder security error', error); } // older code below
else if (error.name === 'OutOfMemory') { console.error('The UA has exhausted the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.', error); } else if (error.name === 'IllegalStreamModification') { console.error('A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.', error); } else if (error.name === 'OtherRecordingError') { console.error('Used for a fatal error other than those listed above. User agents SHOULD provide as much additional information as possible in the message attribute.', error); } else if (error.name === 'GenericError') { console.error('The UA cannot provide the codec or recording option that has been requested.', error); } else { console.error('MediaRecorder Error', error); } } (function looper() { if (!self.manuallyStopped && mediaRecorder && mediaRecorder.state === 'inactive') { delete config.timeslice; // 10 minutes, enough?
mediaRecorder.start(10 * 60 * 1000); return; } setTimeout(looper, 1000); })(); if (mediaRecorder.state !== 'inactive' && mediaRecorder.state !== 'stopped') { mediaRecorder.stop(); } }; if (typeof config.timeSlice === 'number') { updateTimeStamp(); mediaRecorder.start(config.timeSlice); } else { // default is 60 minutes; enough? 
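/* 3.6e+6 ms below is 60 * 60 * 1000 ms, i.e. one hour: without config.timeSlice the recorder is
   started with a one-hour timeslice, so a dataavailable event is only forced once per hour and, in
   practice, the blob is delivered when stop() is called. */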
// use config => {timeSlice: 1000} otherwise mediaRecorder.start(3.6e+6); } if (config.initCallback) { config.initCallback(); // old code } }; /** * @property {Array} timestamps - Array of time stamps * @memberof MediaStreamRecorder * @example * console.log(recorder.timestamps); */ this.timestamps = []; function updateTimeStamp() { self.timestamps.push(new Date().getTime()); if (typeof config.onTimeStamp === 'function') { config.onTimeStamp(self.timestamps[self.timestamps.length - 1], self.timestamps); } } function getMimeType(secondObject) { if (mediaRecorder && mediaRecorder.mimeType) { return mediaRecorder.mimeType; } return secondObject.mimeType || 'video/webm'; } /** * This method stops recording MediaStream. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee. * @method * @memberof MediaStreamRecorder * @example * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); */ this.stop = function(callback) { callback = callback || function() {}; self.manuallyStopped = true; // used inside the mediaRecorder.onerror if (!mediaRecorder) { return; } this.recordingCallback = callback; if (mediaRecorder.state === 'recording') { mediaRecorder.stop(); } if (typeof config.timeSlice === 'number') { setTimeout(function() { self.blob = new Blob(arrayOfBlobs, { type: getMimeType(config) }); self.recordingCallback(self.blob); }, 100); } }; /** * This method pauses the recording process. * @method * @memberof MediaStreamRecorder * @example * recorder.pause(); */ this.pause = function() { if (!mediaRecorder) { return; } if (mediaRecorder.state === 'recording') { mediaRecorder.pause(); } }; /** * This method resumes the recording process. * @method * @memberof MediaStreamRecorder * @example * recorder.resume(); */ this.resume = function() { if (!mediaRecorder) { return; } if (mediaRecorder.state === 'paused') { mediaRecorder.resume(); } }; /** * This method resets currently recorded data. * @method * @memberof MediaStreamRecorder * @example * recorder.clearRecordedData(); */ this.clearRecordedData = function() { if (mediaRecorder && mediaRecorder.state === 'recording') { self.stop(clearRecordedDataCB); } clearRecordedDataCB(); }; function clearRecordedDataCB() { arrayOfBlobs = []; mediaRecorder = null; self.timestamps = []; } // Reference to "MediaRecorder" object var mediaRecorder; /** * Access to native MediaRecorder API * @method * @memberof MediaStreamRecorder * @instance * @example * var internal = recorder.getInternalRecorder(); * internal.ondataavailable = function() {}; // override * internal.stream, internal.onpause, internal.onstop, etc. * @returns {Object} Returns internal recording object. */ this.getInternalRecorder = function() { return mediaRecorder; }; function isMediaStreamActive() { if ('active' in mediaStream) { if (!mediaStream.active) { return false; } } else if ('ended' in mediaStream) { // old hack if (mediaStream.ended) { return false; } } return true; } /** * @property {Blob} blob - Recorded data as "Blob" object. * @memberof MediaStreamRecorder * @example * recorder.stop(function() { * var blob = recorder.blob; * }); */ this.blob = null; /** * Get MediaRecorder readonly state. * @method * @memberof MediaStreamRecorder * @example * var state = recorder.getState(); * @returns {String} Returns recording state. 
*/ this.getState = function() { if (!mediaRecorder) { return 'inactive'; } return mediaRecorder.state || 'inactive'; }; // list of all recording states var allStates = []; /** * Get MediaRecorder all recording states. * @method * @memberof MediaStreamRecorder * @example * var state = recorder.getAllStates(); * @returns {Array} Returns all recording states */ this.getAllStates = function() { return allStates; }; // if any Track within the MediaStream is muted or not enabled at any time, // the browser will only record black frames // or silence since that is the content produced by the Track // so we need to stopRecording as soon as any single track ends. if (typeof config.checkForInactiveTracks === 'undefined') { config.checkForInactiveTracks = false; // disable to minimize CPU usage } var self = this; // this method checks if media stream is stopped // or if any track is ended. (function looper() { if (!mediaRecorder || config.checkForInactiveTracks === false) { return; } if (isMediaStreamActive() === false) { if (!config.disableLogs) { console.log('MediaStream seems stopped.'); } self.stop(); return; } setTimeout(looper, 1000); // check every second })(); // for debugging this.name = 'MediaStreamRecorder'; this.toString = function() { return this.name; }; } if (typeof RecordRTC !== 'undefined') { RecordRTC.MediaStreamRecorder = MediaStreamRecorder; } // source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js // https://github.com/mattdiamond/Recorderjs#license-mit // ______________________ // StereoAudioRecorder.js /** * StereoAudioRecorder is a standalone class used by {@link RecordRTC} to bring "stereo" audio-recording in chrome. * @summary JavaScript standalone object for stereo audio recording. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef StereoAudioRecorder * @class * @example * var recorder = new StereoAudioRecorder(MediaStream, { * sampleRate: 44100, * bufferSize: 4096 * }); * recorder.record(); * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API. * @param {object} config - {sampleRate: 44100, bufferSize: 4096, numberOfAudioChannels: 1, etc.} */ function StereoAudioRecorder(mediaStream, config) { if (!getTracks(mediaStream, 'audio').length) { throw 'Your stream has no audio tracks.'; } config = config || {}; var self = this; // variables var leftchannel = []; var rightchannel = []; var recording = false; var recordingLength = 0; var jsAudioNode; var numberOfAudioChannels = 2; /** * Set sample rates such as 8K or 16K. 
Reference: http://stackoverflow.com/a/28977136/552182 * @property {number} desiredSampRate - Desired sample rate in Hz (e.g. 16 * 1000 for 16 kHz); the captured audio is resampled to this rate * @memberof StereoAudioRecorder * @instance * @example * var recorder = StereoAudioRecorder(mediaStream, { * desiredSampRate: 16 * 1000 // 16000 Hz, i.e. 16 kHz * }); */ var desiredSampRate = config.desiredSampRate; // backward compatibility
if (config.leftChannel === true) { numberOfAudioChannels = 1; } if (config.numberOfAudioChannels === 1) { numberOfAudioChannels = 1; } if (!numberOfAudioChannels || numberOfAudioChannels < 1) { numberOfAudioChannels = 2; } if (!config.disableLogs) { console.log('StereoAudioRecorder is set to record number of channels: ' + numberOfAudioChannels); } // if any Track within the MediaStream is muted or not enabled at any time, // the browser will only record black frames // or silence since that is the content produced by the Track // so we need to stopRecording as soon as any single track ends.
if (typeof config.checkForInactiveTracks === 'undefined') { config.checkForInactiveTracks = true; } function isMediaStreamActive() { if (config.checkForInactiveTracks === false) { // always return "true"
return true; } if ('active' in mediaStream) { if (!mediaStream.active) { return false; } } else if ('ended' in mediaStream) { // old hack
if (mediaStream.ended) { return false; } } return true; }
/** * This method records MediaStream. * @method * @memberof StereoAudioRecorder * @example * recorder.record(); */ this.record = function() { if (isMediaStreamActive() === false) { throw 'Please make sure MediaStream is active.'; } resetVariables(); isAudioProcessStarted = isPaused = false; recording = true; if (typeof config.timeSlice !== 'undefined') { looper(); } }; function mergeLeftRightBuffers(config, callback) { function mergeAudioBuffers(config, cb) { var numberOfAudioChannels = config.numberOfAudioChannels; // todo: does "slice(0)" cause an extra copy? Should it be removed?
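/*
 * Shape of this worker function: each channel's recorded sample chunks are concatenated
 * (mergeBuffers), optionally resampled to desiredSampRate (interpolateArray), the two channels are
 * interleaved L,R,L,R..., and the result is written as 16-bit PCM behind a 44-byte WAV header, so
 * the output buffer ends up as 44 + 2 * interleavedLength bytes.
 */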
var leftBuffers = config.leftBuffers.slice(0); var rightBuffers = config.rightBuffers.slice(0); var sampleRate = config.sampleRate; var internalInterleavedLength = config.internalInterleavedLength; var desiredSampRate = config.desiredSampRate; if (numberOfAudioChannels === 2) { leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength); rightBuffers = mergeBuffers(rightBuffers, internalInterleavedLength); if (desiredSampRate) { leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate); rightBuffers = interpolateArray(rightBuffers, desiredSampRate, sampleRate); } } if (numberOfAudioChannels === 1) { leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength); if (desiredSampRate) { leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate); } } // set sample rate as desired sample rate if (desiredSampRate) { sampleRate = desiredSampRate; } // for changing the sampling rate, reference: // http://stackoverflow.com/a/28977136/552182 function interpolateArray(data, newSampleRate, oldSampleRate) { var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate)); var newData = []; var springFactor = Number((data.length - 1) / (fitCount - 1)); newData[0] = data[0]; for (var i = 1; i < fitCount - 1; i++) { var tmp = i * springFactor; var before = Number(Math.floor(tmp)).toFixed(); var after = Number(Math.ceil(tmp)).toFixed(); var atPoint = tmp - before; newData[i] = linearInterpolate(data[before], data[after], atPoint); } newData[fitCount - 1] = data[data.length - 1]; return newData; } function linearInterpolate(before, after, atPoint) { return before + (after - before) * atPoint; } function mergeBuffers(channelBuffer, rLength) { var result = new Float64Array(rLength); var offset = 0; var lng = channelBuffer.length; for (var i = 0; i < lng; i++) { var buffer = channelBuffer[i]; result.set(buffer, offset); offset += buffer.length; } return result; } function interleave(leftChannel, rightChannel) { var length = leftChannel.length + rightChannel.length; var result = new Float64Array(length); var inputIndex = 0; for (var index = 0; index < length;) { result[index++] = leftChannel[inputIndex]; result[index++] = rightChannel[inputIndex]; inputIndex++; } return result; } function writeUTFBytes(view, offset, string) { var lng = string.length; for (var i = 0; i < lng; i++) { view.setUint8(offset + i, string.charCodeAt(i)); } } // interleave both channels together var interleaved; if (numberOfAudioChannels === 2) { interleaved = interleave(leftBuffers, rightBuffers); } if (numberOfAudioChannels === 1) { interleaved = leftBuffers; } var interleavedLength = interleaved.length; // create wav file var resultingBufferLength = 44 + interleavedLength * 2; var buffer = new ArrayBuffer(resultingBufferLength); var view = new DataView(buffer); // RIFF chunk descriptor/identifier writeUTFBytes(view, 0, 'RIFF'); // RIFF chunk length // changed "44" to "36" via #401 view.setUint32(4, 36 + interleavedLength * 2, true); // RIFF type writeUTFBytes(view, 8, 'WAVE'); // format chunk identifier // FMT sub-chunk writeUTFBytes(view, 12, 'fmt '); // format chunk length view.setUint32(16, 16, true); // sample format (raw) view.setUint16(20, 1, true); // stereo (2 channels) view.setUint16(22, numberOfAudioChannels, true); // sample rate view.setUint32(24, sampleRate, true); // byte rate (sample rate * block align) view.setUint32(28, sampleRate * numberOfAudioChannels * 2, true); // block align (channel count * bytes per sample) view.setUint16(32, numberOfAudioChannels * 2, true); 
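/* Worked example for the two fields above, assuming 44100 Hz stereo 16-bit audio:
   byte rate = 44100 * 2 channels * 2 bytes = 176400; block align = 2 channels * 2 bytes = 4. */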
// bits per sample view.setUint16(34, 16, true); // data sub-chunk // data chunk identifier writeUTFBytes(view, 36, 'data'); // data chunk length view.setUint32(40, interleavedLength * 2, true); // write the PCM samples var lng = interleavedLength; var index = 44; var volume = 1; for (var i = 0; i < lng; i++) { view.setInt16(index, interleaved[i] * (0x7FFF * volume), true); index += 2; } if (cb) { return cb({ buffer: buffer, view: view }); } postMessage({ buffer: buffer, view: view }); } if (config.noWorker) { mergeAudioBuffers(config, function(data) { callback(data.buffer, data.view); }); return; } var webWorker = processInWebWorker(mergeAudioBuffers); webWorker.onmessage = function(event) { callback(event.data.buffer, event.data.view); // release memory URL.revokeObjectURL(webWorker.workerURL); // kill webworker (or Chrome will kill your page after ~25 calls) webWorker.terminate(); }; webWorker.postMessage(config); } function processInWebWorker(_function) { var workerURL = URL.createObjectURL(new Blob([_function.toString(), ';this.onmessage = function (eee) {' + _function.name + '(eee.data);}' ], { type: 'application/javascript' })); var worker = new Worker(workerURL); worker.workerURL = workerURL; return worker; } /** * This method stops recording MediaStream. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee. * @method * @memberof StereoAudioRecorder * @example * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); */ this.stop = function(callback) { callback = callback || function() {}; // stop recording recording = false; mergeLeftRightBuffers({ desiredSampRate: desiredSampRate, sampleRate: sampleRate, numberOfAudioChannels: numberOfAudioChannels, internalInterleavedLength: recordingLength, leftBuffers: leftchannel, rightBuffers: numberOfAudioChannels === 1 ? [] : rightchannel, noWorker: config.noWorker }, function(buffer, view) { /** * @property {Blob} blob - The recorded blob object. * @memberof StereoAudioRecorder * @example * recorder.stop(function(){ * var blob = recorder.blob; * }); */ self.blob = new Blob([view], { type: 'audio/wav' }); /** * @property {ArrayBuffer} buffer - The recorded buffer object. * @memberof StereoAudioRecorder * @example * recorder.stop(function(){ * var buffer = recorder.buffer; * }); */ self.buffer = new ArrayBuffer(view.buffer.byteLength); /** * @property {DataView} view - The recorded data-view object. 
* @memberof StereoAudioRecorder * @example * recorder.stop(function(){ * var view = recorder.view; * }); */ self.view = view; self.sampleRate = desiredSampRate || sampleRate; self.bufferSize = bufferSize; // recorded audio length self.length = recordingLength; isAudioProcessStarted = false; if (callback) { callback(self.blob); } }); }; if (typeof RecordRTC.Storage === 'undefined') { RecordRTC.Storage = { AudioContextConstructor: null, AudioContext: window.AudioContext || window.webkitAudioContext }; } if (!RecordRTC.Storage.AudioContextConstructor || RecordRTC.Storage.AudioContextConstructor.state === 'closed') { RecordRTC.Storage.AudioContextConstructor = new RecordRTC.Storage.AudioContext(); } var context = RecordRTC.Storage.AudioContextConstructor; // creates an audio node from the microphone incoming stream var audioInput = context.createMediaStreamSource(mediaStream); var legalBufferValues = [0, 256, 512, 1024, 2048, 4096, 8192, 16384]; /** * From the spec: This value controls how frequently the audioprocess event is * dispatched and how many sample-frames need to be processed each call. * Lower values for buffer size will result in a lower (better) latency. * Higher values will be necessary to avoid audio breakup and glitches * The size of the buffer (in sample-frames) which needs to * be processed each time onprocessaudio is called. * Legal values are (256, 512, 1024, 2048, 4096, 8192, 16384). * @property {number} bufferSize - Buffer-size for how frequently the audioprocess event is dispatched. * @memberof StereoAudioRecorder * @example * recorder = new StereoAudioRecorder(mediaStream, { * bufferSize: 4096 * }); */ // "0" means, let chrome decide the most accurate buffer-size for current platform. var bufferSize = typeof config.bufferSize === 'undefined' ? 4096 : config.bufferSize; if (legalBufferValues.indexOf(bufferSize) === -1) { if (!config.disableLogs) { console.log('Legal values for buffer-size are ' + JSON.stringify(legalBufferValues, null, '\t')); } } if (context.createJavaScriptNode) { jsAudioNode = context.createJavaScriptNode(bufferSize, numberOfAudioChannels, numberOfAudioChannels); } else if (context.createScriptProcessor) { jsAudioNode = context.createScriptProcessor(bufferSize, numberOfAudioChannels, numberOfAudioChannels); } else { throw 'WebAudio API has no support on this browser.'; } // connect the stream to the script processor audioInput.connect(jsAudioNode); if (!config.bufferSize) { bufferSize = jsAudioNode.bufferSize; // device buffer-size } /** * The sample rate (in sample-frames per second) at which the * AudioContext handles audio. It is assumed that all AudioNodes * in the context run at this rate. In making this assumption, * sample-rate converters or "varispeed" processors are not supported * in real-time processing. * The sampleRate parameter describes the sample-rate of the * linear PCM audio data in the buffer in sample-frames per second. * An implementation must support sample-rates in at least * the range 22050 to 96000. * @property {number} sampleRate - Buffer-size for how frequently the audioprocess event is dispatched. * @memberof StereoAudioRecorder * @example * recorder = new StereoAudioRecorder(mediaStream, { * sampleRate: 44100 * }); */ var sampleRate = typeof config.sampleRate !== 'undefined' ? 
config.sampleRate : context.sampleRate || 44100; if (sampleRate < 22050 || sampleRate > 96000) { // Ref: http://stackoverflow.com/a/26303918/552182 if (!config.disableLogs) { console.log('sample-rate must be under range 22050 and 96000.'); } } if (!config.disableLogs) { if (config.desiredSampRate) { console.log('Desired sample-rate: ' + config.desiredSampRate); } } var isPaused = false; /** * This method pauses the recording process. * @method * @memberof StereoAudioRecorder * @example * recorder.pause(); */ this.pause = function() { isPaused = true; }; /** * This method resumes the recording process. * @method * @memberof StereoAudioRecorder * @example * recorder.resume(); */ this.resume = function() { if (isMediaStreamActive() === false) { throw 'Please make sure MediaStream is active.'; } if (!recording) { if (!config.disableLogs) { console.log('Seems recording has been restarted.'); } this.record(); return; } isPaused = false; }; /** * This method resets currently recorded data. * @method * @memberof StereoAudioRecorder * @example * recorder.clearRecordedData(); */ this.clearRecordedData = function() { config.checkForInactiveTracks = false; if (recording) { this.stop(clearRecordedDataCB); } clearRecordedDataCB(); }; function resetVariables() { leftchannel = []; rightchannel = []; recordingLength = 0; isAudioProcessStarted = false; recording = false; isPaused = false; context = null; self.leftchannel = leftchannel; self.rightchannel = rightchannel; self.numberOfAudioChannels = numberOfAudioChannels; self.desiredSampRate = desiredSampRate; self.sampleRate = sampleRate; self.recordingLength = recordingLength; intervalsBasedBuffers = { left: [], right: [], recordingLength: 0 }; } function clearRecordedDataCB() { if (jsAudioNode) { jsAudioNode.onaudioprocess = null; jsAudioNode.disconnect(); jsAudioNode = null; } if (audioInput) { audioInput.disconnect(); audioInput = null; } resetVariables(); } // for debugging this.name = 'StereoAudioRecorder'; this.toString = function() { return this.name; }; var isAudioProcessStarted = false; function onAudioProcessDataAvailable(e) { if (isPaused) { return; } if (isMediaStreamActive() === false) { if (!config.disableLogs) { console.log('MediaStream seems stopped.'); } jsAudioNode.disconnect(); recording = false; } if (!recording) { if (audioInput) { audioInput.disconnect(); audioInput = null; } return; } /** * This method is called on "onaudioprocess" event's first invocation. 
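* A minimal usage sketch (the handler is read from the recorder options, i.e. from `config`;
* `mediaStream` below stands for any audio MediaStream obtained via getUserMedia):
*   recorder = new StereoAudioRecorder(mediaStream, {
*       onAudioProcessStarted: function() { console.log('first audio chunk captured'); }
*   });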
* @method {function} onAudioProcessStarted * @memberof StereoAudioRecorder * @example * recorder.onAudioProcessStarted: function() { }; */ if (!isAudioProcessStarted) { isAudioProcessStarted = true; if (config.onAudioProcessStarted) { config.onAudioProcessStarted(); } if (config.initCallback) { config.initCallback(); } } var left = e.inputBuffer.getChannelData(0); // we clone the samples var chLeft = new Float32Array(left); leftchannel.push(chLeft); if (numberOfAudioChannels === 2) { var right = e.inputBuffer.getChannelData(1); var chRight = new Float32Array(right); rightchannel.push(chRight); } recordingLength += bufferSize; // export raw PCM self.recordingLength = recordingLength; if (typeof config.timeSlice !== 'undefined') { intervalsBasedBuffers.recordingLength += bufferSize; intervalsBasedBuffers.left.push(chLeft); if (numberOfAudioChannels === 2) { intervalsBasedBuffers.right.push(chRight); } } } jsAudioNode.onaudioprocess = onAudioProcessDataAvailable; // to prevent self audio to be connected with speakers if (context.createMediaStreamDestination) { jsAudioNode.connect(context.createMediaStreamDestination()); } else { jsAudioNode.connect(context.destination); } // export raw PCM this.leftchannel = leftchannel; this.rightchannel = rightchannel; this.numberOfAudioChannels = numberOfAudioChannels; this.desiredSampRate = desiredSampRate; this.sampleRate = sampleRate; self.recordingLength = recordingLength; // helper for intervals based blobs var intervalsBasedBuffers = { left: [], right: [], recordingLength: 0 }; // this looper is used to support intervals based blobs (via timeSlice+ondataavailable) function looper() { if (!recording || typeof config.ondataavailable !== 'function' || typeof config.timeSlice === 'undefined') { return; } if (intervalsBasedBuffers.left.length) { mergeLeftRightBuffers({ desiredSampRate: desiredSampRate, sampleRate: sampleRate, numberOfAudioChannels: numberOfAudioChannels, internalInterleavedLength: intervalsBasedBuffers.recordingLength, leftBuffers: intervalsBasedBuffers.left, rightBuffers: numberOfAudioChannels === 1 ? [] : intervalsBasedBuffers.right }, function(buffer, view) { var blob = new Blob([view], { type: 'audio/wav' }); config.ondataavailable(blob); setTimeout(looper, config.timeSlice); }); intervalsBasedBuffers = { left: [], right: [], recordingLength: 0 }; } else { setTimeout(looper, config.timeSlice); } } } if (typeof RecordRTC !== 'undefined') { RecordRTC.StereoAudioRecorder = StereoAudioRecorder; } // _________________ // CanvasRecorder.js /** * CanvasRecorder is a standalone class used by {@link RecordRTC} to bring HTML5-Canvas recording into video WebM. It uses HTML2Canvas library and runs top over {@link Whammy}. * @summary HTML2Canvas recording into video WebM. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef CanvasRecorder * @class * @example * var recorder = new CanvasRecorder(htmlElement, { disableLogs: true, useWhammyRecorder: true }); * recorder.record(); * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {HTMLElement} htmlElement - querySelector/getElementById/getElementsByTagName[0]/etc. 
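* Note: when canvas.captureStream is usable, htmlElement must be an HTMLCanvasElement or a
* CanvasRenderingContext2D; for other elements the frames are grabbed with html2canvas and
* encoded by Whammy instead.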
* @param {object} config - {disableLogs:true, initCallback: function} */ function CanvasRecorder(htmlElement, config) { if (typeof html2canvas === 'undefined') { throw 'Please link: https://www.webrtc-experiment.com/screenshot.js'; } config = config || {}; if (!config.frameInterval) { config.frameInterval = 10; } // via DetectRTC.js var isCanvasSupportsStreamCapturing = false; ['captureStream', 'mozCaptureStream', 'webkitCaptureStream'].forEach(function(item) { if (item in document.createElement('canvas')) { isCanvasSupportsStreamCapturing = true; } }); var _isChrome = (!!window.webkitRTCPeerConnection || !!window.webkitGetUserMedia) && !!window.chrome; var chromeVersion = 50; var matchArray = navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./); if (_isChrome && matchArray && matchArray[2]) { chromeVersion = parseInt(matchArray[2], 10); } if (_isChrome && chromeVersion < 52) { isCanvasSupportsStreamCapturing = false; } if (config.useWhammyRecorder) { isCanvasSupportsStreamCapturing = false; } var globalCanvas, mediaStreamRecorder; if (isCanvasSupportsStreamCapturing) { if (!config.disableLogs) { console.log('Your browser supports both MediRecorder API and canvas.captureStream!'); } if (htmlElement instanceof HTMLCanvasElement) { globalCanvas = htmlElement; } else if (htmlElement instanceof CanvasRenderingContext2D) { globalCanvas = htmlElement.canvas; } else { throw 'Please pass either HTMLCanvasElement or CanvasRenderingContext2D.'; } } else if (!!navigator.mozGetUserMedia) { if (!config.disableLogs) { console.error('Canvas recording is NOT supported in Firefox.'); } } var isRecording; /** * This method records Canvas. * @method * @memberof CanvasRecorder * @example * recorder.record(); */ this.record = function() { isRecording = true; if (isCanvasSupportsStreamCapturing && !config.useWhammyRecorder) { // CanvasCaptureMediaStream var canvasMediaStream; if ('captureStream' in globalCanvas) { canvasMediaStream = globalCanvas.captureStream(25); // 25 FPS } else if ('mozCaptureStream' in globalCanvas) { canvasMediaStream = globalCanvas.mozCaptureStream(25); } else if ('webkitCaptureStream' in globalCanvas) { canvasMediaStream = globalCanvas.webkitCaptureStream(25); } try { var mdStream = new MediaStream(); mdStream.addTrack(getTracks(canvasMediaStream, 'video')[0]); canvasMediaStream = mdStream; } catch (e) {} if (!canvasMediaStream) { throw 'captureStream API are NOT available.'; } // Note: Jan 18, 2016 status is that, // Firefox MediaRecorder API can't record CanvasCaptureMediaStream object. mediaStreamRecorder = new MediaStreamRecorder(canvasMediaStream, { mimeType: config.mimeType || 'video/webm' }); mediaStreamRecorder.record(); } else { whammy.frames = []; lastTime = new Date().getTime(); drawCanvasFrame(); } if (config.initCallback) { config.initCallback(); } }; this.getWebPImages = function(callback) { if (htmlElement.nodeName.toLowerCase() !== 'canvas') { callback(); return; } var framesLength = whammy.frames.length; whammy.frames.forEach(function(frame, idx) { var framesRemaining = framesLength - idx; if (!config.disableLogs) { console.log(framesRemaining + '/' + framesLength + ' frames remaining'); } if (config.onEncodingCallback) { config.onEncodingCallback(framesRemaining, framesLength); } var webp = frame.image.toDataURL('image/webp', 1); whammy.frames[idx].image = webp; }); if (!config.disableLogs) { console.log('Generating WebM'); } callback(); }; /** * This method stops recording Canvas. 
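* When canvas.captureStream recording was used, stop simply delegates to the underlying
* MediaStreamRecorder; otherwise the collected frames are converted to WebP data-URLs and
* compiled into a video/webm blob by Whammy.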
* @param {function} callback - Callback function, that is used to pass recorded blob back to the callee. * @method * @memberof CanvasRecorder * @example * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); */ this.stop = function(callback) { isRecording = false; var that = this; if (isCanvasSupportsStreamCapturing && mediaStreamRecorder) { mediaStreamRecorder.stop(callback); return; } this.getWebPImages(function() { /** * @property {Blob} blob - Recorded frames in video/webm blob. * @memberof CanvasRecorder * @example * recorder.stop(function() { * var blob = recorder.blob; * }); */ whammy.compile(function(blob) { if (!config.disableLogs) { console.log('Recording finished!'); } that.blob = blob; if (that.blob.forEach) { that.blob = new Blob([], { type: 'video/webm' }); } if (callback) { callback(that.blob); } whammy.frames = []; }); }); }; var isPausedRecording = false; /** * This method pauses the recording process. * @method * @memberof CanvasRecorder * @example * recorder.pause(); */ this.pause = function() { isPausedRecording = true; if (mediaStreamRecorder instanceof MediaStreamRecorder) { mediaStreamRecorder.pause(); return; } }; /** * This method resumes the recording process. * @method * @memberof CanvasRecorder * @example * recorder.resume(); */ this.resume = function() { isPausedRecording = false; if (mediaStreamRecorder instanceof MediaStreamRecorder) { mediaStreamRecorder.resume(); return; } if (!isRecording) { this.record(); } }; /** * This method resets currently recorded data. * @method * @memberof CanvasRecorder * @example * recorder.clearRecordedData(); */ this.clearRecordedData = function() { if (isRecording) { this.stop(clearRecordedDataCB); } clearRecordedDataCB(); }; function clearRecordedDataCB() { whammy.frames = []; isRecording = false; isPausedRecording = false; } // for debugging this.name = 'CanvasRecorder'; this.toString = function() { return this.name; }; function cloneCanvas() { //create a new canvas var newCanvas = document.createElement('canvas'); var context = newCanvas.getContext('2d'); //set dimensions newCanvas.width = htmlElement.width; newCanvas.height = htmlElement.height; //apply the old canvas to the new one context.drawImage(htmlElement, 0, 0); //return the new canvas return newCanvas; } function drawCanvasFrame() { if (isPausedRecording) { lastTime = new Date().getTime(); return setTimeout(drawCanvasFrame, 500); } if (htmlElement.nodeName.toLowerCase() === 'canvas') { var duration = new Date().getTime() - lastTime; // via #206, by Jack i.e. @Seymourr lastTime = new Date().getTime(); whammy.frames.push({ image: cloneCanvas(), duration: duration }); if (isRecording) { setTimeout(drawCanvasFrame, config.frameInterval); } return; } html2canvas(htmlElement, { grabMouse: typeof config.showMousePointer === 'undefined' || config.showMousePointer, onrendered: function(canvas) { var duration = new Date().getTime() - lastTime; if (!duration) { return setTimeout(drawCanvasFrame, config.frameInterval); } // via #206, by Jack i.e. 
@Seymourr lastTime = new Date().getTime(); whammy.frames.push({ image: canvas.toDataURL('image/webp', 1), duration: duration }); if (isRecording) { setTimeout(drawCanvasFrame, config.frameInterval); } } }); } var lastTime = new Date().getTime(); var whammy = new Whammy.Video(100); } if (typeof RecordRTC !== 'undefined') { RecordRTC.CanvasRecorder = CanvasRecorder; } // _________________ // WhammyRecorder.js /** * WhammyRecorder is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It runs top over {@link Whammy}. * @summary Video recording feature in Chrome. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef WhammyRecorder * @class * @example * var recorder = new WhammyRecorder(mediaStream); * recorder.record(); * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API. * @param {object} config - {disableLogs: true, initCallback: function, video: HTMLVideoElement, etc.} */ function WhammyRecorder(mediaStream, config) { config = config || {}; if (!config.frameInterval) { config.frameInterval = 10; } if (!config.disableLogs) { console.log('Using frames-interval:', config.frameInterval); } /** * This method records video. * @method * @memberof WhammyRecorder * @example * recorder.record(); */ this.record = function() { if (!config.width) { config.width = 320; } if (!config.height) { config.height = 240; } if (!config.video) { config.video = { width: config.width, height: config.height }; } if (!config.canvas) { config.canvas = { width: config.width, height: config.height }; } canvas.width = config.canvas.width || 320; canvas.height = config.canvas.height || 240; context = canvas.getContext('2d'); // setting defaults if (config.video && config.video instanceof HTMLVideoElement) { video = config.video.cloneNode(); if (config.initCallback) { config.initCallback(); } } else { video = document.createElement('video'); setSrcObject(mediaStream, video); video.onloadedmetadata = function() { // "onloadedmetadata" may NOT work in FF? if (config.initCallback) { config.initCallback(); } }; video.width = config.video.width; video.height = config.video.height; } video.muted = true; video.play(); lastTime = new Date().getTime(); whammy = new Whammy.Video(); if (!config.disableLogs) { console.log('canvas resolutions', canvas.width, '*', canvas.height); console.log('video width/height', video.width || canvas.width, '*', video.height || canvas.height); } drawFrames(config.frameInterval); }; /** * Draw and push frames to Whammy * @param {integer} frameInterval - set minimum interval (in milliseconds) between each time we push a frame to Whammy */ function drawFrames(frameInterval) { frameInterval = typeof frameInterval !== 'undefined' ? frameInterval : 10; var duration = new Date().getTime() - lastTime; if (!duration) { return setTimeout(drawFrames, frameInterval, frameInterval); } if (isPausedRecording) { lastTime = new Date().getTime(); return setTimeout(drawFrames, 100); } // via #206, by Jack i.e. 
@Seymourr lastTime = new Date().getTime(); if (video.paused) { // via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316 // Tweak for Android Chrome video.play(); } context.drawImage(video, 0, 0, canvas.width, canvas.height); whammy.frames.push({ duration: duration, image: canvas.toDataURL('image/webp') }); if (!isStopDrawing) { setTimeout(drawFrames, frameInterval, frameInterval); } } function asyncLoop(o) { var i = -1, length = o.length; (function loop() { i++; if (i === length) { o.callback(); return; } // "setTimeout" added by Jim McLeod setTimeout(function() { o.functionToLoop(loop, i); }, 1); })(); } /** * remove black frames from the beginning to the specified frame * @param {Array} _frames - array of frames to be checked * @param {number} _framesToCheck - number of frame until check will be executed (-1 - will drop all frames until frame not matched will be found) * @param {number} _pixTolerance - 0 - very strict (only black pixel color) ; 1 - all * @param {number} _frameTolerance - 0 - very strict (only black frame color) ; 1 - all * @returns {Array} - array of frames */ // pull#293 by @volodalexey function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance, callback) { var localCanvas = document.createElement('canvas'); localCanvas.width = canvas.width; localCanvas.height = canvas.height; var context2d = localCanvas.getContext('2d'); var resultFrames = []; var checkUntilNotBlack = _framesToCheck === -1; var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ? _framesToCheck : _frames.length; var sampleColor = { r: 0, g: 0, b: 0 }; var maxColorDifference = Math.sqrt( Math.pow(255, 2) + Math.pow(255, 2) + Math.pow(255, 2) ); var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0; var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0; var doNotCheckNext = false; asyncLoop({ length: endCheckFrame, functionToLoop: function(loop, f) { var matchPixCount, endPixCheck, maxPixCount; var finishImage = function() { if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) ; else { // console.log('frame is passed : ' + f); if (checkUntilNotBlack) { doNotCheckNext = true; } resultFrames.push(_frames[f]); } loop(); }; if (!doNotCheckNext) { var image = new Image(); image.onload = function() { context2d.drawImage(image, 0, 0, canvas.width, canvas.height); var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height); matchPixCount = 0; endPixCheck = imageData.data.length; maxPixCount = imageData.data.length / 4; for (var pix = 0; pix < endPixCheck; pix += 4) { var currentColor = { r: imageData.data[pix], g: imageData.data[pix + 1], b: imageData.data[pix + 2] }; var colorDifference = Math.sqrt( Math.pow(currentColor.r - sampleColor.r, 2) + Math.pow(currentColor.g - sampleColor.g, 2) + Math.pow(currentColor.b - sampleColor.b, 2) ); // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2) if (colorDifference <= maxColorDifference * pixTolerance) { matchPixCount++; } } finishImage(); }; image.src = _frames[f].image; } else { finishImage(); } }, callback: function() { resultFrames = resultFrames.concat(_frames.slice(endCheckFrame)); if (resultFrames.length <= 0) { // at least one last frame should be available for next manipulation // if total duration of all frames will be < 1000 than ffmpeg doesn't work well... 
resultFrames.push(_frames[_frames.length - 1]); } callback(resultFrames); } }); } var isStopDrawing = false; /** * This method stops recording video. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee. * @method * @memberof WhammyRecorder * @example * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); */ this.stop = function(callback) { callback = callback || function() {}; isStopDrawing = true; var _this = this; // analyse of all frames takes some time! setTimeout(function() { // e.g. dropBlackFrames(frames, 10, 1, 1) - will cut all 10 frames // e.g. dropBlackFrames(frames, 10, 0.5, 0.5) - will analyse 10 frames // e.g. dropBlackFrames(frames, 10) === dropBlackFrames(frames, 10, 0, 0) - will analyse 10 frames with strict black color dropBlackFrames(whammy.frames, -1, null, null, function(frames) { whammy.frames = frames; // to display advertisement images! if (config.advertisement && config.advertisement.length) { whammy.frames = config.advertisement.concat(whammy.frames); } /** * @property {Blob} blob - Recorded frames in video/webm blob. * @memberof WhammyRecorder * @example * recorder.stop(function() { * var blob = recorder.blob; * }); */ whammy.compile(function(blob) { _this.blob = blob; if (_this.blob.forEach) { _this.blob = new Blob([], { type: 'video/webm' }); } if (callback) { callback(_this.blob); } }); }); }, 10); }; var isPausedRecording = false; /** * This method pauses the recording process. * @method * @memberof WhammyRecorder * @example * recorder.pause(); */ this.pause = function() { isPausedRecording = true; }; /** * This method resumes the recording process. * @method * @memberof WhammyRecorder * @example * recorder.resume(); */ this.resume = function() { isPausedRecording = false; if (isStopDrawing) { this.record(); } }; /** * This method resets currently recorded data. * @method * @memberof WhammyRecorder * @example * recorder.clearRecordedData(); */ this.clearRecordedData = function() { if (!isStopDrawing) { this.stop(clearRecordedDataCB); } clearRecordedDataCB(); }; function clearRecordedDataCB() { whammy.frames = []; isStopDrawing = true; isPausedRecording = false; } // for debugging this.name = 'WhammyRecorder'; this.toString = function() { return this.name; }; var canvas = document.createElement('canvas'); var context = canvas.getContext('2d'); var video; var lastTime; var whammy; } if (typeof RecordRTC !== 'undefined') { RecordRTC.WhammyRecorder = WhammyRecorder; } // https://github.com/antimatter15/whammy/blob/master/LICENSE // _________ // Whammy.js // todo: Firefox now supports webp for webm containers! // their MediaRecorder implementation works well! // should we provide an option to record via Whammy.js or MediaRecorder API is a better solution? /** * Whammy is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It is written by {@link https://github.com/antimatter15|antimatter15} * @summary A real time javascript webm encoder based on a canvas hack. 
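* Usage note: Whammy is exposed as a plain object, so the encoder is constructed with
* `new Whammy.Video(duration)` (as the recorders above do); calling `new Whammy()` itself throws.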
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef Whammy * @class * @example * var recorder = new Whammy().Video(15); * recorder.add(context || canvas || dataURL); * var output = recorder.compile(); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} */ var Whammy = (function() { // a more abstract-ish API function WhammyVideo(duration) { this.frames = []; this.duration = duration || 1; this.quality = 0.8; } /** * Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder. * @method * @memberof Whammy * @example * recorder = new Whammy().Video(0.8, 100); * recorder.add(canvas || context || 'image/webp'); * @param {string} frame - Canvas || Context || image/webp * @param {number} duration - Stick a duration (in milliseconds) */ WhammyVideo.prototype.add = function(frame, duration) { if ('canvas' in frame) { //CanvasRenderingContext2D frame = frame.canvas; } if ('toDataURL' in frame) { frame = frame.toDataURL('image/webp', this.quality); } if (!(/^data:image\/webp;base64,/ig).test(frame)) { throw 'Input must be formatted properly as a base64 encoded DataURI of type image/webp'; } this.frames.push({ image: frame, duration: duration || this.duration }); }; function processInWebWorker(_function) { var blob = URL.createObjectURL(new Blob([_function.toString(), 'this.onmessage = function (eee) {' + _function.name + '(eee.data);}' ], { type: 'application/javascript' })); var worker = new Worker(blob); URL.revokeObjectURL(blob); return worker; } function whammyInWebWorker(frames) { function ArrayToWebM(frames) { var info = checkFrames(frames); if (!info) { return []; } var clusterMaxDuration = 30000; var EBML = [{ 'id': 0x1a45dfa3, // EBML 'data': [{ 'data': 1, 'id': 0x4286 // EBMLVersion }, { 'data': 1, 'id': 0x42f7 // EBMLReadVersion }, { 'data': 4, 'id': 0x42f2 // EBMLMaxIDLength }, { 'data': 8, 'id': 0x42f3 // EBMLMaxSizeLength }, { 'data': 'webm', 'id': 0x4282 // DocType }, { 'data': 2, 'id': 0x4287 // DocTypeVersion }, { 'data': 2, 'id': 0x4285 // DocTypeReadVersion }] }, { 'id': 0x18538067, // Segment 'data': [{ 'id': 0x1549a966, // Info 'data': [{ 'data': 1e6, //do things in millisecs (num of nanosecs for duration scale) 'id': 0x2ad7b1 // TimecodeScale }, { 'data': 'whammy', 'id': 0x4d80 // MuxingApp }, { 'data': 'whammy', 'id': 0x5741 // WritingApp }, { 'data': doubleToString(info.duration), 'id': 0x4489 // Duration }] }, { 'id': 0x1654ae6b, // Tracks 'data': [{ 'id': 0xae, // TrackEntry 'data': [{ 'data': 1, 'id': 0xd7 // TrackNumber }, { 'data': 1, 'id': 0x73c5 // TrackUID }, { 'data': 0, 'id': 0x9c // FlagLacing }, { 'data': 'und', 'id': 0x22b59c // Language }, { 'data': 'V_VP8', 'id': 0x86 // CodecID }, { 'data': 'VP8', 'id': 0x258688 // CodecName }, { 'data': 1, 'id': 0x83 // TrackType }, { 'id': 0xe0, // Video 'data': [{ 'data': info.width, 'id': 0xb0 // PixelWidth }, { 'data': info.height, 'id': 0xba // PixelHeight }] }] }] }] }]; //Generate clusters (max duration) var frameNumber = 0; var clusterTimecode = 0; while (frameNumber < frames.length) { var clusterFrames = []; var clusterDuration = 0; do { clusterFrames.push(frames[frameNumber]); clusterDuration += frames[frameNumber].duration; frameNumber++; } while (frameNumber < frames.length && clusterDuration < clusterMaxDuration); var clusterCounter = 0; var cluster = { 'id': 0x1f43b675, // Cluster 'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames) }; //Add cluster to segment 
EBML[1].data.push(cluster); clusterTimecode += clusterDuration; } return generateEBML(EBML); } function getClusterData(clusterTimecode, clusterCounter, clusterFrames) { return [{ 'data': clusterTimecode, 'id': 0xe7 // Timecode }].concat(clusterFrames.map(function(webp) { var block = makeSimpleBlock({ discardable: 0, frame: webp.data.slice(4), invisible: 0, keyframe: 1, lacing: 0, trackNum: 1, timecode: Math.round(clusterCounter) }); clusterCounter += webp.duration; return { data: block, id: 0xa3 }; })); } // sums the lengths of all the frames and gets the duration function checkFrames(frames) { if (!frames[0]) { postMessage({ error: 'Something went wrong. Maybe WebP format is not supported in the current browser.' }); return; } var width = frames[0].width, height = frames[0].height, duration = frames[0].duration; for (var i = 1; i < frames.length; i++) { duration += frames[i].duration; } return { duration: duration, width: width, height: height }; } function numToBuffer(num) { var parts = []; while (num > 0) { parts.push(num & 0xff); num = num >> 8; } return new Uint8Array(parts.reverse()); } function strToBuffer(str) { return new Uint8Array(str.split('').map(function(e) { return e.charCodeAt(0); })); } function bitsToBuffer(bits) { var data = []; var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : ''; bits = pad + bits; for (var i = 0; i < bits.length; i += 8) { data.push(parseInt(bits.substr(i, 8), 2)); } return new Uint8Array(data); } function generateEBML(json) { var ebml = []; for (var i = 0; i < json.length; i++) { var data = json[i].data; if (typeof data === 'object') { data = generateEBML(data); } if (typeof data === 'number') { data = bitsToBuffer(data.toString(2)); } if (typeof data === 'string') { data = strToBuffer(data); } var len = data.size || data.byteLength || data.length; var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8); var sizeToString = len.toString(2); var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString; var size = (new Array(zeroes)).join('0') + '1' + padded; ebml.push(numToBuffer(json[i].id)); ebml.push(bitsToBuffer(size)); ebml.push(data); } return new Blob(ebml, { type: 'video/webm' }); } function makeSimpleBlock(data) { var flags = 0; if (data.keyframe) { flags |= 128; } if (data.invisible) { flags |= 8; } if (data.lacing) { flags |= (data.lacing << 1); } if (data.discardable) { flags |= 1; } if (data.trackNum > 127) { throw 'TrackNumber > 127 not supported'; } var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) { return String.fromCharCode(e); }).join('') + data.frame; return out; } function parseWebP(riff) { var VP8 = riff.RIFF[0].WEBP[0]; var frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header for (var i = 0, c = []; i < 4; i++) { c[i] = VP8.charCodeAt(frameStart + 3 + i); } var width, height, tmp; //the code below is literally copied verbatim from the bitstream spec tmp = (c[1] << 8) | c[0]; width = tmp & 0x3FFF; tmp = (c[3] << 8) | c[2]; height = tmp & 0x3FFF; return { width: width, height: height, data: VP8, riff: riff }; } function getStrLength(string, offset) { return parseInt(string.substr(offset + 4, 4).split('').map(function(i) { var unpadded = i.charCodeAt(0).toString(2); return (new Array(8 - unpadded.length + 1)).join('0') + unpadded; }).join(''), 2); } function parseRIFF(string) { var offset = 0; var chunks = {}; while (offset < string.length) { var id = string.substr(offset, 
4); var len = getStrLength(string, offset); var data = string.substr(offset + 4 + 4, len); offset += 4 + 4 + len; chunks[id] = chunks[id] || []; if (id === 'RIFF' || id === 'LIST') { chunks[id].push(parseRIFF(data)); } else { chunks[id].push(data); } } return chunks; } function doubleToString(num) { return [].slice.call( new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) { return String.fromCharCode(e); }).reverse().join(''); } var webm = new ArrayToWebM(frames.map(function(frame) { var webp = parseWebP(parseRIFF(atob(frame.image.slice(23)))); webp.duration = frame.duration; return webp; })); postMessage(webm); } /** * Encodes frames in WebM container. It uses a web worker to invoke the 'ArrayToWebM' method. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee. * @method * @memberof Whammy * @example * recorder = new Whammy.Video(100); * recorder.compile(function(blob) { * // blob.size - blob.type * }); */ WhammyVideo.prototype.compile = function(callback) { var webWorker = processInWebWorker(whammyInWebWorker); webWorker.onmessage = function(event) { if (event.data.error) { console.error(event.data.error); return; } callback(event.data); }; webWorker.postMessage(this.frames); }; return { /** * A more abstract-ish API. * @method * @memberof Whammy * @example * recorder = new Whammy.Video(100); * @param {?number} duration - Default per-frame duration in milliseconds */ Video: WhammyVideo }; })(); if (typeof RecordRTC !== 'undefined') { RecordRTC.Whammy = Whammy; } // ______________ (indexed-db) // DiskStorage.js /** * DiskStorage is a standalone object used by {@link RecordRTC} to store recorded blobs in IndexedDB storage. * @summary Writing blobs into IndexedDB. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @example * DiskStorage.Store({ * audioBlob: yourAudioBlob, * videoBlob: yourVideoBlob, * gifBlob : yourGifBlob * }); * DiskStorage.Fetch(function(dataURL, type) { * if(type === 'audioBlob') { } * if(type === 'videoBlob') { } * if(type === 'gifBlob') { } * }); * // DiskStorage.dataStoreName = 'recordRTC'; * // DiskStorage.onError = function(error) { }; * @property {function} init - This method must be called once to initialize IndexedDB ObjectStore. Though, it is auto-used internally. * @property {function} Fetch - This method fetches stored blobs from IndexedDB. * @property {function} Store - This method stores blobs in IndexedDB. * @property {function} onError - This function is invoked for any known/unknown error. * @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} */ var DiskStorage = { /** * This method must be called once to initialize IndexedDB ObjectStore. Though, it is auto-used internally.
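* On first use the database is opened (or created) and the object store named by `dataStoreName`
* is created in the `onupgradeneeded` handler (or via the legacy `setVersion` path on very old
* browsers) before any blobs are written or read back.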
* @method * @memberof DiskStorage * @internal * @example * DiskStorage.init(); */ init: function() { var self = this; if (typeof indexedDB === 'undefined' || typeof indexedDB.open === 'undefined') { console.error('IndexedDB API is not available in this browser.'); return; } var dbVersion = 1; var dbName = this.dbName || location.href.replace(/\/|:|#|%|\.|\[|\]/g, ''), db; var request = indexedDB.open(dbName, dbVersion); function createObjectStore(dataBase) { dataBase.createObjectStore(self.dataStoreName); } function putInDB() { var transaction = db.transaction([self.dataStoreName], 'readwrite'); if (self.videoBlob) { transaction.objectStore(self.dataStoreName).put(self.videoBlob, 'videoBlob'); } if (self.gifBlob) { transaction.objectStore(self.dataStoreName).put(self.gifBlob, 'gifBlob'); } if (self.audioBlob) { transaction.objectStore(self.dataStoreName).put(self.audioBlob, 'audioBlob'); } function getFromStore(portionName) { transaction.objectStore(self.dataStoreName).get(portionName).onsuccess = function(event) { if (self.callback) { self.callback(event.target.result, portionName); } }; } getFromStore('audioBlob'); getFromStore('videoBlob'); getFromStore('gifBlob'); } request.onerror = self.onError; request.onsuccess = function() { db = request.result; db.onerror = self.onError; if (db.setVersion) { if (db.version !== dbVersion) { var setVersion = db.setVersion(dbVersion); setVersion.onsuccess = function() { createObjectStore(db); putInDB(); }; } else { putInDB(); } } else { putInDB(); } }; request.onupgradeneeded = function(event) { createObjectStore(event.target.result); }; }, /** * This method fetches stored blobs from IndexedDB. * @method * @memberof DiskStorage * @internal * @example * DiskStorage.Fetch(function(dataURL, type) { * if(type === 'audioBlob') { } * if(type === 'videoBlob') { } * if(type === 'gifBlob') { } * }); */ Fetch: function(callback) { this.callback = callback; this.init(); return this; }, /** * This method stores blobs in IndexedDB. * @method * @memberof DiskStorage * @internal * @example * DiskStorage.Store({ * audioBlob: yourAudioBlob, * videoBlob: yourVideoBlob, * gifBlob : yourGifBlob * }); */ Store: function(config) { this.audioBlob = config.audioBlob; this.videoBlob = config.videoBlob; this.gifBlob = config.gifBlob; this.init(); return this; }, /** * This function is invoked for any known/unknown error. * @method * @memberof DiskStorage * @internal * @example * DiskStorage.onError = function(error){ * alert( JSON.stringify(error) ); * }; */ onError: function(error) { console.error(JSON.stringify(error, null, '\t')); }, /** * @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage. * @memberof DiskStorage * @internal * @example * DiskStorage.dataStoreName = 'recordRTC'; */ dataStoreName: 'recordRTC', dbName: null }; if (typeof RecordRTC !== 'undefined') { RecordRTC.DiskStorage = DiskStorage; } // ______________ // GifRecorder.js /** * GifRecorder is a standalone class used by {@link RecordRTC} to record video or canvas into animated gif.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef GifRecorder * @class * @example * var recorder = new GifRecorder(mediaStream || canvas || context, { onGifPreview: function, onGifRecordingStarted: function, width: 1280, height: 720, frameRate: 200, quality: 10 }); * recorder.record(); * recorder.stop(function(blob) { * img.src = URL.createObjectURL(blob); * }); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {MediaStream} mediaStream - MediaStream object or HTMLCanvasElement or CanvasRenderingContext2D. * @param {object} config - {disableLogs:true, initCallback: function, width: 320, height: 240, frameRate: 200, quality: 10} */ function GifRecorder(mediaStream, config) { if (typeof GIFEncoder === 'undefined') { var script = document.createElement('script'); script.src = 'https://www.webrtc-experiment.com/gif-recorder.js'; (document.body || document.documentElement).appendChild(script); } config = config || {}; var isHTMLObject = mediaStream instanceof CanvasRenderingContext2D || mediaStream instanceof HTMLCanvasElement; /** * This method records MediaStream. * @method * @memberof GifRecorder * @example * recorder.record(); */ this.record = function() { if (typeof GIFEncoder === 'undefined') { setTimeout(self.record, 1000); return; } if (!isLoadedMetaData) { setTimeout(self.record, 1000); return; } if (!isHTMLObject) { if (!config.width) { config.width = video.offsetWidth || 320; } if (!config.height) { config.height = video.offsetHeight || 240; } if (!config.video) { config.video = { width: config.width, height: config.height }; } if (!config.canvas) { config.canvas = { width: config.width, height: config.height }; } canvas.width = config.canvas.width || 320; canvas.height = config.canvas.height || 240; video.width = config.video.width || 320; video.height = config.video.height || 240; } // external library to record as GIF images gifEncoder = new GIFEncoder(); // void setRepeat(int iter) // Sets the number of times the set of GIF frames should be played. // Default is 1; 0 means play indefinitely. gifEncoder.setRepeat(0); // void setFrameRate(Number fps) // Sets frame rate in frames per second. // Equivalent to setDelay(1000/fps). // Using "setDelay" instead of "setFrameRate" gifEncoder.setDelay(config.frameRate || 200); // void setQuality(int quality) // Sets quality of color quantization (conversion of images to the // maximum 256 colors allowed by the GIF specification). // Lower values (minimum = 1) produce better colors, // but slow processing significantly. 10 is the default, // and produces good color mapping at reasonable speeds. // Values greater than 20 do not yield significant improvements in speed. gifEncoder.setQuality(config.quality || 10); // Boolean start() // This writes the GIF Header and returns false if it fails. 
gifEncoder.start(); if (typeof config.onGifRecordingStarted === 'function') { config.onGifRecordingStarted(); } function drawVideoFrame(time) { if (self.clearedRecordedData === true) { return; } if (isPausedRecording) { return setTimeout(function() { drawVideoFrame(time); }, 100); } lastAnimationFrame = requestAnimationFrame(drawVideoFrame); if (typeof lastFrameTime === undefined) { lastFrameTime = time; } // ~10 fps if (time - lastFrameTime < 90) { return; } if (!isHTMLObject && video.paused) { // via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316 // Tweak for Android Chrome video.play(); } if (!isHTMLObject) { context.drawImage(video, 0, 0, canvas.width, canvas.height); } if (config.onGifPreview) { config.onGifPreview(canvas.toDataURL('image/png')); } gifEncoder.addFrame(context); lastFrameTime = time; } lastAnimationFrame = requestAnimationFrame(drawVideoFrame); if (config.initCallback) { config.initCallback(); } }; /** * This method stops recording MediaStream. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee. * @method * @memberof GifRecorder * @example * recorder.stop(function(blob) { * img.src = URL.createObjectURL(blob); * }); */ this.stop = function(callback) { callback = callback || function() {}; if (lastAnimationFrame) { cancelAnimationFrame(lastAnimationFrame); } /** * @property {Blob} blob - The recorded blob object. * @memberof GifRecorder * @example * recorder.stop(function(){ * var blob = recorder.blob; * }); */ this.blob = new Blob([new Uint8Array(gifEncoder.stream().bin)], { type: 'image/gif' }); callback(this.blob); // bug: find a way to clear old recorded blobs gifEncoder.stream().bin = []; }; var isPausedRecording = false; /** * This method pauses the recording process. * @method * @memberof GifRecorder * @example * recorder.pause(); */ this.pause = function() { isPausedRecording = true; }; /** * This method resumes the recording process. * @method * @memberof GifRecorder * @example * recorder.resume(); */ this.resume = function() { isPausedRecording = false; }; /** * This method resets currently recorded data. 
* @method * @memberof GifRecorder * @example * recorder.clearRecordedData(); */ this.clearRecordedData = function() { self.clearedRecordedData = true; clearRecordedDataCB(); }; function clearRecordedDataCB() { if (gifEncoder) { gifEncoder.stream().bin = []; } } // for debugging this.name = 'GifRecorder'; this.toString = function() { return this.name; }; var canvas = document.createElement('canvas'); var context = canvas.getContext('2d'); if (isHTMLObject) { if (mediaStream instanceof CanvasRenderingContext2D) { context = mediaStream; canvas = context.canvas; } else if (mediaStream instanceof HTMLCanvasElement) { context = mediaStream.getContext('2d'); canvas = mediaStream; } } var isLoadedMetaData = true; if (!isHTMLObject) { var video = document.createElement('video'); video.muted = true; video.autoplay = true; video.playsInline = true; isLoadedMetaData = false; video.onloadedmetadata = function() { isLoadedMetaData = true; }; setSrcObject(mediaStream, video); video.play(); } var lastAnimationFrame = null; var lastFrameTime; var gifEncoder; var self = this; } if (typeof RecordRTC !== 'undefined') { RecordRTC.GifRecorder = GifRecorder; } // Last time updated: 2019-06-21 4:09:42 AM UTC // ________________________ // MultiStreamsMixer v1.2.2 // Open-Sourced: https://github.com/muaz-khan/MultiStreamsMixer // -------------------------------------------------- // Muaz Khan - www.MuazKhan.com // MIT License - www.WebRTC-Experiment.com/licence // -------------------------------------------------- function MultiStreamsMixer(arrayOfMediaStreams, elementClass) { var browserFakeUserAgent = 'Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45'; (function(that) { if (typeof RecordRTC !== 'undefined') { return; } if (!that) { return; } if (typeof window !== 'undefined') { return; } if (typeof commonjsGlobal === 'undefined') { return; } commonjsGlobal.navigator = { userAgent: browserFakeUserAgent, getUserMedia: function() {} }; if (!commonjsGlobal.console) { commonjsGlobal.console = {}; } if (typeof commonjsGlobal.console.log === 'undefined' || typeof commonjsGlobal.console.error === 'undefined') { commonjsGlobal.console.error = commonjsGlobal.console.log = commonjsGlobal.console.log || function() { console.log(arguments); }; } if (typeof document === 'undefined') { /*global document:true */ that.document = { documentElement: { appendChild: function() { return ''; } } }; document.createElement = document.captureStream = document.mozCaptureStream = function() { var obj = { getContext: function() { return obj; }, play: function() {}, pause: function() {}, drawImage: function() {}, toDataURL: function() { return ''; }, style: {} }; return obj; }; that.HTMLVideoElement = function() {}; } if (typeof location === 'undefined') { /*global location:true */ that.location = { protocol: 'file:', href: '', hash: '' }; } if (typeof screen === 'undefined') { /*global screen:true */ that.screen = { width: 0, height: 0 }; } if (typeof URL === 'undefined') { /*global screen:true */ that.URL = { createObjectURL: function() { return ''; }, revokeObjectURL: function() { return ''; } }; } /*global window:true */ that.window = commonjsGlobal; })(typeof commonjsGlobal !== 'undefined' ? 
commonjsGlobal : null); // requires: chrome://flags/#enable-experimental-web-platform-features elementClass = elementClass || 'multi-streams-mixer'; var videos = []; var isStopDrawingFrames = false; var canvas = document.createElement('canvas'); var context = canvas.getContext('2d'); canvas.style.opacity = 0; canvas.style.position = 'absolute'; canvas.style.zIndex = -1; canvas.style.top = '-1000em'; canvas.style.left = '-1000em'; canvas.className = elementClass; (document.body || document.documentElement).appendChild(canvas); this.disableLogs = false; this.frameInterval = 10; this.width = 360; this.height = 240; // use gain node to prevent echo this.useGainNode = true; var self = this; // _____________________________ // Cross-Browser-Declarations.js // WebAudio API representer var AudioContext = window.AudioContext; if (typeof AudioContext === 'undefined') { if (typeof webkitAudioContext !== 'undefined') { /*global AudioContext:true */ AudioContext = webkitAudioContext; } if (typeof mozAudioContext !== 'undefined') { /*global AudioContext:true */ AudioContext = mozAudioContext; } } /*jshint -W079 */ var URL = window.URL; if (typeof URL === 'undefined' && typeof webkitURL !== 'undefined') { /*global URL:true */ URL = webkitURL; } if (typeof navigator !== 'undefined' && typeof navigator.getUserMedia === 'undefined') { // maybe window.navigator? if (typeof navigator.webkitGetUserMedia !== 'undefined') { navigator.getUserMedia = navigator.webkitGetUserMedia; } if (typeof navigator.mozGetUserMedia !== 'undefined') { navigator.getUserMedia = navigator.mozGetUserMedia; } } var MediaStream = window.MediaStream; if (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') { MediaStream = webkitMediaStream; } /*global MediaStream:true */ if (typeof MediaStream !== 'undefined') { // override "stop" method for all browsers if (typeof MediaStream.prototype.stop === 'undefined') { MediaStream.prototype.stop = function() { this.getTracks().forEach(function(track) { track.stop(); }); }; } } var Storage = {}; if (typeof AudioContext !== 'undefined') { Storage.AudioContext = AudioContext; } else if (typeof webkitAudioContext !== 'undefined') { Storage.AudioContext = webkitAudioContext; } function setSrcObject(stream, element) { if ('srcObject' in element) { element.srcObject = stream; } else if ('mozSrcObject' in element) { element.mozSrcObject = stream; } else { element.srcObject = stream; } } this.startDrawingFrames = function() { drawVideosToCanvas(); }; function drawVideosToCanvas() { if (isStopDrawingFrames) { return; } var videosLength = videos.length; var fullcanvas = false; var remaining = []; videos.forEach(function(video) { if (!video.stream) { video.stream = {}; } if (video.stream.fullcanvas) { fullcanvas = video; } else { // todo: video.stream.active or video.stream.live to fix blank frames issues? remaining.push(video); } }); if (fullcanvas) { canvas.width = fullcanvas.stream.width; canvas.height = fullcanvas.stream.height; } else if (remaining.length) { canvas.width = videosLength > 1 ? 
remaining[0].width * 2 : remaining[0].width; var height = 1; if (videosLength === 3 || videosLength === 4) { height = 2; } if (videosLength === 5 || videosLength === 6) { height = 3; } if (videosLength === 7 || videosLength === 8) { height = 4; } if (videosLength === 9 || videosLength === 10) { height = 5; } canvas.height = remaining[0].height * height; } else { canvas.width = self.width || 360; canvas.height = self.height || 240; } if (fullcanvas && fullcanvas instanceof HTMLVideoElement) { drawImage(fullcanvas); } remaining.forEach(function(video, idx) { drawImage(video, idx); }); setTimeout(drawVideosToCanvas, self.frameInterval); } function drawImage(video, idx) { if (isStopDrawingFrames) { return; } var x = 0; var y = 0; var width = video.width; var height = video.height; if (idx === 1) { x = video.width; } if (idx === 2) { y = video.height; } if (idx === 3) { x = video.width; y = video.height; } if (idx === 4) { y = video.height * 2; } if (idx === 5) { x = video.width; y = video.height * 2; } if (idx === 6) { y = video.height * 3; } if (idx === 7) { x = video.width; y = video.height * 3; } if (typeof video.stream.left !== 'undefined') { x = video.stream.left; } if (typeof video.stream.top !== 'undefined') { y = video.stream.top; } if (typeof video.stream.width !== 'undefined') { width = video.stream.width; } if (typeof video.stream.height !== 'undefined') { height = video.stream.height; } context.drawImage(video, x, y, width, height); if (typeof video.stream.onRender === 'function') { video.stream.onRender(context, x, y, width, height, idx); } } function getMixedStream() { isStopDrawingFrames = false; var mixedVideoStream = getMixedVideoStream(); var mixedAudioStream = getMixedAudioStream(); if (mixedAudioStream) { mixedAudioStream.getTracks().filter(function(t) { return t.kind === 'audio'; }).forEach(function(track) { mixedVideoStream.addTrack(track); }); } arrayOfMediaStreams.forEach(function(stream) { if (stream.fullcanvas) ; }); // mixedVideoStream.prototype.appendStreams = appendStreams; // mixedVideoStream.prototype.resetVideoStreams = resetVideoStreams; // mixedVideoStream.prototype.clearRecordedData = clearRecordedData; return mixedVideoStream; } function getMixedVideoStream() { resetVideoStreams(); var capturedStream; if ('captureStream' in canvas) { capturedStream = canvas.captureStream(); } else if ('mozCaptureStream' in canvas) { capturedStream = canvas.mozCaptureStream(); } else if (!self.disableLogs) { console.error('Upgrade to latest Chrome or otherwise enable this flag: chrome://flags/#enable-experimental-web-platform-features'); } var videoStream = new MediaStream(); capturedStream.getTracks().filter(function(t) { return t.kind === 'video'; }).forEach(function(track) { videoStream.addTrack(track); }); canvas.stream = videoStream; return videoStream; } function getMixedAudioStream() { // via: @pehrsons if (!Storage.AudioContextConstructor) { Storage.AudioContextConstructor = new Storage.AudioContext(); } self.audioContext = Storage.AudioContextConstructor; self.audioSources = []; if (self.useGainNode === true) { self.gainNode = self.audioContext.createGain(); self.gainNode.connect(self.audioContext.destination); self.gainNode.gain.value = 0; // don't hear self } var audioTracksLength = 0; arrayOfMediaStreams.forEach(function(stream) { if (!stream.getTracks().filter(function(t) { return t.kind === 'audio'; }).length) { return; } audioTracksLength++; var audioSource = self.audioContext.createMediaStreamSource(stream); if (self.useGainNode === true) { 
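/* Each source is also routed into the muted gain node created above (gain 0, connected to the
   destination), so the local mix is not played out through the speakers and cannot echo back. */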
audioSource.connect(self.gainNode); } self.audioSources.push(audioSource); }); if (!audioTracksLength) { // because "self.audioContext" is not initialized // that's why we've to ignore rest of the code return; } self.audioDestination = self.audioContext.createMediaStreamDestination(); self.audioSources.forEach(function(audioSource) { audioSource.connect(self.audioDestination); }); return self.audioDestination.stream; } function getVideo(stream) { var video = document.createElement('video'); setSrcObject(stream, video); video.className = elementClass; video.muted = true; video.volume = 0; video.width = stream.width || self.width || 360; video.height = stream.height || self.height || 240; video.play(); return video; } this.appendStreams = function(streams) { if (!streams) { throw 'First parameter is required.'; } if (!(streams instanceof Array)) { streams = [streams]; } streams.forEach(function(stream) { var newStream = new MediaStream(); if (stream.getTracks().filter(function(t) { return t.kind === 'video'; }).length) { var video = getVideo(stream); video.stream = stream; videos.push(video); newStream.addTrack(stream.getTracks().filter(function(t) { return t.kind === 'video'; })[0]); } if (stream.getTracks().filter(function(t) { return t.kind === 'audio'; }).length) { var audioSource = self.audioContext.createMediaStreamSource(stream); self.audioDestination = self.audioContext.createMediaStreamDestination(); audioSource.connect(self.audioDestination); newStream.addTrack(self.audioDestination.stream.getTracks().filter(function(t) { return t.kind === 'audio'; })[0]); } arrayOfMediaStreams.push(newStream); }); }; this.releaseStreams = function() { videos = []; isStopDrawingFrames = true; if (self.gainNode) { self.gainNode.disconnect(); self.gainNode = null; } if (self.audioSources.length) { self.audioSources.forEach(function(source) { source.disconnect(); }); self.audioSources = []; } if (self.audioDestination) { self.audioDestination.disconnect(); self.audioDestination = null; } if (self.audioContext) { self.audioContext.close(); } self.audioContext = null; context.clearRect(0, 0, canvas.width, canvas.height); if (canvas.stream) { canvas.stream.stop(); canvas.stream = null; } }; this.resetVideoStreams = function(streams) { if (streams && !(streams instanceof Array)) { streams = [streams]; } resetVideoStreams(streams); }; function resetVideoStreams(streams) { videos = []; streams = streams || arrayOfMediaStreams; // via: @adrian-ber streams.forEach(function(stream) { if (!stream.getTracks().filter(function(t) { return t.kind === 'video'; }).length) { return; } var video = getVideo(stream); video.stream = stream; videos.push(video); }); } // for debugging this.name = 'MultiStreamsMixer'; this.toString = function() { return this.name; }; this.getMixedStream = getMixedStream; } if (typeof RecordRTC === 'undefined') { { module.exports = MultiStreamsMixer; } } // ______________________ // MultiStreamRecorder.js /* * Video conference recording, using captureStream API along with WebAudio and Canvas2D API. */ /** * MultiStreamRecorder can record multiple videos in single container. * @summary Multi-videos recorder. 
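* Internally the video tracks are composited onto a hidden canvas and the audio tracks are mixed
* through a shared WebAudio graph by MultiStreamsMixer; the resulting mixed MediaStream is then
* recorded with MediaStreamRecorder.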
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef MultiStreamRecorder * @class * @example * var options = { * mimeType: 'video/webm' * } * var recorder = new MultiStreamRecorder(ArrayOfMediaStreams, options); * recorder.record(); * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * * // or * var blob = recorder.blob; * }); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {MediaStreams} mediaStreams - Array of MediaStreams. * @param {object} config - {disableLogs:true, frameInterval: 1, mimeType: "video/webm"} */ function MultiStreamRecorder(arrayOfMediaStreams, options) { arrayOfMediaStreams = arrayOfMediaStreams || []; var self = this; var mixer; var mediaRecorder; options = options || { elementClass: 'multi-streams-mixer', mimeType: 'video/webm', video: { width: 360, height: 240 } }; if (!options.frameInterval) { options.frameInterval = 10; } if (!options.video) { options.video = {}; } if (!options.video.width) { options.video.width = 360; } if (!options.video.height) { options.video.height = 240; } /** * This method records all MediaStreams. * @method * @memberof MultiStreamRecorder * @example * recorder.record(); */ this.record = function() { // github/muaz-khan/MultiStreamsMixer mixer = new MultiStreamsMixer(arrayOfMediaStreams, options.elementClass || 'multi-streams-mixer'); if (getAllVideoTracks().length) { mixer.frameInterval = options.frameInterval || 10; mixer.width = options.video.width || 360; mixer.height = options.video.height || 240; mixer.startDrawingFrames(); } if (options.previewStream && typeof options.previewStream === 'function') { options.previewStream(mixer.getMixedStream()); } // record using MediaRecorder API mediaRecorder = new MediaStreamRecorder(mixer.getMixedStream(), options); mediaRecorder.record(); }; function getAllVideoTracks() { var tracks = []; arrayOfMediaStreams.forEach(function(stream) { getTracks(stream, 'video').forEach(function(track) { tracks.push(track); }); }); return tracks; } /** * This method stops recording MediaStream. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee. * @method * @memberof MultiStreamRecorder * @example * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); */ this.stop = function(callback) { if (!mediaRecorder) { return; } mediaRecorder.stop(function(blob) { self.blob = blob; callback(blob); self.clearRecordedData(); }); }; /** * This method pauses the recording process. * @method * @memberof MultiStreamRecorder * @example * recorder.pause(); */ this.pause = function() { if (mediaRecorder) { mediaRecorder.pause(); } }; /** * This method resumes the recording process. * @method * @memberof MultiStreamRecorder * @example * recorder.resume(); */ this.resume = function() { if (mediaRecorder) { mediaRecorder.resume(); } }; /** * This method resets currently recorded data. * @method * @memberof MultiStreamRecorder * @example * recorder.clearRecordedData(); */ this.clearRecordedData = function() { if (mediaRecorder) { mediaRecorder.clearRecordedData(); mediaRecorder = null; } if (mixer) { mixer.releaseStreams(); mixer = null; } }; /** * Add extra media-streams to existing recordings. 
* @method * @memberof MultiStreamRecorder * @param {MediaStreams} mediaStreams - Array of MediaStreams * @example * recorder.addStreams([newAudioStream, newVideoStream]); */ this.addStreams = function(streams) { if (!streams) { throw 'First parameter is required.'; } if (!(streams instanceof Array)) { streams = [streams]; } arrayOfMediaStreams.concat(streams); if (!mediaRecorder || !mixer) { return; } mixer.appendStreams(streams); if (options.previewStream && typeof options.previewStream === 'function') { options.previewStream(mixer.getMixedStream()); } }; /** * Reset videos during live recording. Replace old videos e.g. replace cameras with full-screen. * @method * @memberof MultiStreamRecorder * @param {MediaStreams} mediaStreams - Array of MediaStreams * @example * recorder.resetVideoStreams([newVideo1, newVideo2]); */ this.resetVideoStreams = function(streams) { if (!mixer) { return; } if (streams && !(streams instanceof Array)) { streams = [streams]; } mixer.resetVideoStreams(streams); }; /** * Returns MultiStreamsMixer * @method * @memberof MultiStreamRecorder * @example * let mixer = recorder.getMixer(); * mixer.appendStreams([newStream]); */ this.getMixer = function() { return mixer; }; // for debugging this.name = 'MultiStreamRecorder'; this.toString = function() { return this.name; }; } if (typeof RecordRTC !== 'undefined') { RecordRTC.MultiStreamRecorder = MultiStreamRecorder; } // _____________________ // RecordRTC.promises.js /** * RecordRTCPromisesHandler adds promises support in {@link RecordRTC}. Try a {@link https://github.com/muaz-khan/RecordRTC/blob/master/simple-demos/RecordRTCPromisesHandler.html|demo here} * @summary Promises for {@link RecordRTC} * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef RecordRTCPromisesHandler * @class * @example * var recorder = new RecordRTCPromisesHandler(mediaStream, options); * recorder.startRecording() * .then(successCB) * .catch(errorCB); * // Note: You can access all RecordRTC API using "recorder.recordRTC" e.g. * recorder.recordRTC.onStateChanged = function(state) {}; * recorder.recordRTC.setRecordingDuration(5000); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc. * @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.} * @throws Will throw an error if "new" keyword is not used to initiate "RecordRTCPromisesHandler". Also throws error if first argument "MediaStream" is missing. * @requires {@link RecordRTC} */ function RecordRTCPromisesHandler(mediaStream, options) { if (!this) { throw 'Use "new RecordRTCPromisesHandler()"'; } if (typeof mediaStream === 'undefined') { throw 'First argument "MediaStream" is required.'; } var self = this; /** * @property {Blob} blob - Access/reach the native {@link RecordRTC} object. * @memberof RecordRTCPromisesHandler * @example * let internal = recorder.recordRTC.getInternalRecorder(); * alert(internal instanceof MediaStreamRecorder); * recorder.recordRTC.onStateChanged = function(state) {}; */ self.recordRTC = new RecordRTC(mediaStream, options); /** * This method records MediaStream. 
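 * @example
 * // async/await sketch of a complete record-then-stop cycle (same promise API as the
 * // then/catch form; the { type: 'video' } config and the 5s duration are illustrative only):
 * async function recordForFiveSeconds(mediaStream) {
 *     var recorder = new RecordRTCPromisesHandler(mediaStream, { type: 'video' });
 *     await recorder.startRecording();
 *     await new Promise(function(resolve) { setTimeout(resolve, 5000); });
 *     await recorder.stopRecording();
 *     return recorder.getBlob(); // resolves with the recorded Blob
 * }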
* @method * @memberof RecordRTCPromisesHandler * @example * recorder.startRecording() * .then(successCB) * .catch(errorCB); */ this.startRecording = function() { return new Promise(function(resolve, reject) { try { self.recordRTC.startRecording(); resolve(); } catch (e) { reject(e); } }); }; /** * This method stops the recording. * @method * @memberof RecordRTCPromisesHandler * @example * recorder.stopRecording().then(function() { * var blob = recorder.getBlob(); * }).catch(errorCB); */ this.stopRecording = function() { return new Promise(function(resolve, reject) { try { self.recordRTC.stopRecording(function(url) { self.blob = self.recordRTC.getBlob(); if (!self.blob || !self.blob.size) { reject('Empty blob.', self.blob); return; } resolve(url); }); } catch (e) { reject(e); } }); }; /** * This method pauses the recording. You can resume recording using "resumeRecording" method. * @method * @memberof RecordRTCPromisesHandler * @example * recorder.pauseRecording() * .then(successCB) * .catch(errorCB); */ this.pauseRecording = function() { return new Promise(function(resolve, reject) { try { self.recordRTC.pauseRecording(); resolve(); } catch (e) { reject(e); } }); }; /** * This method resumes the recording. * @method * @memberof RecordRTCPromisesHandler * @example * recorder.resumeRecording() * .then(successCB) * .catch(errorCB); */ this.resumeRecording = function() { return new Promise(function(resolve, reject) { try { self.recordRTC.resumeRecording(); resolve(); } catch (e) { reject(e); } }); }; /** * This method returns data-url for the recorded blob. * @method * @memberof RecordRTCPromisesHandler * @example * recorder.stopRecording().then(function() { * recorder.getDataURL().then(function(dataURL) { * window.open(dataURL); * }).catch(errorCB);; * }).catch(errorCB); */ this.getDataURL = function(callback) { return new Promise(function(resolve, reject) { try { self.recordRTC.getDataURL(function(dataURL) { resolve(dataURL); }); } catch (e) { reject(e); } }); }; /** * This method returns the recorded blob. * @method * @memberof RecordRTCPromisesHandler * @example * recorder.stopRecording().then(function() { * recorder.getBlob().then(function(blob) {}) * }).catch(errorCB); */ this.getBlob = function() { return new Promise(function(resolve, reject) { try { resolve(self.recordRTC.getBlob()); } catch (e) { reject(e); } }); }; /** * This method returns the internal recording object. * @method * @memberof RecordRTCPromisesHandler * @example * let internalRecorder = await recorder.getInternalRecorder(); * if(internalRecorder instanceof MultiStreamRecorder) { * internalRecorder.addStreams([newAudioStream]); * internalRecorder.resetVideoStreams([screenStream]); * } * @returns {Object} */ this.getInternalRecorder = function() { return new Promise(function(resolve, reject) { try { resolve(self.recordRTC.getInternalRecorder()); } catch (e) { reject(e); } }); }; /** * This method resets the recorder. So that you can reuse single recorder instance many times. * @method * @memberof RecordRTCPromisesHandler * @example * await recorder.reset(); * recorder.startRecording(); // record again */ this.reset = function() { return new Promise(function(resolve, reject) { try { resolve(self.recordRTC.reset()); } catch (e) { reject(e); } }); }; /** * Destroy RecordRTC instance. Clear all recorders and objects. 
* @method * @memberof RecordRTCPromisesHandler * @example * recorder.destroy().then(successCB).catch(errorCB); */ this.destroy = function() { return new Promise(function(resolve, reject) { try { resolve(self.recordRTC.destroy()); } catch (e) { reject(e); } }); }; /** * Get recorder's readonly state. * @method * @memberof RecordRTCPromisesHandler * @example * let state = await recorder.getState(); * // or * recorder.getState().then(state => { console.log(state); }) * @returns {String} Returns recording state. */ this.getState = function() { return new Promise(function(resolve, reject) { try { resolve(self.recordRTC.getState()); } catch (e) { reject(e); } }); }; /** * @property {Blob} blob - Recorded data as "Blob" object. * @memberof RecordRTCPromisesHandler * @example * await recorder.stopRecording(); * let blob = recorder.getBlob(); // or "recorder.recordRTC.blob" * invokeSaveAsDialog(blob); */ this.blob = null; /** * RecordRTC version number * @property {String} version - Release version number. * @memberof RecordRTCPromisesHandler * @static * @readonly * @example * alert(recorder.version); */ this.version = '5.6.2'; } if (typeof RecordRTC !== 'undefined') { RecordRTC.RecordRTCPromisesHandler = RecordRTCPromisesHandler; } // ______________________ // WebAssemblyRecorder.js /** * WebAssemblyRecorder lets you create webm videos in JavaScript via WebAssembly. The library consumes raw RGBA32 buffers (4 bytes per pixel) and turns them into a webm video with the given framerate and quality. This makes it compatible out-of-the-box with ImageData from a CANVAS. With realtime mode you can also use webm-wasm for streaming webm videos. * @summary Video recording feature in Chrome, Firefox and maybe Edge. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT} * @author {@link https://MuazKhan.com|Muaz Khan} * @typedef WebAssemblyRecorder * @class * @example * var recorder = new WebAssemblyRecorder(mediaStream); * recorder.record(); * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API. 
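 * (Typically a camera or canvas capture stream; as implemented below, its frames are drawn onto an internal canvas and read back as RGBA ImageData before being posted to the webm-wasm worker.)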
* @param {object} config - {webAssemblyPath:'webm-wasm.wasm',workerPath: 'webm-worker.js', frameRate: 30, width: 1920, height: 1080, bitrate: 1024, realtime: true} */ function WebAssemblyRecorder(stream, config) { // based on: github.com/GoogleChromeLabs/webm-wasm if (typeof ReadableStream === 'undefined' || typeof WritableStream === 'undefined') { // because it fixes readable/writable streams issues console.error('Following polyfill is strongly recommended: https://unpkg.com/@mattiasbuelens/web-streams-polyfill/dist/polyfill.min.js'); } config = config || {}; config.width = config.width || 640; config.height = config.height || 480; config.frameRate = config.frameRate || 30; config.bitrate = config.bitrate || 1200; config.realtime = config.realtime || true; var finished; function cameraStream() { return new ReadableStream({ start: function(controller) { var cvs = document.createElement('canvas'); var video = document.createElement('video'); var first = true; video.srcObject = stream; video.muted = true; video.height = config.height; video.width = config.width; video.volume = 0; video.onplaying = function() { cvs.width = config.width; cvs.height = config.height; var ctx = cvs.getContext('2d'); var frameTimeout = 1000 / config.frameRate; var cameraTimer = setInterval(function f() { if (finished) { clearInterval(cameraTimer); controller.close(); } if (first) { first = false; if (config.onVideoProcessStarted) { config.onVideoProcessStarted(); } } ctx.drawImage(video, 0, 0); if (controller._controlledReadableStream.state !== 'closed') { try { controller.enqueue( ctx.getImageData(0, 0, config.width, config.height) ); } catch (e) {} } }, frameTimeout); }; video.play(); } }); } var worker; function startRecording(stream, buffer) { if (!config.workerPath && !buffer) { finished = false; // is it safe to use @latest ? fetch( 'https://unpkg.com/webm-wasm@latest/dist/webm-worker.js' ).then(function(r) { r.arrayBuffer().then(function(buffer) { startRecording(stream, buffer); }); }); return; } if (!config.workerPath && buffer instanceof ArrayBuffer) { var blob = new Blob([buffer], { type: 'text/javascript' }); config.workerPath = URL.createObjectURL(blob); } if (!config.workerPath) { console.error('workerPath parameter is missing.'); } worker = new Worker(config.workerPath); worker.postMessage(config.webAssemblyPath || 'https://unpkg.com/webm-wasm@latest/dist/webm-wasm.wasm'); worker.addEventListener('message', function(event) { if (event.data === 'READY') { worker.postMessage({ width: config.width, height: config.height, bitrate: config.bitrate || 1200, timebaseDen: config.frameRate || 30, realtime: config.realtime }); cameraStream().pipeTo(new WritableStream({ write: function(image) { if (finished) { console.error('Got image, but recorder is finished!'); return; } worker.postMessage(image.data.buffer, [image.data.buffer]); } })); } else if (!!event.data) { if (!isPaused) { arrayOfBuffers.push(event.data); } } }); } /** * This method records video. * @method * @memberof WebAssemblyRecorder * @example * recorder.record(); */ this.record = function() { arrayOfBuffers = []; isPaused = false; this.blob = null; startRecording(stream); if (typeof config.initCallback === 'function') { config.initCallback(); } }; var isPaused; /** * This method pauses the recording process. * @method * @memberof WebAssemblyRecorder * @example * recorder.pause(); */ this.pause = function() { isPaused = true; }; /** * This method resumes the recording process. 
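 * Note (based on the implementation below): pause()/resume() only toggle whether encoded chunks returned by the worker are collected into the final blob; frame capture and encoding keep running while paused.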
* @method * @memberof WebAssemblyRecorder * @example * recorder.resume(); */ this.resume = function() { isPaused = false; }; function terminate(callback) { if (!worker) { if (callback) { callback(); } return; } // Wait for null event data to indicate that the encoding is complete worker.addEventListener('message', function(event) { if (event.data === null) { worker.terminate(); worker = null; if (callback) { callback(); } } }); worker.postMessage(null); } var arrayOfBuffers = []; /** * This method stops recording video. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee. * @method * @memberof WebAssemblyRecorder * @example * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); */ this.stop = function(callback) { finished = true; var recorder = this; terminate(function() { recorder.blob = new Blob(arrayOfBuffers, { type: 'video/webm' }); callback(recorder.blob); }); }; // for debugging this.name = 'WebAssemblyRecorder'; this.toString = function() { return this.name; }; /** * This method resets currently recorded data. * @method * @memberof WebAssemblyRecorder * @example * recorder.clearRecordedData(); */ this.clearRecordedData = function() { arrayOfBuffers = []; isPaused = false; this.blob = null; // todo: if recording-ON then STOP it first }; /** * @property {Blob} blob - The recorded blob object. * @memberof WebAssemblyRecorder * @example * recorder.stop(function(){ * var blob = recorder.blob; * }); */ this.blob = null; } if (typeof RecordRTC !== 'undefined') { RecordRTC.WebAssemblyRecorder = WebAssemblyRecorder; } }); /* * Copyright (C) 2016 Bilibili. All Rights Reserved. * * @author zheng qian * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ class SPSParser { static _ebsp2rbsp(uint8array) { let src = uint8array; let src_length = src.byteLength; let dst = new Uint8Array(src_length); let dst_idx = 0; for (let i = 0; i < src_length; i++) { if (i >= 2) { // Unescape: Skip 0x03 after 00 00 if (src[i] === 0x03 && src[i - 1] === 0x00 && src[i - 2] === 0x00) { continue; } } dst[dst_idx] = src[i]; dst_idx++; } return new Uint8Array(dst.buffer, 0, dst_idx); } // 解析 SPS // https://zhuanlan.zhihu.com/p/27896239 static parseSPS(uint8array) { let rbsp = SPSParser._ebsp2rbsp(uint8array); let gb = new ExpGolomb$1(rbsp); gb.readByte(); // 标识当前H.264码流的profile。 // 我们知道,H.264中定义了三种常用的档次profile: 基准档次:baseline profile;主要档次:main profile; 扩展档次:extended profile; let profile_idc = gb.readByte(); // profile_idc gb.readByte(); // constraint_set_flags[5] + reserved_zero[3] // 标识当前码流的Level。编码的Level定义了某种条件下的最大视频分辨率、最大视频帧率等参数,码流所遵从的level由level_idc指定。 let level_idc = gb.readByte(); // level_idc // 表示当前的序列参数集的id。通过该id值,图像参数集pps可以引用其代表的sps中的参数。 gb.readUEG(); // seq_parameter_set_id let profile_string = SPSParser.getProfileString(profile_idc); let level_string = SPSParser.getLevelString(level_idc); let chroma_format_idc = 1; let chroma_format = 420; let chroma_format_table = [0, 420, 422, 444]; let bit_depth = 8; // if (profile_idc === 100 || profile_idc === 110 || profile_idc === 122 || profile_idc === 244 || profile_idc === 44 || profile_idc === 83 || profile_idc === 86 || profile_idc === 118 || profile_idc === 128 || profile_idc === 138 || profile_idc === 144) { // chroma_format_idc = gb.readUEG(); if (chroma_format_idc === 3) { gb.readBits(1); // separate_colour_plane_flag } if (chroma_format_idc <= 3) { chroma_format = chroma_format_table[chroma_format_idc]; } bit_depth = gb.readUEG() + 8; // bit_depth_luma_minus8 gb.readUEG(); // bit_depth_chroma_minus8 gb.readBits(1); // qpprime_y_zero_transform_bypass_flag if (gb.readBool()) { // seq_scaling_matrix_present_flag let scaling_list_count = chroma_format_idc !== 3 ? 
8 : 12; for (let i = 0; i < scaling_list_count; i++) { if (gb.readBool()) { // seq_scaling_list_present_flag if (i < 6) { SPSParser._skipScalingList(gb, 16); } else { SPSParser._skipScalingList(gb, 64); } } } } } // 用于计算MaxFrameNum的值。计算公式为MaxFrameNum = 2^(log2_max_frame_num_minus4 + gb.readUEG(); // log2_max_frame_num_minus4 // 表示解码picture order count(POC)的方法。POC是另一种计量图像序号的方式,与frame_num有着不同的计算方法。该语法元素的取值为0、1或2。 let pic_order_cnt_type = gb.readUEG(); if (pic_order_cnt_type === 0) { gb.readUEG(); // log2_max_pic_order_cnt_lsb_minus_4 } else if (pic_order_cnt_type === 1) { gb.readBits(1); // delta_pic_order_always_zero_flag gb.readSEG(); // offset_for_non_ref_pic gb.readSEG(); // offset_for_top_to_bottom_field let num_ref_frames_in_pic_order_cnt_cycle = gb.readUEG(); for (let i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++) { gb.readSEG(); // offset_for_ref_frame } } // 用于表示参考帧的最大数目。 let ref_frames = gb.readUEG(); // max_num_ref_frames // 标识位,说明frame_num中是否允许不连续的值。 gb.readBits(1); // gaps_in_frame_num_value_allowed_flag // 用于计算图像的宽度。单位为宏块个数,因此图像的实际宽度为: let pic_width_in_mbs_minus1 = gb.readUEG(); // 使用PicHeightInMapUnits来度量视频中一帧图像的高度。 // PicHeightInMapUnits并非图像明确的以像素或宏块为单位的高度,而需要考虑该宏块是帧编码或场编码。PicHeightInMapUnits的计算方式为: let pic_height_in_map_units_minus1 = gb.readUEG(); // 标识位,说明宏块的编码方式。当该标识位为0时,宏块可能为帧编码或场编码; // 该标识位为1时,所有宏块都采用帧编码。根据该标识位取值不同,PicHeightInMapUnits的含义也不同, // 为0时表示一场数据按宏块计算的高度,为1时表示一帧数据按宏块计算的高度。 let frame_mbs_only_flag = gb.readBits(1); if (frame_mbs_only_flag === 0) { // 标识位,说明是否采用了宏块级的帧场自适应编码。当该标识位为0时,不存在帧编码和场编码之间的切换;当标识位为1时,宏块可能在帧编码和场编码模式之间进行选择。 gb.readBits(1); // mb_adaptive_frame_field_flag } // 标识位,用于B_Skip、B_Direct模式运动矢量的推导计算。 gb.readBits(1); // direct_8x8_inference_flag let frame_crop_left_offset = 0; let frame_crop_right_offset = 0; let frame_crop_top_offset = 0; let frame_crop_bottom_offset = 0; let frame_cropping_flag = gb.readBool(); if (frame_cropping_flag) { frame_crop_left_offset = gb.readUEG(); frame_crop_right_offset = gb.readUEG(); frame_crop_top_offset = gb.readUEG(); frame_crop_bottom_offset = gb.readUEG(); } let sar_width = 1, sar_height = 1; let fps = 0, fps_fixed = true, fps_num = 0, fps_den = 0; // 标识位,说明SPS中是否存在VUI信息。 let vui_parameters_present_flag = gb.readBool(); if (vui_parameters_present_flag) { if (gb.readBool()) { // aspect_ratio_info_present_flag let aspect_ratio_idc = gb.readByte(); let sar_w_table = [1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160, 4, 3, 2]; let sar_h_table = [1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99, 3, 2, 1]; if (aspect_ratio_idc > 0 && aspect_ratio_idc < 16) { sar_width = sar_w_table[aspect_ratio_idc - 1]; sar_height = sar_h_table[aspect_ratio_idc - 1]; } else if (aspect_ratio_idc === 255) { sar_width = gb.readByte() << 8 | gb.readByte(); sar_height = gb.readByte() << 8 | gb.readByte(); } } if (gb.readBool()) { // overscan_info_present_flag gb.readBool(); // overscan_appropriate_flag } if (gb.readBool()) { // video_signal_type_present_flag gb.readBits(4); // video_format & video_full_range_flag if (gb.readBool()) { // colour_description_present_flag gb.readBits(24); // colour_primaries & transfer_characteristics & matrix_coefficients } } if (gb.readBool()) { // chroma_loc_info_present_flag gb.readUEG(); // chroma_sample_loc_type_top_field gb.readUEG(); // chroma_sample_loc_type_bottom_field } if (gb.readBool()) { // timing_info_present_flag let num_units_in_tick = gb.readBits(32); let time_scale = gb.readBits(32); fps_fixed = gb.readBool(); // fixed_frame_rate_flag fps_num = time_scale; fps_den = 
num_units_in_tick * 2; fps = fps_num / fps_den; } } let sarScale = 1; if (sar_width !== 1 || sar_height !== 1) { sarScale = sar_width / sar_height; } let crop_unit_x = 0, crop_unit_y = 0; if (chroma_format_idc === 0) { crop_unit_x = 1; crop_unit_y = 2 - frame_mbs_only_flag; } else { let sub_wc = chroma_format_idc === 3 ? 1 : 2; let sub_hc = chroma_format_idc === 1 ? 2 : 1; crop_unit_x = sub_wc; crop_unit_y = sub_hc * (2 - frame_mbs_only_flag); } let codec_width = (pic_width_in_mbs_minus1 + 1) * 16; let codec_height = (2 - frame_mbs_only_flag) * ((pic_height_in_map_units_minus1 + 1) * 16); codec_width -= (frame_crop_left_offset + frame_crop_right_offset) * crop_unit_x; codec_height -= (frame_crop_top_offset + frame_crop_bottom_offset) * crop_unit_y; let present_width = Math.ceil(codec_width * sarScale); gb.destroy(); gb = null; // 解析出来的SPS 内容。 return { profile_string: profile_string, // baseline, high, high10, ... level_string: level_string, // 3, 3.1, 4, 4.1, 5, 5.1, ... bit_depth: bit_depth, // 8bit, 10bit, ... ref_frames: ref_frames, chroma_format: chroma_format, // 4:2:0, 4:2:2, ... chroma_format_string: SPSParser.getChromaFormatString(chroma_format), frame_rate: { fixed: fps_fixed, fps: fps, fps_den: fps_den, fps_num: fps_num }, sar_ratio: { width: sar_width, height: sar_height }, codec_size: { width: codec_width, height: codec_height }, present_size: { width: present_width, height: codec_height } }; } static parseSPS$2(uint8array) { let codec_array = uint8array.subarray(1, 4); let codec_mimetype = 'avc1.'; for (let j = 0; j < 3; j++) { let h = codec_array[j].toString(16); if (h.length < 2) { h = '0' + h; } codec_mimetype += h; } let rbsp = SPSParser._ebsp2rbsp(uint8array); let gb = new ExpGolomb$1(rbsp); gb.readByte(); let profile_idc = gb.readByte(); // profile_idc gb.readByte(); // constraint_set_flags[5] + reserved_zero[3] let level_idc = gb.readByte(); // level_idc gb.readUEG(); // seq_parameter_set_id let profile_string = SPSParser.getProfileString(profile_idc); let level_string = SPSParser.getLevelString(level_idc); let chroma_format_idc = 1; let chroma_format = 420; let chroma_format_table = [0, 420, 422, 444]; let bit_depth_luma = 8; let bit_depth_chroma = 8; if (profile_idc === 100 || profile_idc === 110 || profile_idc === 122 || profile_idc === 244 || profile_idc === 44 || profile_idc === 83 || profile_idc === 86 || profile_idc === 118 || profile_idc === 128 || profile_idc === 138 || profile_idc === 144) { chroma_format_idc = gb.readUEG(); if (chroma_format_idc === 3) { gb.readBits(1); // separate_colour_plane_flag } if (chroma_format_idc <= 3) { chroma_format = chroma_format_table[chroma_format_idc]; } bit_depth_luma = gb.readUEG() + 8; // bit_depth_luma_minus8 bit_depth_chroma = gb.readUEG() + 8; // bit_depth_chroma_minus8 gb.readBits(1); // qpprime_y_zero_transform_bypass_flag if (gb.readBool()) { // seq_scaling_matrix_present_flag let scaling_list_count = chroma_format_idc !== 3 ? 
8 : 12; for (let i = 0; i < scaling_list_count; i++) { if (gb.readBool()) { // seq_scaling_list_present_flag if (i < 6) { SPSParser._skipScalingList(gb, 16); } else { SPSParser._skipScalingList(gb, 64); } } } } } gb.readUEG(); // log2_max_frame_num_minus4 let pic_order_cnt_type = gb.readUEG(); if (pic_order_cnt_type === 0) { gb.readUEG(); // log2_max_pic_order_cnt_lsb_minus_4 } else if (pic_order_cnt_type === 1) { gb.readBits(1); // delta_pic_order_always_zero_flag gb.readSEG(); // offset_for_non_ref_pic gb.readSEG(); // offset_for_top_to_bottom_field let num_ref_frames_in_pic_order_cnt_cycle = gb.readUEG(); for (let i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++) { gb.readSEG(); // offset_for_ref_frame } } let ref_frames = gb.readUEG(); // max_num_ref_frames gb.readBits(1); // gaps_in_frame_num_value_allowed_flag let pic_width_in_mbs_minus1 = gb.readUEG(); let pic_height_in_map_units_minus1 = gb.readUEG(); let frame_mbs_only_flag = gb.readBits(1); if (frame_mbs_only_flag === 0) { gb.readBits(1); // mb_adaptive_frame_field_flag } gb.readBits(1); // direct_8x8_inference_flag let frame_crop_left_offset = 0; let frame_crop_right_offset = 0; let frame_crop_top_offset = 0; let frame_crop_bottom_offset = 0; let frame_cropping_flag = gb.readBool(); if (frame_cropping_flag) { frame_crop_left_offset = gb.readUEG(); frame_crop_right_offset = gb.readUEG(); frame_crop_top_offset = gb.readUEG(); frame_crop_bottom_offset = gb.readUEG(); } let sar_width = 1, sar_height = 1; let fps = 0, fps_fixed = true, fps_num = 0, fps_den = 0; let vui_parameters_present_flag = gb.readBool(); if (vui_parameters_present_flag) { if (gb.readBool()) { // aspect_ratio_info_present_flag let aspect_ratio_idc = gb.readByte(); let sar_w_table = [1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160, 4, 3, 2]; let sar_h_table = [1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99, 3, 2, 1]; if (aspect_ratio_idc > 0 && aspect_ratio_idc < 16) { sar_width = sar_w_table[aspect_ratio_idc - 1]; sar_height = sar_h_table[aspect_ratio_idc - 1]; } else if (aspect_ratio_idc === 255) { sar_width = gb.readByte() << 8 | gb.readByte(); sar_height = gb.readByte() << 8 | gb.readByte(); } } if (gb.readBool()) { // overscan_info_present_flag gb.readBool(); // overscan_appropriate_flag } if (gb.readBool()) { // video_signal_type_present_flag gb.readBits(4); // video_format & video_full_range_flag if (gb.readBool()) { // colour_description_present_flag gb.readBits(24); // colour_primaries & transfer_characteristics & matrix_coefficients } } if (gb.readBool()) { // chroma_loc_info_present_flag gb.readUEG(); // chroma_sample_loc_type_top_field gb.readUEG(); // chroma_sample_loc_type_bottom_field } if (gb.readBool()) { // timing_info_present_flag let num_units_in_tick = gb.readBits(32); let time_scale = gb.readBits(32); fps_fixed = gb.readBool(); // fixed_frame_rate_flag fps_num = time_scale; fps_den = num_units_in_tick * 2; fps = fps_num / fps_den; } } let sarScale = 1; if (sar_width !== 1 || sar_height !== 1) { sarScale = sar_width / sar_height; } let crop_unit_x = 0, crop_unit_y = 0; if (chroma_format_idc === 0) { crop_unit_x = 1; crop_unit_y = 2 - frame_mbs_only_flag; } else { let sub_wc = chroma_format_idc === 3 ? 1 : 2; let sub_hc = chroma_format_idc === 1 ? 
2 : 1; crop_unit_x = sub_wc; crop_unit_y = sub_hc * (2 - frame_mbs_only_flag); } let codec_width = (pic_width_in_mbs_minus1 + 1) * 16; let codec_height = (2 - frame_mbs_only_flag) * ((pic_height_in_map_units_minus1 + 1) * 16); codec_width -= (frame_crop_left_offset + frame_crop_right_offset) * crop_unit_x; codec_height -= (frame_crop_top_offset + frame_crop_bottom_offset) * crop_unit_y; let present_width = Math.ceil(codec_width * sarScale); gb.destroy(); gb = null; return { codec_mimetype, profile_idc, level_idc, profile_string, // baseline, high, high10, ... level_string, // 3, 3.1, 4, 4.1, 5, 5.1, ... chroma_format_idc, bit_depth: bit_depth_luma, // 8bit, 10bit, ... bit_depth_luma, bit_depth_chroma, ref_frames, chroma_format, // 4:2:0, 4:2:2, ... chroma_format_string: SPSParser.getChromaFormatString(chroma_format), frame_rate: { fixed: fps_fixed, fps: fps, fps_den: fps_den, fps_num: fps_num }, sar_ratio: { width: sar_width, height: sar_height }, codec_size: { width: codec_width, height: codec_height }, present_size: { width: present_width, height: codec_height } }; } static _skipScalingList(gb, count) { let last_scale = 8, next_scale = 8; let delta_scale = 0; for (let i = 0; i < count; i++) { if (next_scale !== 0) { delta_scale = gb.readSEG(); next_scale = (last_scale + delta_scale + 256) % 256; } last_scale = next_scale === 0 ? last_scale : next_scale; } } // profile_idc = 66 → baseline profile; // profile_idc = 77 → main profile; // profile_idc = 88 → extended profile; // 在新版的标准中,还包括了High、High 10、High 4:2:2、High 4:4:4、High 10 Intra、High // 4:2:2 Intra、High 4:4:4 Intra、CAVLC 4:4:4 Intra static getProfileString(profile_idc) { switch (profile_idc) { case 66: return 'Baseline'; case 77: return 'Main'; case 88: return 'Extended'; case 100: return 'High'; case 110: return 'High10'; case 122: return 'High422'; case 244: return 'High444'; default: return 'Unknown'; } } static getLevelString(level_idc) { return (level_idc / 10).toFixed(1); } static getChromaFormatString(chroma) { switch (chroma) { case 420: return '4:2:0'; case 422: return '4:2:2'; case 444: return '4:4:4'; default: return 'Unknown'; } } } class Bitop { constructor(buffer) { this.buffer = buffer; this.buflen = buffer.length; this.bufpos = 0; this.bufoff = 0; this.iserro = false; } read(n) { let v = 0; let d = 0; while (n) { if (n < 0 || this.bufpos >= this.buflen) { this.iserro = true; return 0; } this.iserro = false; d = this.bufoff + n > 8 ? 
8 - this.bufoff : n; v <<= d; v += this.buffer[this.bufpos] >> 8 - this.bufoff - d & 0xff >> 8 - d; this.bufoff += d; n -= d; if (this.bufoff == 8) { this.bufpos++; this.bufoff = 0; } } return v; } look(n) { let p = this.bufpos; let o = this.bufoff; let v = this.read(n); this.bufpos = p; this.bufoff = o; return v; } read_golomb() { let n; for (n = 0; this.read(1) === 0 && !this.iserro; n++); return (1 << n) + this.read(n) - 1; } } // function parseAVCDecoderConfigurationRecord(arrayBuffer) { const meta = {}; let le = function () { let buf = new ArrayBuffer(2); new DataView(buf).setInt16(0, 256, true); // little-endian write return new Int16Array(buf)[0] === 256; // platform-spec read, if equal then LE }(); const v = new DataView(arrayBuffer.buffer); let version = v.getUint8(0); // configurationVersion let avcProfile = v.getUint8(1); // avcProfileIndication v.getUint8(2); // profile_compatibil v.getUint8(3); // AVCLevelIndication if (version !== 1 || avcProfile === 0) { // this._onError(DemuxErrors.FORMAT_ERROR, 'Flv: Invalid AVCDecoderConfigurationRecord'); return {}; } const _naluLengthSize = (v.getUint8(4) & 3) + 1; // lengthSizeMinusOne if (_naluLengthSize !== 3 && _naluLengthSize !== 4) { // holy shit!!! // this._onError(DemuxErrors.FORMAT_ERROR, `Flv: Strange NaluLengthSizeMinusOne: ${_naluLengthSize - 1}`); return {}; } let spsCount = v.getUint8(5) & 31; // numOfSequenceParameterSets if (spsCount === 0) { // this._onError(DemuxErrors.FORMAT_ERROR, 'Flv: Invalid AVCDecoderConfigurationRecord: No SPS'); return {}; } let offset = 6; for (let i = 0; i < spsCount; i++) { let len = v.getUint16(offset, !le); // sequenceParameterSetLength offset += 2; if (len === 0) { continue; } // Notice: Nalu without startcode header (00 00 00 01) let sps = new Uint8Array(arrayBuffer.buffer, offset, len); offset += len; // flv.js作者选择了自己来解析这个数据结构,也是迫不得已,因为JS环境下没有ffmpeg,解析这个结构主要是为了提取 sps和pps。虽然理论上sps允许有多个,但其实一般就一个。 // packetTtype 为 1 表示 NALU,NALU= network abstract layer unit,这是H.264的概念,网络抽象层数据单元,其实简单理解就是一帧视频数据。 // pps的信息没什么用,所以作者只实现了sps的分析器,说明作者下了很大功夫去学习264的标准,其中的Golomb解码还是挺复杂的,能解对不容易,我在PC和手机平台都是用ffmpeg去解析的。 // SPS里面包括了视频分辨率,帧率,profile level等视频重要信息。 let config = SPSParser.parseSPS(sps); // console.log('h264 sps config',config) if (i !== 0) { // ignore other sps's config continue; } meta.sps = sps; meta.timescale = 1000; meta.codecWidth = config.codec_size.width; meta.codecHeight = config.codec_size.height; meta.presentWidth = config.present_size.width; meta.presentHeight = config.present_size.height; meta.profile = config.profile_string; meta.level = config.level_string; meta.bitDepth = config.bit_depth; meta.chromaFormat = config.chroma_format; meta.sarRatio = config.sar_ratio; meta.frameRate = config.frame_rate; if (config.frame_rate.fixed === false || config.frame_rate.fps_num === 0 || config.frame_rate.fps_den === 0) { meta.frameRate = { fixed: true, // fps: 23.976, // fps_num: 23976, fps: 25, fps_num: 25000, fps_den: 1000 }; } let fps_den = meta.frameRate.fps_den; let fps_num = meta.frameRate.fps_num; meta.refSampleDuration = meta.timescale * (fps_den / fps_num); let codecArray = sps.subarray(1, 4); let codecString = 'avc1.'; for (let j = 0; j < 3; j++) { let h = codecArray[j].toString(16); if (h.length < 2) { h = '0' + h; } codecString += h; } // codec meta.codec = codecString; } let ppsCount = v.getUint8(offset); // numOfPictureParameterSets if (ppsCount === 0) { // this._onError(DemuxErrors.FORMAT_ERROR, 'Flv: Invalid AVCDecoderConfigurationRecord: No PPS'); return {}; } offset++; for (let i = 0; i 
< ppsCount; i++) { let len = v.getUint16(offset, !le); // pictureParameterSetLength offset += 2; if (len === 0) { continue; } let pps = new Uint8Array(arrayBuffer.buffer, offset, len); // pps is useless for extracting video information offset += len; meta.pps = pps; } meta.videoType = VIDEO_ENCODE_TYPE.h264; if (meta.sps) { const spsLength = meta.sps.byteLength; const spsFlag = new Uint8Array([spsLength >>> 24 & 0xFF, spsLength >>> 16 & 0xFF, spsLength >>> 8 & 0xFF, spsLength & 0xFF]); const sps = new Uint8Array(spsLength + 4); sps.set(spsFlag, 0); sps.set(meta.sps, 4); meta.sps = sps; } if (meta.pps) { const ppsLength = meta.pps.byteLength; const ppsFlag = new Uint8Array([ppsLength >>> 24 & 0xFF, ppsLength >>> 16 & 0xFF, ppsLength >>> 8 & 0xFF, ppsLength & 0xFF]); const pps = new Uint8Array(ppsLength + 4); pps.set(ppsFlag, 0); pps.set(meta.pps, 4); meta.pps = pps; } // meta.avcc = arrayBuffer; return meta; } function avcEncoderConfigurationRecord$2(_ref2) { let { sps, pps } = _ref2; // require Nalu without 4 byte length-header let length = 6 + 2 + sps.byteLength + 1 + 2 + pps.byteLength; let need_extra_fields = false; const sps_details = SPSParser.parseSPS$2(sps); if (sps[3] !== 66 && sps[3] !== 77 && sps[3] !== 88) { need_extra_fields = true; length += 4; } let data = new Uint8Array(length); data[0] = 0x01; // configurationVersion data[1] = sps[1]; // AVCProfileIndication data[2] = sps[2]; // profile_compatibility data[3] = sps[3]; // AVCLevelIndication data[4] = 0xFF; // 111111 + lengthSizeMinusOne(3) data[5] = 0xE0 | 0x01; // 111 + numOfSequenceParameterSets let sps_length = sps.byteLength; data[6] = sps_length >>> 8; // sequenceParameterSetLength data[7] = sps_length & 0xFF; let offset = 8; data.set(sps, 8); offset += sps_length; data[offset] = 1; // numOfPictureParameterSets let pps_length = pps.byteLength; data[offset + 1] = pps_length >>> 8; // pictureParameterSetLength data[offset + 2] = pps_length & 0xFF; data.set(pps, offset + 3); offset += 3 + pps_length; if (need_extra_fields) { data[offset] = 0xFC | sps_details.chroma_format_idc; data[offset + 1] = 0xF8 | sps_details.bit_depth_luma - 8; data[offset + 2] = 0xF8 | sps_details.bit_depth_chroma - 8; data[offset + 3] = 0x00; // number of sps ext offset += 4; } const prevData = [0x17, 0x00, 0x00, 0x00, 0x00]; const newData = new Uint8Array(prevData.length + data.byteLength); newData.set(prevData, 0); newData.set(data, prevData.length); return newData; } /** * * @param oneNALBuffer * @param isIframe * @returns {Uint8Array} */ function avcEncoderNalePacket(oneNALBuffer, isIframe) { // 正常发送nal const idrBit = 0x10 | 7; const nIdrBit = 0x20 | 7; let tmp = []; if (isIframe) { tmp[0] = idrBit; } else { tmp[0] = nIdrBit; } // compositionTime tmp[1] = 1; tmp[2] = 0; tmp[3] = 0; tmp[4] = 0; // tmp[5] = oneNALBuffer.byteLength >> 24 & 0xff; tmp[6] = oneNALBuffer.byteLength >> 16 & 0xff; tmp[7] = oneNALBuffer.byteLength >> 8 & 0xff; tmp[8] = oneNALBuffer.byteLength & 0xff; const arrayBuffer = new Uint8Array(tmp.length + oneNALBuffer.byteLength); arrayBuffer.set(tmp, 0); arrayBuffer.set(oneNALBuffer, tmp.length); return arrayBuffer; } function avcEncoderNalePacketNotLength(oneNALBuffer, isIframe) { // 正常发送nal const idrBit = 0x10 | 7; const nIdrBit = 0x20 | 7; let tmp = []; if (isIframe) { tmp[0] = idrBit; } else { tmp[0] = nIdrBit; } // compositionTime tmp[1] = 1; tmp[2] = 0; tmp[3] = 0; tmp[4] = 0; const arrayBuffer = new Uint8Array(tmp.length + oneNALBuffer.byteLength); arrayBuffer.set(tmp, 0); arrayBuffer.set(oneNALBuffer, tmp.length); 
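// Resulting layout (FLV AVCVIDEOPACKET as understood here): byte 0 = FrameType(1=key, 2=inter) << 4 | CodecID(7=AVC),
// byte 1 = AVCPacketType(1 = NALU), bytes 2-4 = composition time (0), followed directly by the raw NAL unit.
// Unlike avcEncoderNalePacket above, this variant does not prepend the 4-byte NALU length field.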
return arrayBuffer; } /** * (NALU类型 & 0001 1111) * @param nalu * @returns {number} */ function getAvcSeqHeadType(nalu) { return nalu[0] & 0b0001_1111; } function isAvcSeqHead(type) { return type === H264_NAL_TYPE.sps || type === H264_NAL_TYPE.pps; } function isHvcSEIType(type) { return type === H264_NAL_TYPE.kSliceSEI; } function isNotAvcSeqHead(type) { return !isAvcSeqHead(type) && !isHvcSEIType(type); } function isAvcNaluIFrame(type) { return type === H264_NAL_TYPE.iFrame; } function isSameAvcNaluType(naluList) { if (naluList.length === 0) { return false; } const type = getAvcSeqHeadType(naluList[0]); for (let i = 1; i < naluList.length; i++) { if (type !== getAvcSeqHeadType(naluList[i])) { return false; } } return true; } class H264AnnexBParser { constructor(data) { this.data = data; this.eofFlag = false; this.currentStartcodeOffset = this.findNextStartCodeOffset(0); if (this.eofFlag) { console.error('Could not find H264 startcode until payload end!'); } } findNextStartCodeOffset(start_offset) { let i = start_offset; let data = this.data; while (true) { if (i + 3 >= data.byteLength) { this.eofFlag = true; return data.byteLength; } // search 00 00 00 01 or 00 00 01 let uint32 = data[i + 0] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]; let uint24 = data[i + 0] << 16 | data[i + 1] << 8 | data[i + 2]; if (uint32 === 0x00000001 || uint24 === 0x000001) { return i; } else { i++; } } } readNextNaluPayload() { let data = this.data; let nalu_payload = null; while (nalu_payload == null) { if (this.eofFlag) { break; } // offset pointed to start code let startcode_offset = this.currentStartcodeOffset; // nalu payload start offset let offset = startcode_offset; let u32 = data[offset] << 24 | data[offset + 1] << 16 | data[offset + 2] << 8 | data[offset + 3]; if (u32 === 0x00000001) { offset += 4; } else { offset += 3; } let nalu_type = data[offset] & 0x1F; let forbidden_bit = (data[offset] & 0x80) >>> 7; let next_startcode_offset = this.findNextStartCodeOffset(offset); this.currentStartcodeOffset = next_startcode_offset; if (nalu_type >= H264_NAL_TYPE.kReserved0) { continue; } if (forbidden_bit !== 0) { // Log.e(this.TAG, `forbidden_bit near offset ${offset} should be 0 but has value ${forbidden_bit}`); continue; } let payload_data = data.subarray(offset, next_startcode_offset); nalu_payload = { type: nalu_type, data: payload_data }; } return nalu_payload; } } class H264NaluAVC1 { constructor(nalu) { let nalu_size = nalu.data.byteLength; this.type = nalu.type; this.data = new Uint8Array(4 + nalu_size); // 4 byte length-header + nalu payload let v = new DataView(this.data.buffer); // Fill 4 byte length-header v.setUint32(0, nalu_size); // Copy payload this.data.set(nalu.data, 4); } } const _ebsp2rbsp = uint8array => { let src = uint8array; let src_length = src.byteLength; let dst = new Uint8Array(src_length); let dst_idx = 0; for (let i = 0; i < src_length; i++) { if (i >= 2) { // Unescape: Skip 0x03 after 00 00 if (src[i] === 0x03 && src[i - 1] === 0x00 && src[i - 2] === 0x00) { continue; } } dst[dst_idx] = src[i]; dst_idx++; } return new Uint8Array(dst.buffer, 0, dst_idx); }; const getLevelString = level_idc => { return (level_idc / 30).toFixed(1); }; const getChromaFormatString = chroma_format_idc => { switch (chroma_format_idc) { case 0: return '4:0:0'; case 1: return '4:2:0'; case 2: return '4:2:2'; case 3: return '4:4:4'; default: return 'Unknown'; } }; const parseHevcSPS = uint8array => { let rbsp = _ebsp2rbsp(uint8array); let gb = new ExpGolomb$1(rbsp); /* remove NALu Header */ 
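// The two readByte() calls below skip the 2-byte HEVC nal_unit_header
// (forbidden_zero_bit, nal_unit_type, nuh_layer_id, nuh_temporal_id_plus1),
// so bit parsing starts at the SPS payload itself.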
gb.readByte(); gb.readByte(); let left_offset = 0, right_offset = 0, top_offset = 0, bottom_offset = 0; // SPS gb.readBits(4); let max_sub_layers_minus1 = gb.readBits(3); gb.readBool(); // profile_tier_level begin let general_profile_space = gb.readBits(2); let general_tier_flag = gb.readBool(); let general_profile_idc = gb.readBits(5); let general_profile_compatibility_flags_1 = gb.readByte(); let general_profile_compatibility_flags_2 = gb.readByte(); let general_profile_compatibility_flags_3 = gb.readByte(); let general_profile_compatibility_flags_4 = gb.readByte(); let general_constraint_indicator_flags_1 = gb.readByte(); let general_constraint_indicator_flags_2 = gb.readByte(); let general_constraint_indicator_flags_3 = gb.readByte(); let general_constraint_indicator_flags_4 = gb.readByte(); let general_constraint_indicator_flags_5 = gb.readByte(); let general_constraint_indicator_flags_6 = gb.readByte(); let general_level_idc = gb.readByte(); let sub_layer_profile_present_flag = []; let sub_layer_level_present_flag = []; for (let i = 0; i < max_sub_layers_minus1; i++) { sub_layer_profile_present_flag.push(gb.readBool()); sub_layer_level_present_flag.push(gb.readBool()); } if (max_sub_layers_minus1 > 0) { for (let i = max_sub_layers_minus1; i < 8; i++) { gb.readBits(2); } } for (let i = 0; i < max_sub_layers_minus1; i++) { if (sub_layer_profile_present_flag[i]) { gb.readByte(); // sub_layer_profile_space, sub_layer_tier_flag, sub_layer_profile_idc gb.readByte(); gb.readByte(); gb.readByte(); gb.readByte(); // sub_layer_profile_compatibility_flag gb.readByte(); gb.readByte(); gb.readByte(); gb.readByte(); gb.readByte(); gb.readByte(); } if (sub_layer_profile_present_flag[i]) { gb.readByte(); } } // profile_tier_level end gb.readUEG(); let chroma_format_idc = gb.readUEG(); if (chroma_format_idc == 3) { gb.readBits(1); // separate_colour_plane_flag } let pic_width_in_luma_samples = gb.readUEG(); let pic_height_in_luma_samples = gb.readUEG(); let conformance_window_flag = gb.readBool(); if (conformance_window_flag) { left_offset += gb.readUEG(); right_offset += gb.readUEG(); top_offset += gb.readUEG(); bottom_offset += gb.readUEG(); } let bit_depth_luma_minus8 = gb.readUEG(); let bit_depth_chroma_minus8 = gb.readUEG(); let log2_max_pic_order_cnt_lsb_minus4 = gb.readUEG(); let sub_layer_ordering_info_present_flag = gb.readBool(); for (let i = sub_layer_ordering_info_present_flag ? 0 : max_sub_layers_minus1; i <= max_sub_layers_minus1; i++) { gb.readUEG(); // max_dec_pic_buffering_minus1[i] gb.readUEG(); // max_num_reorder_pics[i] gb.readUEG(); // max_latency_increase_plus1[i] } gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); let scaling_list_enabled_flag = gb.readBool(); if (scaling_list_enabled_flag) { let sps_scaling_list_data_present_flag = gb.readBool(); if (sps_scaling_list_data_present_flag) { for (let sizeId = 0; sizeId < 4; sizeId++) { for (let matrixId = 0; matrixId < (sizeId === 3 ? 
2 : 6); matrixId++) { let scaling_list_pred_mode_flag = gb.readBool(); if (!scaling_list_pred_mode_flag) { gb.readUEG(); // scaling_list_pred_matrix_id_delta } else { let coefNum = Math.min(64, 1 << 4 + (sizeId << 1)); if (sizeId > 1) { gb.readSEG(); } for (let i = 0; i < coefNum; i++) { gb.readSEG(); } } } } } } gb.readBool(); gb.readBool(); let pcm_enabled_flag = gb.readBool(); if (pcm_enabled_flag) { gb.readByte(); gb.readUEG(); gb.readUEG(); gb.readBool(); } let num_short_term_ref_pic_sets = gb.readUEG(); let num_delta_pocs = 0; for (let i = 0; i < num_short_term_ref_pic_sets; i++) { let inter_ref_pic_set_prediction_flag = false; if (i !== 0) { inter_ref_pic_set_prediction_flag = gb.readBool(); } if (inter_ref_pic_set_prediction_flag) { if (i === num_short_term_ref_pic_sets) { gb.readUEG(); } gb.readBool(); gb.readUEG(); let next_num_delta_pocs = 0; for (let j = 0; j <= num_delta_pocs; j++) { let used_by_curr_pic_flag = gb.readBool(); let use_delta_flag = false; if (!used_by_curr_pic_flag) { use_delta_flag = gb.readBool(); } if (used_by_curr_pic_flag || use_delta_flag) { next_num_delta_pocs++; } } num_delta_pocs = next_num_delta_pocs; } else { let num_negative_pics = gb.readUEG(); let num_positive_pics = gb.readUEG(); num_delta_pocs = num_negative_pics + num_positive_pics; for (let j = 0; j < num_negative_pics; j++) { gb.readUEG(); gb.readBool(); } for (let j = 0; j < num_positive_pics; j++) { gb.readUEG(); gb.readBool(); } } } let long_term_ref_pics_present_flag = gb.readBool(); if (long_term_ref_pics_present_flag) { let num_long_term_ref_pics_sps = gb.readUEG(); for (let i = 0; i < num_long_term_ref_pics_sps; i++) { for (let j = 0; j < log2_max_pic_order_cnt_lsb_minus4 + 4; j++) { gb.readBits(1); } gb.readBits(1); } } //* let default_display_window_flag = false; // for calc offset let min_spatial_segmentation_idc = 0; // for hvcC let sar_width = 1, sar_height = 1; let fps_fixed = false, fps_den = 1, fps_num = 1; //*/ gb.readBool(); gb.readBool(); let vui_parameters_present_flag = gb.readBool(); if (vui_parameters_present_flag) { let aspect_ratio_info_present_flag = gb.readBool(); if (aspect_ratio_info_present_flag) { let aspect_ratio_idc = gb.readByte(); let sar_w_table = [1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160, 4, 3, 2]; let sar_h_table = [1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99, 3, 2, 1]; if (aspect_ratio_idc > 0 && aspect_ratio_idc < 16) { sar_width = sar_w_table[aspect_ratio_idc - 1]; sar_height = sar_h_table[aspect_ratio_idc - 1]; } else if (aspect_ratio_idc === 255) { sar_width = gb.readBits(16); sar_height = gb.readBits(16); } } let overscan_info_present_flag = gb.readBool(); if (overscan_info_present_flag) { gb.readBool(); } let video_signal_type_present_flag = gb.readBool(); if (video_signal_type_present_flag) { gb.readBits(3); gb.readBool(); let colour_description_present_flag = gb.readBool(); if (colour_description_present_flag) { gb.readByte(); gb.readByte(); gb.readByte(); } } let chroma_loc_info_present_flag = gb.readBool(); if (chroma_loc_info_present_flag) { gb.readUEG(); gb.readUEG(); } gb.readBool(); gb.readBool(); gb.readBool(); default_display_window_flag = gb.readBool(); if (default_display_window_flag) { left_offset += gb.readUEG(); right_offset += gb.readUEG(); top_offset += gb.readUEG(); bottom_offset += gb.readUEG(); } let vui_timing_info_present_flag = gb.readBool(); if (vui_timing_info_present_flag) { fps_den = gb.readBits(32); fps_num = gb.readBits(32); let vui_poc_proportional_to_timing_flag = gb.readBool(); if 
(vui_poc_proportional_to_timing_flag) { gb.readUEG(); let vui_hrd_parameters_present_flag = gb.readBool(); if (vui_hrd_parameters_present_flag) { let nal_hrd_parameters_present_flag = false; let vcl_hrd_parameters_present_flag = false; let sub_pic_hrd_params_present_flag = false; { nal_hrd_parameters_present_flag = gb.readBool(); vcl_hrd_parameters_present_flag = gb.readBool(); if (nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag) { sub_pic_hrd_params_present_flag = gb.readBool(); if (sub_pic_hrd_params_present_flag) { gb.readByte(); gb.readBits(5); gb.readBool(); gb.readBits(5); } gb.readBits(4); gb.readBits(4); if (sub_pic_hrd_params_present_flag) { gb.readBits(4); } gb.readBits(5); gb.readBits(5); gb.readBits(5); } } for (let i = 0; i <= max_sub_layers_minus1; i++) { let fixed_pic_rate_general_flag = gb.readBool(); fps_fixed = fixed_pic_rate_general_flag; let fixed_pic_rate_within_cvs_flag = true; let cpbCnt = 1; if (!fixed_pic_rate_general_flag) { fixed_pic_rate_within_cvs_flag = gb.readBool(); } let low_delay_hrd_flag = false; if (fixed_pic_rate_within_cvs_flag) { gb.readSEG(); } else { low_delay_hrd_flag = gb.readBool(); } if (!low_delay_hrd_flag) { cpbCnt = gb.readUEG() + 1; } if (nal_hrd_parameters_present_flag) { for (let j = 0; j < cpbCnt; j++) { gb.readUEG(); gb.readUEG(); if (sub_pic_hrd_params_present_flag) { gb.readUEG(); gb.readUEG(); } } } if (vcl_hrd_parameters_present_flag) { for (let j = 0; j < cpbCnt; j++) { gb.readUEG(); gb.readUEG(); if (sub_pic_hrd_params_present_flag) { gb.readUEG(); gb.readUEG(); } } } } } } let bitstream_restriction_flag = gb.readBool(); if (bitstream_restriction_flag) { gb.readBool(); gb.readBool(); gb.readBool(); min_spatial_segmentation_idc = gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); } } gb.readBool(); // ignore... // for meta data let codec_mimetype = `hvc1.${general_profile_idc}.1.L${general_level_idc}.B0`; let codec_width = pic_width_in_luma_samples; let codec_height = pic_height_in_luma_samples; let sar_scale = 1; if (sar_width !== 1 && sar_height !== 1) { sar_scale = sar_width / sar_height; } gb.destroy(); gb = null; return { codec_mimetype, level_string: getLevelString(general_level_idc), profile_idc: general_profile_idc, bit_depth: bit_depth_luma_minus8 + 8, ref_frames: 1, // FIXME!!! chroma_format: chroma_format_idc, chroma_format_string: getChromaFormatString(chroma_format_idc), general_level_idc, general_profile_space, general_tier_flag, general_profile_idc, general_profile_compatibility_flags_1, general_profile_compatibility_flags_2, general_profile_compatibility_flags_3, general_profile_compatibility_flags_4, general_constraint_indicator_flags_1, general_constraint_indicator_flags_2, general_constraint_indicator_flags_3, general_constraint_indicator_flags_4, general_constraint_indicator_flags_5, general_constraint_indicator_flags_6, min_spatial_segmentation_idc, constant_frame_rate: 0 /* FIXME!! fps_fixed ? 1 : 0? 
*/, chroma_format_idc, bit_depth_luma_minus8, bit_depth_chroma_minus8, frame_rate: { fixed: fps_fixed, fps: fps_num / fps_den, fps_den: fps_den, fps_num: fps_num }, sar_ratio: { width: sar_width, height: sar_height }, codec_size: { width: codec_width, height: codec_height }, present_size: { width: codec_width * sar_scale, height: codec_height } }; }; const parseHevcVPS = uint8array => { let rbsp = _ebsp2rbsp(uint8array); let gb = new ExpGolomb$1(rbsp); /* remove NALu Header */ gb.readByte(); gb.readByte(); // VPS gb.readBits(4); gb.readBits(2); gb.readBits(6); let max_sub_layers_minus1 = gb.readBits(3); let temporal_id_nesting_flag = gb.readBool(); // and more ... return { num_temporal_layers: max_sub_layers_minus1 + 1, temporal_id_nested: temporal_id_nesting_flag }; }; const parseHevcPPS = uint8array => { let rbsp = _ebsp2rbsp(uint8array); let gb = new ExpGolomb$1(rbsp); /* remove NALu Header */ gb.readByte(); gb.readByte(); gb.readUEG(); gb.readUEG(); gb.readBool(); gb.readBool(); gb.readBits(3); gb.readBool(); gb.readBool(); gb.readUEG(); gb.readUEG(); gb.readSEG(); gb.readBool(); gb.readBool(); let cu_qp_delta_enabled_flag = gb.readBool(); if (cu_qp_delta_enabled_flag) { gb.readUEG(); } gb.readSEG(); gb.readSEG(); gb.readBool(); gb.readBool(); gb.readBool(); gb.readBool(); let tiles_enabled_flag = gb.readBool(); let entropy_coding_sync_enabled_flag = gb.readBool(); // and more ... // needs hvcC let parallelismType = 1; // slice-based parallel decoding if (entropy_coding_sync_enabled_flag && tiles_enabled_flag) { parallelismType = 0; // mixed-type parallel decoding } else if (entropy_coding_sync_enabled_flag) { parallelismType = 3; // wavefront-based parallel decoding } else if (tiles_enabled_flag) { parallelismType = 2; // tile-based parallel decoding } return { parallelismType }; }; class H265NaluParser { static _ebsp2rbsp(uint8array) { let src = uint8array; let src_length = src.byteLength; let dst = new Uint8Array(src_length); let dst_idx = 0; for (let i = 0; i < src_length; i++) { if (i >= 2) { // Unescape: Skip 0x03 after 00 00 if (src[i] === 0x03 && src[i - 1] === 0x00 && src[i - 2] === 0x00) { continue; } } dst[dst_idx] = src[i]; dst_idx++; } return new Uint8Array(dst.buffer, 0, dst_idx); } static parseVPS(uint8array) { let rbsp = H265NaluParser._ebsp2rbsp(uint8array); let gb = new ExpGolomb$1(rbsp); /* remove NALu Header */ gb.readByte(); gb.readByte(); // VPS gb.readBits(4); gb.readBits(2); gb.readBits(6); let max_sub_layers_minus1 = gb.readBits(3); let temporal_id_nesting_flag = gb.readBool(); // and more ... 
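// Only the two VPS fields needed downstream (e.g. the numTemporalLayers / temporalIdNested entries
// of an hvcC record) are extracted; the remaining VPS syntax elements are skipped.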
return { num_temporal_layers: max_sub_layers_minus1 + 1, temporal_id_nested: temporal_id_nesting_flag }; } static parseSPS(uint8array) { let rbsp = H265NaluParser._ebsp2rbsp(uint8array); let gb = new ExpGolomb$1(rbsp); /* remove NALu Header */ gb.readByte(); gb.readByte(); let left_offset = 0, right_offset = 0, top_offset = 0, bottom_offset = 0; // SPS gb.readBits(4); let max_sub_layers_minus1 = gb.readBits(3); gb.readBool(); // profile_tier_level begin let general_profile_space = gb.readBits(2); let general_tier_flag = gb.readBool(); let general_profile_idc = gb.readBits(5); let general_profile_compatibility_flags_1 = gb.readByte(); let general_profile_compatibility_flags_2 = gb.readByte(); let general_profile_compatibility_flags_3 = gb.readByte(); let general_profile_compatibility_flags_4 = gb.readByte(); let general_constraint_indicator_flags_1 = gb.readByte(); let general_constraint_indicator_flags_2 = gb.readByte(); let general_constraint_indicator_flags_3 = gb.readByte(); let general_constraint_indicator_flags_4 = gb.readByte(); let general_constraint_indicator_flags_5 = gb.readByte(); let general_constraint_indicator_flags_6 = gb.readByte(); let general_level_idc = gb.readByte(); let sub_layer_profile_present_flag = []; let sub_layer_level_present_flag = []; for (let i = 0; i < max_sub_layers_minus1; i++) { sub_layer_profile_present_flag.push(gb.readBool()); sub_layer_level_present_flag.push(gb.readBool()); } if (max_sub_layers_minus1 > 0) { for (let i = max_sub_layers_minus1; i < 8; i++) { gb.readBits(2); } } for (let i = 0; i < max_sub_layers_minus1; i++) { if (sub_layer_profile_present_flag[i]) { gb.readByte(); // sub_layer_profile_space, sub_layer_tier_flag, sub_layer_profile_idc gb.readByte(); gb.readByte(); gb.readByte(); gb.readByte(); // sub_layer_profile_compatibility_flag gb.readByte(); gb.readByte(); gb.readByte(); gb.readByte(); gb.readByte(); gb.readByte(); } if (sub_layer_level_present_flag[i]) { gb.readByte(); } } // profile_tier_level end gb.readUEG(); let chroma_format_idc = gb.readUEG(); if (chroma_format_idc == 3) { gb.readBits(1); // separate_colour_plane_flag } let pic_width_in_luma_samples = gb.readUEG(); let pic_height_in_luma_samples = gb.readUEG(); let conformance_window_flag = gb.readBool(); if (conformance_window_flag) { left_offset += gb.readUEG(); right_offset += gb.readUEG(); top_offset += gb.readUEG(); bottom_offset += gb.readUEG(); } let bit_depth_luma_minus8 = gb.readUEG(); let bit_depth_chroma_minus8 = gb.readUEG(); let log2_max_pic_order_cnt_lsb_minus4 = gb.readUEG(); let sub_layer_ordering_info_present_flag = gb.readBool(); for (let i = sub_layer_ordering_info_present_flag ? 0 : max_sub_layers_minus1; i <= max_sub_layers_minus1; i++) { gb.readUEG(); // max_dec_pic_buffering_minus1[i] gb.readUEG(); // max_num_reorder_pics[i] gb.readUEG(); // max_latency_increase_plus1[i] } gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); let scaling_list_enabled_flag = gb.readBool(); if (scaling_list_enabled_flag) { let sps_scaling_list_data_present_flag = gb.readBool(); if (sps_scaling_list_data_present_flag) { for (let sizeId = 0; sizeId < 4; sizeId++) { for (let matrixId = 0; matrixId < (sizeId === 3 ? 
2 : 6); matrixId++) { let scaling_list_pred_mode_flag = gb.readBool(); if (!scaling_list_pred_mode_flag) { gb.readUEG(); // scaling_list_pred_matrix_id_delta } else { let coefNum = Math.min(64, 1 << 4 + (sizeId << 1)); if (sizeId > 1) { gb.readSEG(); } for (let i = 0; i < coefNum; i++) { gb.readSEG(); } } } } } } gb.readBool(); gb.readBool(); let pcm_enabled_flag = gb.readBool(); if (pcm_enabled_flag) { gb.readByte(); gb.readUEG(); gb.readUEG(); gb.readBool(); } let num_short_term_ref_pic_sets = gb.readUEG(); let num_delta_pocs = 0; for (let i = 0; i < num_short_term_ref_pic_sets; i++) { let inter_ref_pic_set_prediction_flag = false; if (i !== 0) { inter_ref_pic_set_prediction_flag = gb.readBool(); } if (inter_ref_pic_set_prediction_flag) { if (i === num_short_term_ref_pic_sets) { gb.readUEG(); } gb.readBool(); gb.readUEG(); let next_num_delta_pocs = 0; for (let j = 0; j <= num_delta_pocs; j++) { let used_by_curr_pic_flag = gb.readBool(); let use_delta_flag = false; if (!used_by_curr_pic_flag) { use_delta_flag = gb.readBool(); } if (used_by_curr_pic_flag || use_delta_flag) { next_num_delta_pocs++; } } num_delta_pocs = next_num_delta_pocs; } else { let num_negative_pics = gb.readUEG(); let num_positive_pics = gb.readUEG(); num_delta_pocs = num_negative_pics + num_positive_pics; for (let j = 0; j < num_negative_pics; j++) { gb.readUEG(); gb.readBool(); } for (let j = 0; j < num_positive_pics; j++) { gb.readUEG(); gb.readBool(); } } } let long_term_ref_pics_present_flag = gb.readBool(); if (long_term_ref_pics_present_flag) { let num_long_term_ref_pics_sps = gb.readUEG(); for (let i = 0; i < num_long_term_ref_pics_sps; i++) { for (let j = 0; j < log2_max_pic_order_cnt_lsb_minus4 + 4; j++) { gb.readBits(1); } gb.readBits(1); } } //* let default_display_window_flag = false; // for calc offset let min_spatial_segmentation_idc = 0; // for hvcC let sar_width = 1, sar_height = 1; let fps_fixed = false, fps_den = 1, fps_num = 1; //*/ gb.readBool(); gb.readBool(); let vui_parameters_present_flag = gb.readBool(); if (vui_parameters_present_flag) { let aspect_ratio_info_present_flag = gb.readBool(); if (aspect_ratio_info_present_flag) { let aspect_ratio_idc = gb.readByte(); let sar_w_table = [1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160, 4, 3, 2]; let sar_h_table = [1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99, 3, 2, 1]; if (aspect_ratio_idc > 0 && aspect_ratio_idc <= 16) { sar_width = sar_w_table[aspect_ratio_idc - 1]; sar_height = sar_h_table[aspect_ratio_idc - 1]; } else if (aspect_ratio_idc === 255) { sar_width = gb.readBits(16); sar_height = gb.readBits(16); } } let overscan_info_present_flag = gb.readBool(); if (overscan_info_present_flag) { gb.readBool(); } let video_signal_type_present_flag = gb.readBool(); if (video_signal_type_present_flag) { gb.readBits(3); gb.readBool(); let colour_description_present_flag = gb.readBool(); if (colour_description_present_flag) { gb.readByte(); gb.readByte(); gb.readByte(); } } let chroma_loc_info_present_flag = gb.readBool(); if (chroma_loc_info_present_flag) { gb.readUEG(); gb.readUEG(); } gb.readBool(); gb.readBool(); gb.readBool(); default_display_window_flag = gb.readBool(); if (default_display_window_flag) { gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); } let vui_timing_info_present_flag = gb.readBool(); if (vui_timing_info_present_flag) { fps_den = gb.readBits(32); fps_num = gb.readBits(32); let vui_poc_proportional_to_timing_flag = gb.readBool(); if (vui_poc_proportional_to_timing_flag) { gb.readUEG(); } let 
vui_hrd_parameters_present_flag = gb.readBool(); if (vui_hrd_parameters_present_flag) { let nal_hrd_parameters_present_flag = false; let vcl_hrd_parameters_present_flag = false; let sub_pic_hrd_params_present_flag = false; { nal_hrd_parameters_present_flag = gb.readBool(); vcl_hrd_parameters_present_flag = gb.readBool(); if (nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag) { sub_pic_hrd_params_present_flag = gb.readBool(); if (sub_pic_hrd_params_present_flag) { gb.readByte(); gb.readBits(5); gb.readBool(); gb.readBits(5); } gb.readBits(4); gb.readBits(4); if (sub_pic_hrd_params_present_flag) { gb.readBits(4); } gb.readBits(5); gb.readBits(5); gb.readBits(5); } } for (let i = 0; i <= max_sub_layers_minus1; i++) { let fixed_pic_rate_general_flag = gb.readBool(); fps_fixed = fixed_pic_rate_general_flag; let fixed_pic_rate_within_cvs_flag = true; let cpbCnt = 1; if (!fixed_pic_rate_general_flag) { fixed_pic_rate_within_cvs_flag = gb.readBool(); } let low_delay_hrd_flag = false; if (fixed_pic_rate_within_cvs_flag) { gb.readUEG(); } else { low_delay_hrd_flag = gb.readBool(); } if (!low_delay_hrd_flag) { cpbCnt = gb.readUEG() + 1; } if (nal_hrd_parameters_present_flag) { for (let j = 0; j < cpbCnt; j++) { gb.readUEG(); gb.readUEG(); if (sub_pic_hrd_params_present_flag) { gb.readUEG(); gb.readUEG(); } } gb.readBool(); } if (vcl_hrd_parameters_present_flag) { for (let j = 0; j < cpbCnt; j++) { gb.readUEG(); gb.readUEG(); if (sub_pic_hrd_params_present_flag) { gb.readUEG(); gb.readUEG(); } } gb.readBool(); } } } } let bitstream_restriction_flag = gb.readBool(); if (bitstream_restriction_flag) { gb.readBool(); gb.readBool(); gb.readBool(); min_spatial_segmentation_idc = gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); gb.readUEG(); } } gb.readBool(); // ignore... // for meta data let codec_mimetype = `hvc1.${general_profile_idc}.1.L${general_level_idc}.B0`; let sub_wc = chroma_format_idc === 1 || chroma_format_idc === 2 ? 2 : 1; let sub_hc = chroma_format_idc === 1 ? 2 : 1; let codec_width = pic_width_in_luma_samples - (left_offset + right_offset) * sub_wc; let codec_height = pic_height_in_luma_samples - (top_offset + bottom_offset) * sub_hc; let sar_scale = 1; if (sar_width !== 1 && sar_height !== 1) { sar_scale = sar_width / sar_height; } gb.destroy(); gb = null; return { codec_mimetype, profile_string: H265NaluParser.getProfileString(general_profile_idc), level_string: H265NaluParser.getLevelString(general_level_idc), profile_idc: general_profile_idc, bit_depth: bit_depth_luma_minus8 + 8, ref_frames: 1, // FIXME!!! chroma_format: chroma_format_idc, chroma_format_string: H265NaluParser.getChromaFormatString(chroma_format_idc), general_level_idc, general_profile_space, general_tier_flag, general_profile_idc, general_profile_compatibility_flags_1, general_profile_compatibility_flags_2, general_profile_compatibility_flags_3, general_profile_compatibility_flags_4, general_constraint_indicator_flags_1, general_constraint_indicator_flags_2, general_constraint_indicator_flags_3, general_constraint_indicator_flags_4, general_constraint_indicator_flags_5, general_constraint_indicator_flags_6, min_spatial_segmentation_idc, constant_frame_rate: 0 /* FIXME!! fps_fixed ? 1 : 0? 
*/, chroma_format_idc, bit_depth_luma_minus8, bit_depth_chroma_minus8, frame_rate: { fixed: fps_fixed, fps: fps_num / fps_den, fps_den: fps_den, fps_num: fps_num }, sar_ratio: { width: sar_width, height: sar_height }, codec_size: { width: codec_width, height: codec_height }, present_size: { width: codec_width * sar_scale, height: codec_height } }; } static parsePPS(uint8array) { let rbsp = H265NaluParser._ebsp2rbsp(uint8array); let gb = new ExpGolomb$1(rbsp); /* remove NALu Header */ gb.readByte(); gb.readByte(); gb.readUEG(); gb.readUEG(); gb.readBool(); gb.readBool(); gb.readBits(3); gb.readBool(); gb.readBool(); gb.readUEG(); gb.readUEG(); gb.readSEG(); gb.readBool(); gb.readBool(); let cu_qp_delta_enabled_flag = gb.readBool(); if (cu_qp_delta_enabled_flag) { gb.readUEG(); } gb.readSEG(); gb.readSEG(); gb.readBool(); gb.readBool(); gb.readBool(); gb.readBool(); let tiles_enabled_flag = gb.readBool(); let entropy_coding_sync_enabled_flag = gb.readBool(); // and more ... // needs hvcC let parallelismType = 1; // slice-based parallel decoding if (entropy_coding_sync_enabled_flag && tiles_enabled_flag) { parallelismType = 0; // mixed-type parallel decoding } else if (entropy_coding_sync_enabled_flag) { parallelismType = 3; // wavefront-based parallel decoding } else if (tiles_enabled_flag) { parallelismType = 2; // tile-based parallel decoding } return { parallelismType }; } static getChromaFormatString(chroma_idc) { switch (chroma_idc) { case 0: return '4:0:0'; case 1: return '4:2:0'; case 2: return '4:2:2'; case 3: return '4:4:4'; default: return 'Unknown'; } } static getProfileString(profile_idc) { switch (profile_idc) { case 1: return 'Main'; case 2: return 'Main10'; case 3: return 'MainSP'; case 4: return 'Rext'; case 9: return 'SCC'; default: return 'Unknown'; } } static getLevelString(level_idc) { return (level_idc / 30).toFixed(1); } } /** * * @param arrayBuffer */ function parseHEVCDecoderConfigurationRecord$2(arrayBuffer) { let info = { codecWidth: 0, codecHeight: 0, videoType: VIDEO_ENCODE_TYPE.h265 }; info.width = 0; info.height = 0; info.profile = 0; info.level = 0; // remove 5 bytes arrayBuffer = arrayBuffer.slice(5); do { let hevc = {}; if (arrayBuffer.length < 23) { console.warn('parseHEVCDecoderConfigurationRecord$2', `arrayBuffer.length ${arrayBuffer.length} < 23`); break; } hevc.configurationVersion = arrayBuffer[0]; if (hevc.configurationVersion != 1) { break; } hevc.general_profile_space = arrayBuffer[1] >> 6 & 0x03; hevc.general_tier_flag = arrayBuffer[1] >> 5 & 0x01; hevc.general_profile_idc = arrayBuffer[1] & 0x1F; hevc.general_profile_compatibility_flags = arrayBuffer[2] << 24 | arrayBuffer[3] << 16 | arrayBuffer[4] << 8 | arrayBuffer[5]; hevc.general_constraint_indicator_flags = arrayBuffer[6] << 24 | arrayBuffer[7] << 16 | arrayBuffer[8] << 8 | arrayBuffer[9]; hevc.general_constraint_indicator_flags = hevc.general_constraint_indicator_flags << 16 | arrayBuffer[10] << 8 | arrayBuffer[11]; hevc.general_level_idc = arrayBuffer[12]; hevc.min_spatial_segmentation_idc = (arrayBuffer[13] & 0x0F) << 8 | arrayBuffer[14]; hevc.parallelismType = arrayBuffer[15] & 0x03; hevc.chromaFormat = arrayBuffer[16] & 0x03; hevc.bitDepthLumaMinus8 = arrayBuffer[17] & 0x07; hevc.bitDepthChromaMinus8 = arrayBuffer[18] & 0x07; hevc.avgFrameRate = arrayBuffer[19] << 8 | arrayBuffer[20]; hevc.constantFrameRate = arrayBuffer[21] >> 6 & 0x03; hevc.numTemporalLayers = arrayBuffer[21] >> 3 & 0x07; hevc.temporalIdNested = arrayBuffer[21] >> 2 & 0x01; hevc.lengthSizeMinusOne = arrayBuffer[21] & 
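// Cropping and display size, as computed at the end of parseSPS above: the conformance-window
// offsets are expressed in chroma units, so they are scaled by SubWidthC/SubHeightC before being
// subtracted, and the presentation width additionally applies the sample aspect ratio.
// Worked example (assumed values): 1920x1088 luma samples, 4:2:0 (SubHeightC = 2), bottom offset 4:
//   codec_height  = 1088 - (0 + 4) * 2 = 1080
//   present_width = codec_width * sar_width / sar_height   (a 1:1 SAR leaves it unchanged)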
0x03; let numOfArrays = arrayBuffer[22]; let p = arrayBuffer.slice(23); for (let i = 0; i < numOfArrays; i++) { if (p.length < 3) { break; } let nalutype = p[0] & 0x3F; let n = p[1] << 8 | p[2]; // console.log('nalutype', nalutype,n) p = p.slice(3); for (let j = 0; j < n; j++) { if (p.length < 2) { break; } let k = p[0] << 8 | p[1]; // console.log('k', k) if (p.length < 2 + k) { break; } p = p.slice(2); if (nalutype == 33) { //SPS let sps = new Uint8Array(k); sps.set(p.slice(0, k), 0); hevc.psps = HEVCParseSPS(sps, hevc); info.profile = hevc.general_profile_idc; info.level = hevc.general_level_idc / 30.0; info.width = hevc.psps.pic_width_in_luma_samples - (hevc.psps.conf_win_left_offset + hevc.psps.conf_win_right_offset); info.height = hevc.psps.pic_height_in_luma_samples - (hevc.psps.conf_win_top_offset + hevc.psps.conf_win_bottom_offset); } p = p.slice(k); } } } while (0); info.codecWidth = info.width || 1920; info.codecHeight = info.height || 1080; info.presentHeight = info.codecHeight; info.presentWidth = info.codecWidth; info.timescale = 1000; info.refSampleDuration = 1000 * (1000 / 23976); return info; } /** * * @param arrayBuffer * @returns {{}} * @desc v4 版本 */ function parseHEVCDecoderConfigurationRecord$4(arrayBuffer) { const hevcSequenceHeader = arrayBuffer; if (hevcSequenceHeader.length < 22) { console.error(`Invalid HEVCDecoderConfigurationRecord, lack of data!`); return; } let meta = { codecWidth: 0, codecHeight: 0, videoType: VIDEO_ENCODE_TYPE.h265 }; let le = function () { let buf = new ArrayBuffer(2); new DataView(buf).setInt16(0, 256, true); // little-endian write return new Int16Array(buf)[0] === 256; // platform-spec read, if equal then LE }(); let v = new DataView(hevcSequenceHeader.buffer); let version = v.getUint8(0); // configurationVersion let hevcProfile = v.getUint8(1) & 0x1F; // hevcProfileIndication if (version !== 1 || hevcProfile === 0) { console.error(`Flv: Invalid HEVCDecoderConfigurationRecord`); return; } let naluLengthSize = (v.getUint8(21) & 3) + 1; // lengthSizeMinusOne if (naluLengthSize !== 3 && naluLengthSize !== 4) { // holy shit!!! 
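// The codec_mimetype built by parseSPS ('hvc1.<profile>.1.L<level>.B0') is what an MSE-based
// hardware-decode path would have to probe for. Hedged capability check -- HEVC support in
// MediaSource differs per browser and OS:
//   function canUseMseForHevc(codecMimetype) {          // e.g. 'hvc1.1.1.L93.B0'
//     return typeof MediaSource !== 'undefined' &&
//       MediaSource.isTypeSupported('video/mp4; codecs="' + codecMimetype + '"');
//   }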
console.error(`Strange NaluLengthSizeMinusOne: ${naluLengthSize - 1}`); return; } let numOfArrays = v.getUint8(22); for (let i = 0, offset = 23; i < numOfArrays; i++) { let nalUnitType = v.getUint8(offset + 0) & 0x3F; let numNalus = v.getUint16(offset + 1, !le); offset += 3; for (let j = 0; j < numNalus; j++) { let len = v.getUint16(offset + 0, !le); if (j !== 0) { offset += 2 + len; continue; } if (nalUnitType === 33) { offset += 2; let sps = new Uint8Array(hevcSequenceHeader.buffer, offset, len); let config = H265NaluParser.parseSPS(sps); meta.codecWidth = config.codec_size.width; meta.codecHeight = config.codec_size.height; meta.presentWidth = config.present_size.width; meta.presentHeight = config.present_size.height; meta.profile = config.profile_string; meta.level = config.level_string; meta.bitDepth = config.bit_depth; meta.chromaFormat = config.chroma_format; meta.sarRatio = config.sar_ratio; meta.frameRate = config.frame_rate; if (config.frame_rate.fixed === false || config.frame_rate.fps_num === 0 || config.frame_rate.fps_den === 0) { meta.frameRate = { fixed: true, fps: 23.976, fps_num: 23976, fps_den: 1000 }; } meta.frameRate.fps_den; meta.frameRate.fps_num; // meta.refSampleDuration = meta.timescale * (fps_den / fps_num); meta.codec = config.codec_mimetype; offset += len; } else { offset += 2 + len; } } } meta.hvcc = new Uint8Array(hevcSequenceHeader); return meta; } function parseHEVCDecoderVPSAndSPSAndPPS(arrayBuffer) { // let offset = 28 - 5; // 23 // const vpsTag = arrayBuffer[offset]; if ((vpsTag & 0x3F) !== H265_NAL_TYPE.vps) { console.warn(`parseHEVCDecoderVPSAndSPSAndPPS and vpsTag is ${vpsTag}`); return {}; } offset += 2; offset += 1; const vpsLength = arrayBuffer[offset + 1] | arrayBuffer[offset] << 8; offset += 2; const vpsData = arrayBuffer.slice(offset, offset + vpsLength); // console.log('vpsData:', Uint8Array.from(vpsData)); offset += vpsLength; const spsTag = arrayBuffer[offset]; if ((spsTag & 0x3F) !== H265_NAL_TYPE.sps) { console.warn(`parseHEVCDecoderVPSAndSPSAndPPS and sps tag is ${spsTag}`); return {}; } offset += 2; offset += 1; const spsLength = arrayBuffer[offset + 1] | arrayBuffer[offset] << 8; offset += 2; const spsData = arrayBuffer.slice(offset, offset + spsLength); // console.log('spsData:', Uint8Array.from(spsData)); offset += spsLength; const ppsTag = arrayBuffer[offset]; if ((ppsTag & 0x3F) !== H265_NAL_TYPE.pps) { console.warn(`parseHEVCDecoderVPSAndSPSAndPPS and pps tag is ${ppsTag}`); return {}; } offset += 2; offset += 1; const ppsLength = arrayBuffer[offset + 1] | arrayBuffer[offset] << 8; offset += 2; const ppsData = arrayBuffer.slice(offset, offset + ppsLength); // console.log('ppsData:', Uint8Array.from(ppsData)); const spsFlag = new Uint8Array([spsLength >>> 24 & 0xFF, spsLength >>> 16 & 0xFF, spsLength >>> 8 & 0xFF, spsLength & 0xFF]); const ppsFlag = new Uint8Array([ppsLength >>> 24 & 0xFF, ppsLength >>> 16 & 0xFF, ppsLength >>> 8 & 0xFF, ppsLength & 0xFF]); const vpsFlag = new Uint8Array([vpsLength >>> 24 & 0xFF, vpsLength >>> 16 & 0xFF, vpsLength >>> 8 & 0xFF, vpsLength & 0xFF]); const sps = new Uint8Array(spsLength + 4); sps.set(spsFlag, 0); sps.set(spsData, 4); const pps = new Uint8Array(ppsLength + 4); pps.set(ppsFlag, 0); pps.set(ppsData, 4); const vps = new Uint8Array(vpsLength + 4); vps.set(vpsFlag, 0); vps.set(vpsData, 4); return { sps, pps, vps }; } function HEVCParsePtl(bitop, hevc, max_sub_layers_minus1) { let general_ptl = {}; general_ptl.profile_space = bitop.read(2); general_ptl.tier_flag = bitop.read(1); 
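// Layout of the NALU arrays at the tail of an hvcC record, which the parsers above walk: byte 22 is
// numOfArrays, then per array 1 byte (bit 7 array_completeness, bits 5..0 NAL unit type), a 2-byte
// big-endian NALU count, and per NALU a 2-byte big-endian length followed by the payload.
// Minimal sketch collecting the raw parameter sets by type:
//   function collectHvccNalus(hvcc) {                   // hvcc: Uint8Array of the full record
//     const out = { 32: [], 33: [], 34: [] };           // VPS / SPS / PPS
//     const numOfArrays = hvcc[22];
//     let p = 23;
//     for (let i = 0; i < numOfArrays; i++) {
//       const type = hvcc[p] & 0x3F;
//       const count = (hvcc[p + 1] << 8) | hvcc[p + 2];
//       p += 3;
//       for (let j = 0; j < count; j++) {
//         const len = (hvcc[p] << 8) | hvcc[p + 1];
//         p += 2;
//         if (out[type]) out[type].push(hvcc.subarray(p, p + len));
//         p += len;
//       }
//     }
//     return out;
//   }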
general_ptl.profile_idc = bitop.read(5); general_ptl.profile_compatibility_flags = bitop.read(32); general_ptl.general_progressive_source_flag = bitop.read(1); general_ptl.general_interlaced_source_flag = bitop.read(1); general_ptl.general_non_packed_constraint_flag = bitop.read(1); general_ptl.general_frame_only_constraint_flag = bitop.read(1); bitop.read(32); bitop.read(12); general_ptl.level_idc = bitop.read(8); general_ptl.sub_layer_profile_present_flag = []; general_ptl.sub_layer_level_present_flag = []; for (let i = 0; i < max_sub_layers_minus1; i++) { general_ptl.sub_layer_profile_present_flag[i] = bitop.read(1); general_ptl.sub_layer_level_present_flag[i] = bitop.read(1); } if (max_sub_layers_minus1 > 0) { for (let i = max_sub_layers_minus1; i < 8; i++) { bitop.read(2); } } general_ptl.sub_layer_profile_space = []; general_ptl.sub_layer_tier_flag = []; general_ptl.sub_layer_profile_idc = []; general_ptl.sub_layer_profile_compatibility_flag = []; general_ptl.sub_layer_progressive_source_flag = []; general_ptl.sub_layer_interlaced_source_flag = []; general_ptl.sub_layer_non_packed_constraint_flag = []; general_ptl.sub_layer_frame_only_constraint_flag = []; general_ptl.sub_layer_level_idc = []; for (let i = 0; i < max_sub_layers_minus1; i++) { if (general_ptl.sub_layer_profile_present_flag[i]) { general_ptl.sub_layer_profile_space[i] = bitop.read(2); general_ptl.sub_layer_tier_flag[i] = bitop.read(1); general_ptl.sub_layer_profile_idc[i] = bitop.read(5); general_ptl.sub_layer_profile_compatibility_flag[i] = bitop.read(32); general_ptl.sub_layer_progressive_source_flag[i] = bitop.read(1); general_ptl.sub_layer_interlaced_source_flag[i] = bitop.read(1); general_ptl.sub_layer_non_packed_constraint_flag[i] = bitop.read(1); general_ptl.sub_layer_frame_only_constraint_flag[i] = bitop.read(1); bitop.read(32); bitop.read(12); } if (general_ptl.sub_layer_level_present_flag[i]) { general_ptl.sub_layer_level_idc[i] = bitop.read(8); } else { general_ptl.sub_layer_level_idc[i] = 1; } } return general_ptl; } function HEVCParseSPS(SPS, hevc) { let psps = {}; let NumBytesInNALunit = SPS.length; let rbsp_array = []; let bitop = new Bitop(SPS); bitop.read(1); //forbidden_zero_bit bitop.read(6); //nal_unit_type bitop.read(6); //nuh_reserved_zero_6bits bitop.read(3); //nuh_temporal_id_plus1 for (let i = 2; i < NumBytesInNALunit; i++) { if (i + 2 < NumBytesInNALunit && bitop.look(24) == 0x000003) { rbsp_array.push(bitop.read(8)); rbsp_array.push(bitop.read(8)); i += 2; bitop.read(8); /* equal to 0x03 */ } else { rbsp_array.push(bitop.read(8)); } } let rbsp = new Uint8Array(rbsp_array); let rbspBitop = new Bitop(rbsp); psps.sps_video_parameter_set_id = rbspBitop.read(4); psps.sps_max_sub_layers_minus1 = rbspBitop.read(3); psps.sps_temporal_id_nesting_flag = rbspBitop.read(1); psps.profile_tier_level = HEVCParsePtl(rbspBitop, hevc, psps.sps_max_sub_layers_minus1); psps.sps_seq_parameter_set_id = rbspBitop.read_golomb(); psps.chroma_format_idc = rbspBitop.read_golomb(); if (psps.chroma_format_idc == 3) { psps.separate_colour_plane_flag = rbspBitop.read(1); } else { psps.separate_colour_plane_flag = 0; } psps.pic_width_in_luma_samples = rbspBitop.read_golomb(); psps.pic_height_in_luma_samples = rbspBitop.read_golomb(); psps.conformance_window_flag = rbspBitop.read(1); if (psps.conformance_window_flag) { let vert_mult = 1 + (psps.chroma_format_idc < 2); let horiz_mult = 1 + (psps.chroma_format_idc < 3); psps.conf_win_left_offset = rbspBitop.read_golomb() * horiz_mult; psps.conf_win_right_offset = 
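// Both _ebsp2rbsp() (used by parseSPS/parsePPS) and the 0x000003 look-ahead in HEVCParseSPS above
// strip emulation-prevention bytes. Simplified sketch (drops every 0x03 that follows two zero bytes):
//   function ebsp2rbsp(src) {
//     const dst = [];
//     for (let i = 0; i < src.length; i++) {
//       if (i >= 2 && src[i] === 0x03 && src[i - 1] === 0x00 && src[i - 2] === 0x00) continue;
//       dst.push(src[i]);
//     }
//     return new Uint8Array(dst);
//   }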
rbspBitop.read_golomb() * horiz_mult; psps.conf_win_top_offset = rbspBitop.read_golomb() * vert_mult; psps.conf_win_bottom_offset = rbspBitop.read_golomb() * vert_mult; } else { psps.conf_win_left_offset = 0; psps.conf_win_right_offset = 0; psps.conf_win_top_offset = 0; psps.conf_win_bottom_offset = 0; } // Logger.debug(psps); return psps; } function hevcEncoderConfigurationRecord$2(_ref2) { let { vps, pps, sps } = _ref2; let detail = { configurationVersion: 1 }; const vpsDetail = parseHevcVPS(vps); const spsDetail = parseHevcSPS(sps); const ppsDetail = parseHevcPPS(pps); detail = Object.assign(detail, vpsDetail, spsDetail, ppsDetail); let length = 23 + (3 + 2 + vps.byteLength) + (3 + 2 + sps.byteLength) + (3 + 2 + pps.byteLength); let data = new Uint8Array(length); data[0] = 0x01; // configurationVersion data[1] = (detail.general_profile_space & 0x03) << 6 | (detail.general_tier_flag ? 1 : 0) << 5 | detail.general_profile_idc & 0x1F; data[2] = detail.general_profile_compatibility_flags_1 || 0; data[3] = detail.general_profile_compatibility_flags_2 || 0; data[4] = detail.general_profile_compatibility_flags_3 || 0; data[5] = detail.general_profile_compatibility_flags_4 || 0; data[6] = detail.general_constraint_indicator_flags_1 || 0; data[7] = detail.general_constraint_indicator_flags_2 || 0; data[8] = detail.general_constraint_indicator_flags_3 || 0; data[9] = detail.general_constraint_indicator_flags_4 || 0; data[10] = detail.general_constraint_indicator_flags_5 || 0; data[11] = detail.general_constraint_indicator_flags_6 || 0; data[12] = 0x3C; data[13] = 0xF0 | (detail.min_spatial_segmentation_idc & 0x0F00) >> 8; data[14] = detail.min_spatial_segmentation_idc & 0xFF; data[15] = 0xFC | detail.parallelismType & 0x03; data[16] = 0xFC | detail.chroma_format_idc & 0x03; data[17] = 0xF8 | detail.bit_depth_luma_minus8 & 0x07; data[18] = 0xF8 | detail.bit_depth_chroma_minus8 & 0x07; data[19] = 0; data[20] = 0; data[21] = (detail.constant_frame_rate & 0x03) << 6 | (detail.num_temporal_layers & 0x07) << 3 | (detail.temporal_id_nested ? 
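// Byte 21 of the HEVCDecoderConfigurationRecord being written here packs four fields:
//   bits 7..6 constantFrameRate, bits 5..3 numTemporalLayers, bit 2 temporalIdNested,
//   bits 1..0 lengthSizeMinusOne (3 => every NALU is prefixed with a 4-byte length).
// Byte 22 is numOfArrays (3 here: one array each for VPS, SPS and PPS). Equivalent packing sketch,
// assuming the fields are already range-checked:
//   const byte21 = (constantFrameRate << 6) | (numTemporalLayers << 3) | (temporalIdNested << 2) | lengthSizeMinusOne;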
1 : 0) << 2 | 3; data[22] = 3; data[23 + 0 + 0] = 0x80 | H265_NAL_TYPE.vps; data[23 + 0 + 1] = 0; data[23 + 0 + 2] = 1; data[23 + 0 + 3] = (vps.byteLength & 0xFF00) >> 8; data[23 + 0 + 4] = (vps.byteLength & 0x00FF) >> 0; data.set(vps, 23 + 0 + 5); data[23 + (5 + vps.byteLength) + 0] = 0x80 | H265_NAL_TYPE.sps; data[23 + (5 + vps.byteLength) + 1] = 0; data[23 + (5 + vps.byteLength) + 2] = 1; data[23 + (5 + vps.byteLength) + 3] = (sps.byteLength & 0xFF00) >> 8; data[23 + (5 + vps.byteLength) + 4] = (sps.byteLength & 0x00FF) >> 0; data.set(sps, 23 + (5 + vps.byteLength) + 5); data[23 + (5 + vps.byteLength + 5 + sps.byteLength) + 0] = 0x80 | H265_NAL_TYPE.pps; data[23 + (5 + vps.byteLength + 5 + sps.byteLength) + 1] = 0; data[23 + (5 + vps.byteLength + 5 + sps.byteLength) + 2] = 1; data[23 + (5 + vps.byteLength + 5 + sps.byteLength) + 3] = (pps.byteLength & 0xFF00) >> 8; data[23 + (5 + vps.byteLength + 5 + sps.byteLength) + 4] = (pps.byteLength & 0x00FF) >> 0; data.set(pps, 23 + (5 + vps.byteLength + 5 + sps.byteLength) + 5); const prevData = [0x1c, 0, 0, 0, 0]; const newData = new Uint8Array(prevData.length + data.byteLength); newData.set(prevData, 0); newData.set(data, prevData.length); return newData; } /** * * @param oneNALBuffer * @param isIframe * @returns {Uint8Array} */ function hevcEncoderNalePacket(oneNALBuffer, isIframe) { // 正常发送nal // 这边增加 是否i帧, 然后前面封装了1 + 8 个字节的数据。 const idrBit = 0x10 | 12; const nIdrBit = 0x20 | 12; let tmp = []; if (isIframe) { tmp[0] = idrBit; } else { tmp[0] = nIdrBit; } tmp[1] = 1; // tmp[2] = 0; tmp[3] = 0; tmp[4] = 0; // 真正开始的地方。。。 tmp[5] = oneNALBuffer.byteLength >> 24 & 0xff; tmp[6] = oneNALBuffer.byteLength >> 16 & 0xff; tmp[7] = oneNALBuffer.byteLength >> 8 & 0xff; tmp[8] = oneNALBuffer.byteLength & 0xff; const arrayBuffer = new Uint8Array(tmp.length + oneNALBuffer.byteLength); arrayBuffer.set(tmp, 0); arrayBuffer.set(oneNALBuffer, tmp.length); return arrayBuffer; } function hevcEncoderNalePacketNotLength(oneNALBuffer, isIframe) { // 正常发送nal // 这边增加 是否i帧, 然后前面封装了1 + 8 个字节的数据。 const idrBit = 0x10 | 12; const nIdrBit = 0x20 | 12; let tmp = []; if (isIframe) { tmp[0] = idrBit; } else { tmp[0] = nIdrBit; } tmp[1] = 1; // tmp[2] = 0; tmp[3] = 0; tmp[4] = 0; const arrayBuffer = new Uint8Array(tmp.length + oneNALBuffer.byteLength); arrayBuffer.set(tmp, 0); arrayBuffer.set(oneNALBuffer, tmp.length); return arrayBuffer; } function getHevcSeqHeadType(nalu) { return (nalu[0] & 0x7E) >> 1; } function isHevcSEIType(type) { return type === H265_NAL_TYPE.sei; } // 32-40是VPS SPS PPS SUFFIX_SEI_NUT等 function isHevcSeqHead(type) { return type >= 32 && type <= 40; } function isNotHevcSeqHead(type) { return !isHevcSeqHead(type); } // 16-21是关键(I)帧 function isHevcNaluIFrame(type) { return type >= 16 && type <= 21; } function isSameHevcNaluType(naluList) { if (naluList.length === 0) { return false; } const type = getHevcSeqHeadType(naluList[0]); for (let i = 1; i < naluList.length; i++) { if (type !== getHevcSeqHeadType(naluList[i])) { return false; } } return true; } class H265AnnexBParser { constructor(data) { this.data = data; this.eofFlag = false; this.currentStartcodeOffset = this.findNextStartCodeOffset(0); if (this.eofFlag) { console.error('Could not find H265 startcode until payload end!'); } } findNextStartCodeOffset(start_offset) { let i = start_offset; let data = this.data; while (true) { if (i + 3 >= data.byteLength) { this.eofFlag = true; return data.byteLength; } // search 00 00 00 01 or 00 00 01 let uint32 = data[i + 0] << 24 | data[i + 1] << 16 | data[i + 2] 
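// The HEVC nal_unit_type sits in bits 6..1 of the first NALU byte, hence (nalu[0] & 0x7E) >> 1 in
// getHevcSeqHeadType() above. Sketch mirroring the range checks used by the helpers above:
//   function classifyHevcNalu(firstByte) {
//     const type = (firstByte & 0x7E) >> 1;
//     if (type >= 32 && type <= 40) return 'sequence-header';   // VPS(32) SPS(33) PPS(34) AUD/SEI ...
//     if (type >= 16 && type <= 21) return 'key-frame';         // IRAP: BLA / IDR / CRA
//     return 'other';
//   }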
<< 8 | data[i + 3]; let uint24 = data[i + 0] << 16 | data[i + 1] << 8 | data[i + 2]; if (uint32 === 0x00000001 || uint24 === 0x000001) { return i; } else { i++; } } } readNextNaluPayload() { let data = this.data; let nalu_payload = null; while (nalu_payload == null) { if (this.eofFlag) { break; } // offset pointed to start code let startcode_offset = this.currentStartcodeOffset; // nalu payload start offset let offset = startcode_offset; let u32 = data[offset] << 24 | data[offset + 1] << 16 | data[offset + 2] << 8 | data[offset + 3]; if (u32 === 0x00000001) { offset += 4; } else { offset += 3; } let nalu_type = data[offset] >> 1 & 0x3F; let forbidden_bit = (data[offset] & 0x80) >>> 7; let next_startcode_offset = this.findNextStartCodeOffset(offset); this.currentStartcodeOffset = next_startcode_offset; if (forbidden_bit !== 0) { // Log.e(this.TAG, `forbidden_bit near offset ${offset} should be 0 but has value ${forbidden_bit}`); continue; } let payload_data = data.subarray(offset, next_startcode_offset); nalu_payload = { type: nalu_type, data: payload_data }; } return nalu_payload; } } class H265NaluHVC1 { constructor(nalu) { let nalu_size = nalu.data.byteLength; this.type = nalu.type; this.data = new Uint8Array(4 + nalu_size); // 4 byte length-header + nalu payload let v = new DataView(this.data.buffer); // Fill 4 byte length-header v.setUint32(0, nalu_size); // Copy payload this.data.set(nalu.data, 4); } } class CommonLoader$1 extends Emitter { constructor(player) { super(); this.TAG_NAME = 'recorderCommon'; this.player = player; this.fileName = ''; this._isRecording = false; this._recordingTimestamp = 0; this.recordingInterval = null; // this.sps = null; this.pps = null; this.vps = null; this.codecId = null; this.audioCodeId = null; this.metaInfo = { codecWidth: 0, codecHeight: 0, presentWidth: 0, presentHeight: 0, refSampleDuration: 0, timescale: 1000, avcc: null, videoType: '' }; this.audioMetaInfo = { timescale: 1000, sampleRate: 0, refSampleDuration: 0, channelCount: 0, codec: '', originalCodec: '', audioType: '', extraData: new Uint8Array(0) }; } destroy() { this._reset(); this.sps = null; this.pps = null; this.vps = null; this.codecId = null; this.audioCodeId = null; this.metaInfo = null; this.audioMetaInfo = null; } get isH264() { return this.codecId === VIDEO_ENC_CODE.h264; } get isH265() { return this.codecId === VIDEO_ENC_CODE.h265; } setFileName(fileName) { this.fileName = fileName; } get isRecording() { return this._isRecording; } get recording() { return this._isRecording; } get recordTime() { return this._recordingTimestamp; } startRecord() { // 子类实现 } // just for mp4 handleAddNaluTrack(payload, isIframe, dts, cts) { // 子类实现 } // just for mp4 handleAddAudioTrack(payload, dts) { // 子类实现 } handleAddTrack(arrayBuffer) { // 子类实现 } stopRecordAndSave() { // 子类实现 } startRecordingInterval() { // 子类实现 } isWasmMp4() { return false; } stopRecordingInterval() { if (this.recordingInterval) { clearInterval(this.recordingInterval); } this.recordingInterval = null; } getToTalByteLength() { // 子类实现 return 0; } _reset() { this.fileName = ''; this._isRecording = false; this._recordingTimestamp = 0; this.stopRecordingInterval(); } initMetaData(arrayBuffer, codecId) { let metaData; const extraData = arrayBuffer.slice(5); this.codecId = codecId; this.metaInfo.avcc = extraData; // if (codecId === VIDEO_ENC_CODE.h264) { metaData = parseAVCDecoderConfigurationRecord(extraData); } else if (codecId === VIDEO_ENC_CODE.h265) { metaData = parseHEVCDecoderVPSAndSPSAndPPS(extraData); const metaData2 = 
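// Usage sketch for H265AnnexBParser + H265NaluHVC1 above: walk an Annex-B buffer (00 00 01 /
// 00 00 00 01 start codes) and repackage every NALU with the 4-byte length prefix MP4 samples expect.
//   function annexBToLengthPrefixed(uint8array) {
//     const parser = new H265AnnexBParser(uint8array);
//     const out = [];
//     let nalu;
//     while ((nalu = parser.readNextNaluPayload()) != null) {
//       out.push(new H265NaluHVC1(nalu).data);          // big-endian size header + payload
//     }
//     return out;
//   }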
parseHEVCDecoderConfigurationRecord$2(arrayBuffer); metaData = Object.assign(metaData, metaData2); } //console.log('initMetaData-metaData', metaData); if (metaData) { if (metaData.vps) { this.vps = metaData.vps; } if (metaData.pps) { this.pps = metaData.pps; } if (metaData.sps) { this.sps = metaData.sps; } if (metaData.presentWidth) { this.metaInfo.presentWidth = metaData.presentWidth; } if (metaData.presentHeight) { this.metaInfo.presentHeight = metaData.presentHeight; } if (metaData.codecWidth) { this.metaInfo.codecWidth = metaData.codecWidth; } if (metaData.codecHeight) { this.metaInfo.codecHeight = metaData.codecHeight; } if (metaData.timescale) { this.metaInfo.timescale = metaData.timescale; } if (metaData.refSampleDuration) { this.metaInfo.refSampleDuration = metaData.refSampleDuration; } if (metaData.videoType) { this.metaInfo.videoType = metaData.videoType; } } // console.log('initMetaData-metaInfo', this.metaInfo); } initAudioMetaData(arrayBuffer, codecId) { this.audioCodeId = codecId; const size = arrayBuffer[0] >> 1 & 0x01; let metaData = null; if (codecId === AUDIO_ENC_CODE.AAC) { metaData = readAACSpecificConfig(arrayBuffer); if (metaData) { if (metaData.channelCount) { this.audioMetaInfo.channelCount = metaData.channelCount; } if (metaData.codec) { this.audioMetaInfo.codec = metaData.codec; } if (metaData.originalCodec) { this.audioMetaInfo.originalCodec = metaData.originalCodec; } if (metaData.config) { this.audioMetaInfo.config = metaData.config; } if (metaData.sampleRate) { this.audioMetaInfo.sampleRate = metaData.sampleRate; } if (this.audioMetaInfo.sampleRate && this.audioMetaInfo.timescale) { // The decode result of an aac sample is 1024 PCM samples this.audioMetaInfo.refSampleDuration = 1024 / this.audioMetaInfo.sampleRate * this.audioMetaInfo.timescale; } } this.audioMetaInfo.depth = size ? 16 : 8; this.audioMetaInfo.extraData = arrayBuffer.slice(2); } else { this.audioMetaInfo.depth = size === 0 ? 
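// Each AAC access unit decodes to 1024 PCM samples, so with the 1 kHz timescale used here
// refSampleDuration = 1024 / sampleRate * 1000, e.g. 44100 Hz -> ~23.22 ms and 48000 Hz -> ~21.33 ms
// per AAC frame.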
8 : 16; } } initAudioAacExtraData(arrayBuffer) { this.audioMetaInfo.extraData = new Uint8Array(arrayBuffer); } } class RecordRTCLoader extends CommonLoader$1 { constructor(player) { super(player); this.totalByteLength = 0; this._startRecordingTimestamp = null; player.debug.log('RecorderRTC', 'init'); } _reset() { super._reset(); this.totalByteLength = 0; this._startRecordingTimestamp = null; if (this.recorder) { this.recorder.destroy(); this.recorder = null; } } destroy() { super.destroy(); this._reset(); this.player.debug.log('RecorderRTC', 'destroy'); } getSeekableBlob(inputBlob) { const reader = new EBML.Reader(); const decoder = new EBML.Decoder(); const tool = EBML.tools; const fileReader = new FileReader(); const renderPromise = new Promise((res, rej) => { fileReader.onload = function (e) { const ebmlElms = decoder.decode(this.result); ebmlElms.forEach(function (element) { reader.read(element); }); reader.stop(); const refinedMetadataBuf = tool.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues); const body = this.result.slice(reader.metadataSize); const newBlob = new Blob([refinedMetadataBuf, body], { type: "video/webm" }); res(newBlob); }; fileReader.readAsArrayBuffer(inputBlob); }); return renderPromise; } startRecord() { const debug = this.player.debug; const options = { type: 'video', mimeType: 'video/webm;codecs=h264', timeSlice: 1000, onTimeStamp: timestamp => { debug.log('RecorderRTC', 'record timestamp :' + timestamp); if (this._startRecordingTimestamp === null) { this._startRecordingTimestamp = timestamp; } this._recordingTimestamp = (timestamp - this._startRecordingTimestamp) / 1000; }, ondataavailable: blob => { this.totalByteLength += blob.size; debug.log('RecorderRTC', 'ondataavailable', blob.size); }, disableLogs: !this.player._opt.debug }; try { // video let stream = null; // safari 浏览器不支持 captureStream() 方法。 // canvas if (this.player.getRenderType() === RENDER_TYPE.canvas) { stream = this.player.video.$videoElement.captureStream(25); } else { // video // wcs or wasm decode if (this.player.video.mediaStream) { stream = this.player.video.mediaStream; } else { if (this.player.isOldHls()) { // hls stream = this.player.video.$videoElement.captureStream(25); } else if (this.player._opt.useMSE) { // mse decode stream = this.player.video.$videoElement.captureStream(25); } else if (this.player._opt.useWCS) { // wcs stream = this.player.video.$videoElement.captureStream(25); } else if (this.player.isWebrtcH264()) { // webrtc stream = this.player.webrtc.videoStream; } else if (this.player.isAliyunRtc()) { // aliyun rtc stream = this.player.video.$videoElement.captureStream(25); } } } if (stream) { // audio if (this.player.audio && this.player.audio.mediaStreamAudioDestinationNode && this.player.audio.mediaStreamAudioDestinationNode.stream && !this.player.audio.isStateSuspended() && this.player.audio.hasAudio && this.player._opt.hasAudio) { const audioStream = this.player.audio.mediaStreamAudioDestinationNode.stream; if (audioStream.getAudioTracks().length > 0) { const audioTrack = audioStream.getAudioTracks()[0]; if (audioTrack && audioTrack.enabled) { stream.addTrack(audioTrack); } } } this.recorder = RecordRTC_1(stream, options); } else { debug.error('RecorderRTC', 'startRecord error and can not create stream'); this.player.emitError(EVENTS.recordCreateError, 'can not create stream'); return; } } catch (e) { debug.error('RecorderRTC', 'startRecord error', e); this.player.emitError(EVENTS.recordCreateError, e); return; } if (this.recorder) { this._isRecording 
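// Minimal sketch of the capture path used in startRecord() above, with standard browser APIs only
// (the player wires the result into RecordRTC): grab a MediaStream from the rendering element and
// merge in the decoded audio track when one is available. captureStream() availability and the fps
// argument vary by browser (Safari in particular is limited).
//   function buildRecordingStream(canvasOrVideo, audioDestinationNode) {
//     const stream = canvasOrVideo.captureStream(25);   // 25 fps hint for canvas capture
//     if (audioDestinationNode && audioDestinationNode.stream.getAudioTracks().length > 0) {
//       stream.addTrack(audioDestinationNode.stream.getAudioTracks()[0]);
//     }
//     return stream;
//   }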
= true; this.player.emit(EVENTS.recording, true); this.recorder.startRecording(); debug.log('RecorderRTC', 'start recording'); this.player.emit(EVENTS.recordStart); this.startRecordingInterval(); } } startRecordingInterval() { this.stopRecordingInterval(); this.recordingInterval = window.setInterval(() => { this.player.emit(EVENTS.recordingTimestamp, this._recordingTimestamp); }, 1000); } stopRecordAndSave() { let type = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : RECORDING_TYPE.download; let fileName = arguments.length > 1 ? arguments[1] : undefined; return new Promise((resolve, reject) => { if (!this.recorder || !this._isRecording) { reject('recorder is not ready'); } if (fileName) { this.setFileName(fileName); } this.recorder.stopRecording(() => { this.player.debug.log('RecorderRTC', 'stop recording'); const fileName = (this.fileName || now$2()) + '.' + FILE_SUFFIX.webm; this.recorder.getBlob(); // this.getSeekableBlob(blob).then((seekableBlob) => { // if (type === RECORDING_TYPE.blob) { // resolve(seekableBlob); // this.player.emit(EVENTS.recordBlob, seekableBlob); // } else { // resolve(); // RecordRTC.invokeSaveAsDialog(seekableBlob, fileName); // } // }) if (type === RECORDING_TYPE.blob) { const blob = this.recorder.getBlob(); resolve(blob); this.player.emit(EVENTS.recordBlob, blob); } else { resolve(); // saveAs(blob, fileName) this.recorder.save(fileName); } this.player.emit(EVENTS.recordEnd); this._reset(); this.player.emit(EVENTS.recording, false); }); }); } getToTalByteLength() { return this.totalByteLength; } getTotalDuration() { return this.recordTime; } getType() { return FILE_SUFFIX.webm; } // 占个坑位 initMetaData() {} } class MP4$3 { static init() { MP4$3.types = { avc1: [], avcC: [], hvc1: [], hvcC: [], btrt: [], dinf: [], dref: [], esds: [], ftyp: [], // 视频类型 hdlr: [], mdat: [], // 视频数据 mdhd: [], mdia: [], mfhd: [], minf: [], moof: [], // moov: [], // 视频信息(视频参数) mp4a: [], mvex: [], mvhd: [], sdtp: [], stbl: [], stco: [], stsc: [], stsd: [], stsz: [], stts: [], tfdt: [], tfhd: [], traf: [], // 视频参数(moov)中主要的子box 为track,每个track都是一个随时间变化的媒体序列, // 时间单位为一个sample,可以是一帧数据,或者音频(注意,一帧音频可以分解成多个音频sample,所以音频一般用sample作为单位,而不用帧) trak: [], trun: [], trex: [], tkhd: [], vmhd: [], smhd: [], '.mp3': [], free: [], edts: [], elst: [], stss: [] }; for (let name in MP4$3.types) { if (MP4$3.types.hasOwnProperty(name)) { MP4$3.types[name] = [name.charCodeAt(0), name.charCodeAt(1), name.charCodeAt(2), name.charCodeAt(3)]; } } let constants = MP4$3.constants = {}; // File Type Box,描述文件遵从的MP4规范与版本 constants.FTYP = new Uint8Array([0x69, 0x73, 0x6F, 0x6D, // major_brand: isom 0x0, 0x0, 0x02, 0x0, // minor_version: 0x20 0x69, 0x73, 0x6F, 0x6D, // isom 0x69, 0x73, 0x6F, 0x32, // iso 0x61, 0x76, 0x63, 0x31, // avc1 0x6D, 0x70, 0x34, 0x31, // MP4 0x00, 0x00, 0x00, 0x00]); constants.STSD_PREFIX = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x01 // entry_count ]); constants.STTS = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x00 // entry_count ]); constants.STSC = constants.STCO = constants.STTS; constants.STSZ = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x00, // sample_size 0x00, 0x00, 0x00, 0x00 // sample_count ]); constants.HDLR_VIDEO = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x00, // pre_defined 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide' 0x00, 0x00, 0x00, 0x00, // reserved: 3 * 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x56, 0x69, 0x64, 0x65, 0x6F, 0x48, 0x61, 0x6E, 0x64, 0x6C, 0x65, 0x72, 0x00 // name: VideoHandler
]);
constants.HDLR_AUDIO = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // pre_defined
0x73, 0x6F, 0x75, 0x6E, // handler_type: 'soun'
0x00, 0x00, 0x00, 0x00, // reserved: 3 * 4 bytes
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x53, 0x6F, 0x75, 0x6E, 0x64, 0x48, 0x61, 0x6E, 0x64, 0x6C, 0x65, 0x72, 0x00 // name: SoundHandler
]);
constants.DREF = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x01, // entry_count
0x00, 0x00, 0x00, 0x0C, // entry_size
0x75, 0x72, 0x6C, 0x20, // type 'url '
0x00, 0x00, 0x00, 0x01 // version(0) + flags
]);
// Sound media header
constants.SMHD = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00 // balance(2) + reserved(2)
]);
// video media header
constants.VMHD = new Uint8Array([0x00, 0x00, 0x00, 0x01, // version(0) + flags
0x00, 0x00, // graphicsmode: 2 bytes
0x00, 0x00, 0x00, 0x00, // opcolor: 3 * 2 bytes
0x00, 0x00]);
}
// Generate a box.
// `type` is the box's fourcc; every argument after the first must be a binary (Uint8Array) payload.
static box(type) {
// The 8-byte box header: the first 4 bytes hold the box size. A size of 0 means the box runs to the
// end of the file (only used for mdat); a size of 1 means the real size is stored in the 64-bit
// "largesize" field (again only used for mdat). The next 4 bytes are the box type; a type of 'uuid'
// marks a user-defined extension box.
let size = 8;
let result = null;
// collect every argument except the first (the type)
let datas = Array.prototype.slice.call(arguments, 1);
let arrayCount = datas.length;
for (let i = 0; i < arrayCount; i++) {
  size += datas[i].byteLength;
}
// a box consists of a header and a body
result = new Uint8Array(size);
// the first 4 bytes store the box size as a 32-bit big-endian integer
result[0] = size >>> 24 & 0xFF; // size
result[1] = size >>> 16 & 0xFF;
result[2] = size >>> 8 & 0xFF;
result[3] = size & 0xFF;
// the next 4 bytes are the box type
result.set(type, 4); // type
let offset = 8;
// the box body may be raw data or nested child boxes
for (let i = 0; i < arrayCount; i++) {
  // data body
  result.set(datas[i], offset);
  offset += datas[i].byteLength;
}
return result;
}
// emit ftyp & moov
static generateInitSegment(meta, trakList, mdatBytes) {
// The ftyp box carries four-character brand codes that describe the container type and the
// specifications the file is compatible with. A plain MP4 file has exactly one ftyp box, at the
// very beginning of the file.
let ftyp = MP4$3.box(MP4$3.types.ftyp, MP4$3.constants.FTYP);
let free = MP4$3.box(MP4$3.types.free); // free box, referenced below when computing chunk offsets and the final layout
// allocate mdatbox, init fps = 25
let offset = 8;
let mdatbox = new Uint8Array(); // total length
// The mdat box may need the 64-bit "large size": when the payload cannot be described in 4 bytes,
// the 32-bit size field is set to 1 and the real box size is written into the largesize field.
if (mdatBytes + offset >= Math.pow(2, 32) - 1) {
  //large size
  offset = 16;
  mdatbox = new Uint8Array(mdatBytes + offset);
  mdatbox.set(new Uint8Array([0x00, 0x00, 0x00, 0x01]), 0);
  // media data (mdat)
  mdatbox.set(MP4$3.types.mdat, 4);
  // largesize counts the whole box including its 16-byte header; JS bitwise operators work on
  // 32 bits only, so split the 64-bit value with integer arithmetic.
  const mdatTotalSize = mdatBytes + 16;
  const mdatSizeHigh = Math.floor(mdatTotalSize / 4294967296);
  const mdatSizeLow = mdatTotalSize % 4294967296;
  mdatbox.set(new Uint8Array([mdatSizeHigh >>> 24 & 0xFF, mdatSizeHigh >>> 16 & 0xFF, mdatSizeHigh >>> 8 & 0xFF, mdatSizeHigh & 0xFF, mdatSizeLow >>> 24 & 0xFF, mdatSizeLow >>> 16 & 0xFF, mdatSizeLow >>> 8 & 0xFF, mdatSizeLow & 0xFF]), 8);
} else {
  mdatbox = new Uint8Array(mdatBytes + offset);
  mdatbox[0] = mdatBytes + 8 >>> 24 & 0xFF;
  mdatbox[1] = mdatBytes + 8 >>> 16 & 0xFF;
  mdatbox[2] = mdatBytes + 8 >>> 8 & 0xFF;
  mdatbox[3] = mdatBytes + 8 & 0xFF;
  // media data (mdat)
  mdatbox.set(MP4$3.types.mdat, 4);
}
// Write samples into mdatbox
for (let i = 0; i < trakList.length; i++) {
  let trak = trakList[i];
  // duration
  trak.duration = trak.refSampleDuration * trak.sequenceNumber;
  for (let j = 0; j
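// The generic box() helper above emits [32-bit size][4-byte type][payload...]; every other writer in
// this class builds on it. Usage sketch with members defined above:
//   const ftypBox = MP4$3.box(MP4$3.types.ftyp, MP4$3.constants.FTYP);
//   // ftypBox[0..3] = total size (big-endian), ftypBox[4..7] = 'ftyp', remainder = the FTYP payload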
< trak.sequenceNumber; j++) { // 遍历 samples let sample = trak.samples[j]; // sample sample.chunkOffset = ftyp.byteLength + free.byteLength + offset; // 合并 data 数据。。。。。。 let data = sample.data; mdatbox.set(data, offset); offset += data.byteLength; } } // Moov box中存放着媒体信息,上面提到的stbl里存放帧信息,属于媒体信息,也在moov box里。 // Moov box 用来描述媒体数据。 // Moov box 主要包含 mvhd、trak、mvex三种子box。 let moov = MP4$3.moov(meta, trakList); // 视频类型(ftyp)、视频数据(mdat)、视频信息(moov) let result = new Uint8Array(ftyp.byteLength + moov.byteLength + mdatbox.byteLength + free.byteLength); // ftyp 视频类型 result.set(ftyp, 0); // free result.set(free, ftyp.byteLength); // mdat 视频数据 result.set(mdatbox, ftyp.byteLength + free.byteLength); // moov 视频信息 result.set(moov, ftyp.byteLength + mdatbox.byteLength + free.byteLength); return result; } // Movie metadata box // 媒体的metadata信息,有且仅有一个,位于moov box中。 // Moov box 主要包含 mvhd、trak、mvex三种子box。 // 视频参数(moov)中主要的子box 为track, // 每个track都是一个随时间变化的媒体序列,时间单位为一个sample,可以是一帧数据, // 或者音频(注意,一帧音频可以分解成多个音频sample,所以音频一般用sample作为单位,而不用帧)。 // Sample按照事件顺序排列。track里面的每个sample通过引用关联到一个sample description。 // 这个sample descriptios定义了怎样解码这个sample,例如使用的压缩算法。(注:在目前的使用中,该值为1) static moov(meta, trakList) { let timescale = meta.timescale; let duration = meta.duration; let trakLen = trakList.length; // Mvhd box定义了整个文件的特性 let mvhd = MP4$3.mvhd(timescale, duration); let trakArrayBuffer = new Uint8Array(); for (let i = 0; i < trakLen; i++) { let trak = MP4$3.trak(trakList[i]); let arrayBuffer = new Uint8Array(trak.byteLength + trakArrayBuffer.byteLength); arrayBuffer.set(trakArrayBuffer, 0); arrayBuffer.set(trak, trakArrayBuffer.byteLength); trakArrayBuffer = new Uint8Array(arrayBuffer.byteLength); trakArrayBuffer.set(arrayBuffer, 0); } return MP4$3.box(MP4$3.types.moov, mvhd, trakArrayBuffer); } // Movie header box // 这里写mp4时需要传入的参数为Time scale 和 Duration,其他的使用默认值即可。 // MP4文件的整体信息,跟具体的视频流、音频流无关,比如创建时间、文件时长等。 // mvhd针对整个影片 // 这里写mp4时需要传入的参数为Time scale 和 Duration,其他的使用默认值即可。 static mvhd(timescale, duration) { // return MP4$3.box(MP4$3.types.mvhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0xCE, 0xBA, 0xFD, 0xA8, // creation_time 文件创建时间; 0xCE, 0xBA, 0xFD, 0xA8, // modification_time 文件修改时间; timescale >>> 24 & 0xFF, // timescale: 4 bytes 一秒包含的时间单位(整数)。举个例子,如果timescale等于1000,那么,一秒包含1000个时间单位(后面track等的时间,都要用这个来换算,比如track的duration为10,000,那么,track的实际时长为10,000/1000=10s); timescale >>> 16 & 0xFF, timescale >>> 8 & 0xFF, timescale & 0xFF, duration >>> 24 & 0xFF, // duration: 4 bytes 影片时长(整数),根据文件中的track的信息推导出来,等于时间最长的track的duration duration >>> 16 & 0xFF, duration >>> 8 & 0xFF, duration & 0xFF, 0x00, 0x01, 0x00, 0x00, // Preferred rate: 1.0 推荐的播放速率,32位整数,高16位、低16位分别代表整数部分、小数部分([16.16]),举例 0x0001 0000 代表1.0,正常播放速度; 0x01, 0x00, 0x00, 0x00, // Preferred Volume(1.0, 2bytes) + reserved(2bytes) 播放音量,16位整数,高8位、低8位分别代表整数部分、小数部分([8.8]),举例 0x01 00 表示 1.0,即最大音量; 0x00, 0x00, 0x00, 0x00, // reserved: 4 + 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, // ----begin composition matrix---- 0x00, 0x00, 0x00, 0x00, // 视频的转换矩阵,一般可以忽略不计; 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // ----end composition matrix---- 0x00, 0x00, 0x00, 0x00, // ----begin pre_defined 6 * 4 bytes---- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // ----end pre_defined 6 * 4 bytes---- 0x00, 0x00, 0x00, 0x03 // next_track_ID: 4 bytes 3 
32位整数,非0,一般可以忽略不计。当要添加一个新的track到这个影片时,可以使用的track id,必须比当前已经使用的track id要大。也就是说,添加新的track时,需要遍历所有track,确认可用的track id; ])); } // Track box // 一个Track box定义了movie中的一个track。一部movie可以包含一个或多个tracks,它们之间相互独立,各自有各自的时间和空间信息。每个track box 都有与之关联的mdat box。 // 包含媒体数据引用和描述 // 包含modifier track // 流媒体协议的打包信息(hint trak),引用或者复用对应的媒体sample data。 // Hint tracks和modifier tracks必须保证完整性,同时和至少一个media track一起存在。 // 换句话说,即使hint tracks复制了对应的媒体sample data,media tracks 也不能从一部hinted movie中删除。 // 写mp4时仅用到第一个目的,所以这里只介绍媒体数据的引用和描述。 // 一个trak box一般主要包含了tkhd box、 edts box 、mdia box static trak(meta) { return MP4$3.box(MP4$3.types.trak, MP4$3.tkhd(meta), MP4$3.mdia(meta)); } // Track header box // 用来描述trak box的header 信息,定义了一个trak的时间、空间、音量信息。 static tkhd(meta) { let trackId = meta.id, duration = meta.duration; let width = meta.presentWidth, height = meta.presentHeight; return MP4$3.box(MP4$3.types.tkhd, new Uint8Array([0x00, 0x00, 0x00, 0x0F, // version(0) + flags tkhd box的版本; 0xCE, 0xBA, 0xFD, 0xA8, // creation_time 当前track的创建时间; 0xCE, 0xBA, 0xFD, 0xA8, // modification_time 当前track的最近修改时间; trackId >>> 24 & 0xFF, // track_ID: 4 bytes 当前track的唯一标识,不能为0,不能重复; trackId >>> 16 & 0xFF, trackId >>> 8 & 0xFF, trackId & 0xFF, 0x00, 0x00, 0x00, 0x00, // reserved: 4 bytes duration >>> 24 & 0xFF, // duration: 4 bytes 当前track的完整时长(需要除以timescale得到具体秒数); duration >>> 16 & 0xFF, duration >>> 8 & 0xFF, duration & 0xFF, 0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // layer(2bytes) + alternate_group(2bytes) layer 视频轨道的叠加顺序,数字越小越靠近观看者,比如1比2靠上,0比1靠上; alternate_group 当前track的分组ID,alternate_group值相同的track在同一个分组里面。同个分组里的track,同一时间只能有一个track处于播放状态。当alternate_group为0时,表示当前track没有跟其他track处于同个分组。一个分组里面,也可以只有一个track; 0x00, 0x00, 0x00, 0x00, // volume(2bytes) + reserved(2bytes) audio track的音量,介于0.0~1.0之间; 0x00, 0x01, 0x00, 0x00, // ----begin composition matrix---- 0x00, 0x00, 0x00, 0x00, // 视频的变换矩阵; 36 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // ----end composition matrix---- width >>> 8 & 0xFF, // width and height 视频的宽高; 4 width & 0xFF, 0x00, 0x00, height >>> 8 & 0xFF, // width and height 视频的宽高; 4 height & 0xFF, 0x00, 0x00])); } static edts(meta, i) { return MP4$3.box(MP4$3.types.edts, MP4$3.elst(meta, i)); } // 该box为edst box的唯一子box,不是所有的MP4文件都有edst box,这个box是使其对应的trak box的时间戳产生偏移。 // 暂时未发现需要该偏移量的地方,编码时也未对该box进行编码。 static elst(meta, i) { let videoDelayDuration = 0; for (let j = 0; j < i; j++) { if (meta[j].type === 'video') { videoDelayDuration += meta[j].duration; } } let duration = meta[i].duration; if (videoDelayDuration === 0) { videoDelayDuration = meta[i].refSampleDuration; } return MP4$3.box(MP4$3.types.elst, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, videoDelayDuration >>> 24 & 0xFF, // SampleDuration: 4 bytes videoDelayDuration >>> 16 & 0xFF, videoDelayDuration >>> 8 & 0xFF, videoDelayDuration & 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, //media_time 0x00, 0x01, 0x00, 0x00, // media_rate(2byte) + Media rate fraction(3byte) duration >>> 24 & 0xFF, // Duration: 4 bytes duration >>> 16 & 0xFF, duration >>> 8 & 0xFF, duration & 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00])); } // Media Box // 该box定义了trak box的类型和sample的信息。 static mdia(meta) { return MP4$3.box(MP4$3.types.mdia, MP4$3.mdhd(meta), MP4$3.hdlr(meta), MP4$3.minf(meta)); } // Media header box // mdhd box 定义了该box的timescale // 和duration(注:这里的这两个参数与前面说的mvhd有区别,这里的这两个参数都是以一个sample为时间单位的, // 
例:在只有一个视频trak的情况下,mvhd的timescale为1000,一个sample的duration为40 // ,那么这里的timescale为1000/40,同理这里的duration算法与之一样理解。) static mdhd(meta) { let timescale = meta.timescale / meta.refSampleDuration; let duration = timescale * meta.duration / meta.timescale; return MP4$3.box(MP4$3.types.mdhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0xCE, 0xBA, 0xFD, 0xA8, // creation_time 0xCE, 0xBA, 0xFD, 0xA8, // modification_time timescale >>> 24 & 0xFF, // timescale: 4 bytes timescale >>> 16 & 0xFF, timescale >>> 8 & 0xFF, timescale & 0xFF, duration >>> 24 & 0xFF, // duration: 4 bytes duration >>> 16 & 0xFF, duration >>> 8 & 0xFF, duration & 0xFF, 0x55, 0xC4, // language: und (undetermined) 0x00, 0x00 // pre_defined = 0 ])); } // Media handler reference box // 声明当前track的类型,以及对应的处理器(handler)。 /** * vide(0x76 69 64 65),video track; soun(0x73 6f 75 6e),audio track; hint(0x68 69 6e 74),hint track; //Hdlr box 定义了这段trak的媒体处理组件,以下图会更清晰的解释这个box * @param meta * @returns {null} */ static hdlr(meta) { let data = null; data = MP4$3.constants.HDLR_VIDEO; return MP4$3.box(MP4$3.types.hdlr, data); } // Media infomation box // 该box也是上面的mdia box的子box,其主要用来描述该trak的具体的媒体处理组件内容的。 static minf(meta) { let xmhd = null; xmhd = MP4$3.box(MP4$3.types.vmhd, MP4$3.constants.VMHD); return MP4$3.box(MP4$3.types.minf, xmhd, MP4$3.dinf(), MP4$3.stbl(meta)); } // Data infomation box // dinf box 定义了该trak的数据信息,包括了数据的引用方式,数据的存储方式等。 // dinf box 用来定义媒体处理组件如何获取媒体数据的 static dinf() { return MP4$3.box(MP4$3.types.dinf, MP4$3.box(MP4$3.types.dref, MP4$3.constants.DREF)); } // Sample table box // Sample Table Box(stbl)是上面minf的子box之一,用来定义存放时间/偏移的映射关系,数据信息都在以下子box中 // 在普通mp4中,在获取数据之前,需要解析每个帧数据所在位置,每个帧数据都存放在mdat中,而这些帧的信息全部存放在stbl box 中, // 所以,若要mp4文件能够正常播放,需要在写mp4文件时,将所有的帧数据信息写入 stbl box中。 // MP4文件的媒体数据部分在mdat box里,而stbl则包含了这些媒体数据的索引以及时间信息,了解stbl对解码、渲染MP4文件很关键。 // 在MP4文件中,媒体数据被分成多个chunk,每个chunk可包含多个sample,而sample则由帧组成(通常1个sample对应1个帧),关系如下: static stbl(meta) { let sampleList = meta.samples; let sampleToChunk = [{ No: 1, num: 0, sampleDelte: 1, chunkNo: 1, duration: sampleList[0].duration }]; let durationList = [sampleList[0].duration]; let len = sampleList.length; for (let i = 0; i < len; i++) { for (let j = 0; j < sampleToChunk.length; j++) { if (sampleList[i].duration === sampleToChunk[j].duration) { sampleToChunk[j].num++; } else { if (durationList.indexOf(sampleList[i].duration) < 0) { durationList.push(sampleList[i].duration); sampleToChunk.push({ No: 2, num: 0, sampleDelte: 1, chunkNo: i + 1, duration: sampleList[i].duration }); } } } } return MP4$3.box(MP4$3.types.stbl, // type: stbl MP4$3.stsd(meta), // Sample Description Table Sample Description Box用来描述数据的格式,比如视频格式为avc,比如音频格式为aac MP4$3.stts(sampleToChunk), // Time-To-Sample 时间戳和Sample序号映射表 MP4$3.stss(sampleList), // 关键帧序号,该box存在于video trak,因为audio trak 中以sample为单位,但多个sample才组成一帧音频,所以在audio trak中无需该box。 MP4$3.stsc(sampleToChunk), //Sample-To-Chunk Sample to chunk 的映射表。这个算法比较巧妙,在多个chunk时,该算法较为复杂。在本次使用中未考虑多个chunk的状态,仅考虑整个文件单个chunk的情况。 MP4$3.stsz(sampleList), // Sample size Sample Size Boxes 每个Sample大小的表。Stz2是另一种sample size的存储算法,更节省空间,使用时使用其中一种即可,这里使用stsz。原因简单,因为算法容易。 MP4$3.stco(sampleToChunk, sampleList) // Chunk offset co64: 每个Chunk位置偏移表,sample的偏移可根据其他box推算出来,co64是指64位的chunk偏移,暂时只使用到32位的,因此这里使用stco即可。 ); } // 每个sample的时长; // stts包含了DTS到sample number的映射表,主要用来推导每个帧的时长。 static stts(sampleToChunk) { let sampleToChunkLen = sampleToChunk.length; let stts = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags sampleToChunkLen >>> 24 & 0xFF, // entry_count: 4 bytes tts 
中包含的entry条目数; sampleToChunkLen >>> 16 & 0xFF, sampleToChunkLen >>> 8 & 0xFF, sampleToChunkLen & 0xFF]); let offset = stts.byteLength, sttsInfo = new Uint8Array(offset + sampleToChunkLen * 8); sttsInfo.set(stts, 0); for (let index = 0; index < sampleToChunkLen; index++) { sttsInfo.set(new Uint8Array([sampleToChunk[index].num >>> 24 & 0xFF, // samplesPerChunk: 4 bytes sampleToChunk[index].num >>> 16 & 0xFF, sampleToChunk[index].num >>> 8 & 0xFF, sampleToChunk[index].num & 0xFF, sampleToChunk[index].sampleDelte >>> 24 & 0xFF, // samplesDescription index: 4 bytes sampleToChunk[index].sampleDelte >>> 16 & 0xFF, sampleToChunk[index].sampleDelte >>> 8 & 0xFF, sampleToChunk[index].sampleDelte & 0xFF]), offset); offset += 8; } return MP4$3.box(MP4$3.types.stts, sttsInfo); } // 哪些sample是关键帧; // mp4文件中,关键帧所在的sample序号。如果没有stss的话,所有的sample中都是关键帧。 static stss(mdatDataList) { let keyFrameMap = [], len = mdatDataList.length; for (let i = 0; i < len; i++) { if (mdatDataList[i].isKeyframe === true) { keyFrameMap.push(i + 1); } } let keyFrameLen = keyFrameMap.length; let stss = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags keyFrameLen >>> 24 & 0xFF, // entry_count: 4 bytes entry的条目数,可以认为是关键帧的数目; keyFrameLen >>> 16 & 0xFF, keyFrameLen >>> 8 & 0xFF, keyFrameLen & 0xFF]); let offset = stss.byteLength, stssInfo = new Uint8Array(offset + keyFrameLen * 4); stssInfo.set(stss, 0); for (let index = 0; index < keyFrameLen; index++) { stssInfo.set(new Uint8Array([keyFrameMap[index] >>> 24 & 0xFF, // entry_count: 4 bytes keyFrameMap[index] >>> 16 & 0xFF, keyFrameMap[index] >>> 8 & 0xFF, keyFrameMap[index] & 0xFF]), offset); offset += 4; } return MP4$3.box(MP4$3.types.stss, stssInfo); } // 每个thunk中包含几个sample; // sample 以 chunk 为单位分成多个组。chunk的size可以是不同的,chunk里面的sample的size也可以是不同的。 static stsc(sampleToChunk) { let sampleToChunkLen = sampleToChunk.length; let stsc = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags sampleToChunkLen >>> 24 & 0xFF, // entry_count: 4 bytes 有多少个表项(每个表项,包含first_chunk、samples_per_chunk、sample_description_index信息); sampleToChunkLen >>> 16 & 0xFF, sampleToChunkLen >>> 8 & 0xFF, sampleToChunkLen & 0xFF]); let offset = stsc.byteLength, stscInfo = new Uint8Array(offset + sampleToChunkLen * 12); stscInfo.set(stsc, 0); for (let index = 0; index < sampleToChunkLen; index++) { let firstChunk = sampleToChunk[index].chunkNo, samplesPerChunk = sampleToChunk[index].num, sampleDelte = sampleToChunk[index].sampleDelte; stscInfo.set(new Uint8Array([firstChunk >>> 24 & 0xFF, // firstChunk: 4 bytes 当前表项中,对应的第一个chunk的序号; firstChunk >>> 16 & 0xFF, firstChunk >>> 8 & 0xFF, firstChunk & 0xFF, samplesPerChunk >>> 24 & 0xFF, // samplesPerChunk: 4 bytes 每个chunk包含的sample数; samplesPerChunk >>> 16 & 0xFF, samplesPerChunk >>> 8 & 0xFF, samplesPerChunk & 0xFF, sampleDelte >>> 24 & 0xFF, // samplesDescription index: 4 bytes 指向 stsd 中 sample description 的索引值(参考stsd小节); sampleDelte >>> 16 & 0xFF, sampleDelte >>> 8 & 0xFF, sampleDelte & 0xFF]), offset); offset += 12; } return MP4$3.box(MP4$3.types.stsc, stscInfo); } // 每个sample的size(单位是字节); // 每个sample的大小(字节),根据 sample_size 字段,可以知道当前track包含了多少个sample(或帧)。 static stsz(mdatDataList) { let len = mdatDataList.length; let stsz = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x00, // sample size 当所有sample的size都一样时,该值为sample的size ,否则为0 len >>> 24 & 0xFF, // sample count: 4 bytes 当前track里面的sample数目。如果 sample_size==0,那么,sample_count 等于下面entry的条目; len >>> 16 & 0xFF, len >>> 8 & 0xFF, len & 0xFF]); let offset = stsz.byteLength, 
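// How the stbl children are used when the file is read back: stts maps sample index -> timestamp,
// stss lists the key-frame samples, stsc maps samples -> chunks, stsz holds every sample's size and
// stco the file offset of each chunk. With the single-chunk layout this writer produces, a sample's
// byte offset can be resolved like this (sketch):
//   function sampleOffset(firstChunkOffset, sampleSizes, sampleIndex) {
//     let offset = firstChunkOffset;                    // from stco
//     for (let i = 0; i < sampleIndex; i++) offset += sampleSizes[i];   // from stsz
//     return offset;
//   }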
stszInfo = new Uint8Array(offset + len * 4); stszInfo.set(stsz, 0); for (let i = 0; i < len; i++) { let data = mdatDataList[i].data; let dataLen = data.byteLength; stszInfo.set(new Uint8Array([dataLen >>> 24 & 0xFF, //per sample size: 4 bytes dataLen >>> 16 & 0xFF, dataLen >>> 8 & 0xFF, dataLen & 0xFF]), offset); offset += 4; } return MP4$3.box(MP4$3.types.stsz, stszInfo); } // thunk在文件中的偏移; // 存储了该 track 中每个 chunk 在文件中的偏移。 static stco(sampleToChunk, mdatDataList) { let offset = mdatDataList[0].chunkOffset; return MP4$3.box(MP4$3.types.stco, new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x01, // entry_count: 4 bytes // 默认只有视频时,只有一段chunk offset >>> 24 & 0xFF, // samplesPerChunk: 4 bytes offset >>> 16 & 0xFF, offset >>> 8 & 0xFF, offset & 0xFF])); } // Sample description box // 给出视频、音频的编码、宽高、音量等信息,以及每个sample中包含多少个frame; static stsd(meta) { if (meta.type === 'audio') { // if (meta.codec === 'mp3') { return MP4$3.box(MP4$3.types.stsd, MP4$3.constants.STSD_PREFIX, MP4$3.mp3(meta)); } // else: aac -> mp4a return MP4$3.box(MP4$3.types.stsd, MP4$3.constants.STSD_PREFIX, MP4$3.mp4a(meta)); } else { if (meta.videoType === 'avc') { return MP4$3.box(MP4$3.types.stsd, MP4$3.constants.STSD_PREFIX, MP4$3.avc1(meta)); } else { return MP4$3.box(MP4$3.types.stsd, MP4$3.constants.STSD_PREFIX, MP4$3.hvc1(meta)); } // } } static mp3(meta) { let channelCount = meta.channelCount; let sampleRate = meta.sampleRate; let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, // reserved(4) 0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2) 0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, channelCount, // channelCount(2) 0x00, 0x10, // sampleSize(2) 0x00, 0x00, 0x00, 0x00, // reserved(4) sampleRate >>> 8 & 0xFF, // Audio sample rate sampleRate & 0xFF, 0x00, 0x00]); return MP4$3.box(MP4$3.types['.mp3'], data); } static mp4a(meta) { let channelCount = meta.channelCount; let sampleRate = meta.sampleRate; let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, // reserved(4) 0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2) 0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, channelCount, // channelCount(2) 0x00, 0x10, // sampleSize(2) 0x00, 0x00, 0x00, 0x00, // reserved(4) sampleRate >>> 8 & 0xFF, // Audio sample rate sampleRate & 0xFF, 0x00, 0x00]); return MP4$3.box(MP4$3.types.mp4a, data, MP4$3.esds(meta)); } static esds(meta) { let config = meta.config || []; let configSize = config.length; let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version 0 + flags 0x03, // descriptor_type 0x17 + configSize, // length3 0x00, 0x01, // es_id 0x00, // stream_priority 0x04, // descriptor_type 0x0F + configSize, // length 0x40, // codec: mpeg4_audio 0x15, // stream_type: Audio 0x00, 0x00, 0x00, // buffer_size 0x00, 0x00, 0x00, 0x00, // maxBitrate 0x00, 0x00, 0x00, 0x00, // avgBitrate 0x05 // descriptor_type ].concat([configSize]).concat(config).concat([0x06, 0x01, 0x02 // GASpecificConfig ])); return MP4$3.box(MP4$3.types.esds, data); } static avc1(meta) { let avcc = meta.avcc; let width = meta.codecWidth, height = meta.codecHeight; let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, // reserved(4) 0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2) 0x00, 0x00, 0x00, 0x00, // pre_defined(2) + reserved(2) 0x00, 0x00, 0x00, 0x00, // pre_defined: 3 * 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, width >>> 8 & 0xFF, // width: 2 bytes width & 0xFF, height >>> 8 & 0xFF, // height: 2 bytes height & 0xFF, 
0x00, 0x48, 0x00, 0x00, // horizresolution: 4 bytes 0x00, 0x48, 0x00, 0x00, // vertresolution: 4 bytes 0x00, 0x00, 0x00, 0x00, // reserved: 4 bytes 0x00, 0x01, // frame_count 0x0D, // strlen 10bytes 0x6a, 0x65, 0x73, 0x73, // compressorname: 32 bytes 0x69, 0x62, 0x75, 0x63, 0x61, 0x2d, 0x70, 0x72, 0x6f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, // depth 0xFF, 0xFF // pre_defined = -1 ]); return MP4$3.box(MP4$3.types.avc1, data, MP4$3.box(MP4$3.types.avcC, avcc)); } // hvc static hvc1(meta) { let avcc = meta.avcc; const width = meta.codecWidth; const height = meta.codecHeight; let data = new Uint8Array([0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, width >>> 8 & 255, width & 255, height >>> 8 & 255, height & 255, 0, 72, 0, 0, 0, 72, 0, 0, 0, 0, 0, 0, 0, 1, 13, 106, 101, 115, 115, 105, 98, 117, 99, 97, 45, 112, 114, 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 255, 255]); return MP4$3.box(MP4$3.types.hvc1, data, MP4$3.box(MP4$3.types.hvcC, avcc)); } // Movie Extends box static mvex(meta) { return MP4$3.box(MP4$3.types.mvex, MP4$3.trex(meta)); } // Track Extends box // 用来给 fMP4 的 sample 设置各种默认值,比如时长、大小等。 static trex(meta) { let trackId = meta.id; let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags trackId >>> 24 & 0xFF, // track_ID 对应的 track 的 ID,比如video track、audio track 的ID; trackId >>> 16 & 0xFF, trackId >>> 8 & 0xFF, trackId & 0xFF, 0x00, 0x00, 0x00, 0x01, // default_sample_description_index sample description 的默认 index(指向stsd); 0x00, 0x00, 0x00, 0x00, // default_sample_duration sample 默认时长,一般为0; 0x00, 0x00, 0x00, 0x00, // default_sample_size sample 默认大小,一般为0; 0x00, 0x01, 0x00, 0x01 // default_sample_flags sample 的默认flag,一般为0; ]); return MP4$3.box(MP4$3.types.trex, data); } // Movie fragment box // moof是个container box,相关 metadata 在内嵌box里,比如 mfhd、 tfhd、trun 等。 static moof(meta, baseMediaDecodeTime) { return MP4$3.box(MP4$3.types.moof, MP4$3.mfhd(meta.sequenceNumber), MP4$3.traf(meta, baseMediaDecodeTime)); } // 结构比较简单,sequence_number 为 movie fragment 的序列号。根据 movie fragment 产生的顺序,从1开始递增。 static mfhd(sequenceNumber) { let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, sequenceNumber >>> 24 & 0xFF, // sequence_number: int32 sequenceNumber >>> 16 & 0xFF, sequenceNumber >>> 8 & 0xFF, sequenceNumber & 0xFF]); return MP4$3.box(MP4$3.types.mfhd, data); } // Track fragment box // 对 fmp4 来说,数据被氛围多个 movie fragment。一个 movie fragment 可包含多个track fragment(每个 track 包含0或多个 track fragment)。每个 track fragment 中,可以包含多个该 track 的 sample。 // 每个 track fragment 中,包含多个 track run,每个 track run 代表一组连续的 sample。 static traf(meta, baseMediaDecodeTime) { let trackId = meta.id; // Track fragment header box // tfhd 用来设置 track fragment 中 的 sample 的 metadata 的默认值。 let tfhd = MP4$3.box(MP4$3.types.tfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) & flags trackId >>> 24 & 0xFF, // track_ID trackId >>> 16 & 0xFF, trackId >>> 8 & 0xFF, trackId & 0xFF])); // Track Fragment Decode Time let tfdt = MP4$3.box(MP4$3.types.tfdt, new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) & flags baseMediaDecodeTime >>> 24 & 0xFF, // baseMediaDecodeTime: int32 baseMediaDecodeTime >>> 16 & 0xFF, baseMediaDecodeTime >>> 8 & 0xFF, baseMediaDecodeTime & 0xFF])); let sdtp = MP4$3.sdtp(meta); let trun = MP4$3.trun(meta, sdtp.byteLength + 16 + 16 + 8 + 16 + 8 + 8); return MP4$3.box(MP4$3.types.traf, tfhd, tfdt, trun, sdtp); } // Sample Dependency Type box static sdtp(meta) { let samples = 
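// For fragmented MP4 the init segment is ftyp + moov (whose mvex/trex carry the defaults), and each
// media fragment is a moof followed by the mdat holding the samples that moof describes. Assembly
// sketch using this class's moof()/mdat() writers (track, baseMediaDecodeTime and sampleBytes are
// assumed inputs):
//   function buildFragment(track, baseMediaDecodeTime, sampleBytes) {
//     const moof = MP4$3.moof(track, baseMediaDecodeTime);
//     const mdat = MP4$3.mdat(sampleBytes);
//     const segment = new Uint8Array(moof.byteLength + mdat.byteLength);
//     segment.set(moof, 0);
//     segment.set(mdat, moof.byteLength);
//     return segment;
//   }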
meta.samples || []; let sampleCount = samples.length; let data = new Uint8Array(4 + sampleCount); // 0~4 bytes: version(0) & flags for (let i = 0; i < sampleCount; i++) { let flags = samples[i].flags; data[i + 4] = flags.isLeading << 6 // is_leading: 2 (bit) | flags.dependsOn << 4 // sample_depends_on | flags.isDependedOn << 2 // sample_is_depended_on | flags.hasRedundancy; // sample_has_redundancy } return MP4$3.box(MP4$3.types.sdtp, data); } // Track fragment run box static trun(meta, offset) { let samples = meta.samples || []; let sampleCount = samples.length; let dataSize = 12 + 16 * sampleCount; let data = new Uint8Array(dataSize); offset += 8 + dataSize; data.set([0x00, 0x00, 0x0F, 0x01, // version(0) & flags sampleCount >>> 24 & 0xFF, // sample_count sampleCount >>> 16 & 0xFF, sampleCount >>> 8 & 0xFF, sampleCount & 0xFF, offset >>> 24 & 0xFF, // data_offset offset >>> 16 & 0xFF, offset >>> 8 & 0xFF, offset & 0xFF], 0); for (let i = 0; i < sampleCount; i++) { let duration = samples[i].duration; let size = samples[i].size; let flags = samples[i].flags; let cts = samples[i].cts; data.set([duration >>> 24 & 0xFF, // sample_duration duration >>> 16 & 0xFF, duration >>> 8 & 0xFF, duration & 0xFF, size >>> 24 & 0xFF, // sample_size size >>> 16 & 0xFF, size >>> 8 & 0xFF, size & 0xFF, flags.isLeading << 2 | flags.dependsOn, // sample_flags flags.isDependedOn << 6 | flags.hasRedundancy << 4 | flags.isNonSync, 0x00, 0x00, // sample_degradation_priority cts >>> 24 & 0xFF, // sample_composition_time_offset cts >>> 16 & 0xFF, cts >>> 8 & 0xFF, cts & 0xFF], 12 + 16 * i); } return MP4$3.box(MP4$3.types.trun, data); } // Media Data Box,存放实际的媒体数据,一般有多个 // Mdat box 中包含了MP4文件的媒体数据,在文件中的位置可以在moov的前面,也可以在moov的后面, // 因我们这里用到MP4文件格式用来写mp4文件,需要计算每一帧媒体数据在文件中的偏移量,为了方便计算,mdat放置moov前面。 // Mdat box数据格式单一,无子box。主要分为box header 和box body,box header中存放box size 和box type(mdat),box body中存放所有媒体数据,媒体数据以sample为数据单元。 // 这里使用时,视频数据中,每一个sample是一个视频帧,存放sample时,需要根据帧数据类型进行拼帧处理后存放。 static mdat(data) { return MP4$3.box(MP4$3.types.mdat, data); } } MP4$3.init(); class MP4RecorderLoader extends CommonLoader$1 { constructor(player) { super(player); this.TAG_NAME = 'recorderMP4'; this._reset(); player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); this._reset(); this.player.debug.log(this.TAG_NAME, 'destroy'); } _reset() { super._reset(); this.totalDuration = 0; this.totalAudioDuration = 0; this.totalByteLength = 0; this.totalAudioByteLength = 0; this.bufferList = []; this.audioBufferList = []; this.cacheTrack = {}; this.audioCacheTrack = {}; this.sequenceNumber = 0; this.audioSequenceNumber = 0; } startRecord() { const debug = this.player.debug; this._isRecording = true; this.player.emit(EVENTS.recording, true); debug.log(this.TAG_NAME, 'start recording'); this.player.emit(EVENTS.recordStart); this.startRecordingInterval(); } // override startRecordingInterval() { this.stopRecordingInterval(); this.recordingInterval = window.setInterval(() => { this.player.emit(EVENTS.recordingTimestamp, this.getTotalDuration()); }, 1000); } formatFmp4Track(payload, isIframe, dts, cts) { const track = { id: 1, sequenceNumber: ++this.sequenceNumber, size: payload.byteLength, dts: dts, cts: cts, isKeyframe: isIframe, data: payload, duration: 0, flags: { isLeading: 0, dependsOn: isIframe ? 2 : 1, isDependedOn: isIframe ? 1 : 0, hasRedundancy: 0, isNonSync: isIframe ? 
0 : 1 } }; return track; } formatAudioFmp4Track(payload, dts) { const audioTrack = { id: 2, sequenceNumber: ++this.audioSequenceNumber, size: payload.byteLength, dts: dts, pts: dts, cts: 0, data: new Uint8Array(payload), duration: 0, originalDts: dts, flags: { isLeading: 0, dependsOn: 1, isDependedOn: 0, hasRedundancy: 0 } }; return audioTrack; } handleAddNaluTrack(payload, isIframe, dts, cts) { if (this.cacheTrack.id && dts >= this.cacheTrack.dts) { // 主要就是为了这个duration 字段 this.cacheTrack.duration = dts - this.cacheTrack.dts; this.handleAddFmp4Track(this.cacheTrack); } else { this.cacheTrack = {}; } this.cacheTrack = this.formatFmp4Track(payload, isIframe, dts, cts); } handleAddAudioTrack(payload, dts) { // if (this.audioCacheTrack.id && dts >= this.audioCacheTrack.dts) { // this.audioCacheTrack.duration = dts - this.audioCacheTrack.dts; // this.handleAddFmp4AudioTrack(this.audioCacheTrack); // } else { // this.audioCacheTrack = {}; // } // this.audioCacheTrack = this.formatAudioFmp4Track(payload, dts); } // handleAddFmp4Track(track) { if (!this.isRecording) { this.player.debug.error(this.TAG_NAME, 'handleAddFmp4Track, isRecording is false '); return; } if (!(this.sps !== null && this.pps !== null) && this.isH264) { this.player.debug.error(this.TAG_NAME, 'handleAddFmp4Track, is h264 and this.sps or this.pps is null '); return; } if (!(this.sps !== null && this.pps !== null && this.vps !== null) && this.isH265) { this.player.debug.error(this.TAG_NAME, 'handleAddFmp4Track, is h265 and this.sps or this.pps or this.vps is null '); return; } const trackItem = Object.assign({}, track); trackItem.pts = trackItem.dts + trackItem.cts; const oldData = trackItem.data; if (trackItem.isKeyframe) { if (this.isH264) { const drFlag = new Uint8Array(this.sps.byteLength + this.pps.byteLength); drFlag.set(this.sps, 0); drFlag.set(this.pps, this.sps.byteLength); const newData = new Uint8Array(drFlag.byteLength + oldData.byteLength); newData.set(drFlag, 0); newData.set(oldData, drFlag.byteLength); trackItem.data = newData; } else if (this.isH265) { const drFlag = new Uint8Array(this.sps.byteLength + this.pps.byteLength + this.vps.byteLength); drFlag.set(this.vps, 0); drFlag.set(this.sps, this.vps.byteLength); drFlag.set(this.pps, this.vps.byteLength + this.sps.byteLength); const newData = new Uint8Array(drFlag.byteLength + oldData.byteLength); newData.set(drFlag, 0); newData.set(oldData, drFlag.byteLength); trackItem.data = newData; } } trackItem.size = trackItem.data.byteLength; this.totalDuration += trackItem.duration; this.totalByteLength += trackItem.data.byteLength; trackItem.duration = 0; trackItem.originalDts = trackItem.dts; delete trackItem.id; delete trackItem.sequenceNumber; this.bufferList.push(trackItem); } handleAddFmp4AudioTrack(track) { const trackItem = Object.assign({}, track); trackItem.pts = trackItem.dts + trackItem.cts; trackItem.size = trackItem.data.byteLength; this.totalAudioDuration += trackItem.duration; this.totalAudioByteLength += trackItem.data.byteLength; trackItem.duration = 0; trackItem.originalDts = trackItem.dts; delete trackItem.id; delete trackItem.sequenceNumber; this.audioBufferList.push(trackItem); } getTotalDuration() { return this.totalDuration / 1000; } getType() { return FILE_SUFFIX.mp4; } // getToTalByteLength() { return this.totalByteLength + this.totalAudioByteLength; } stopRecordAndSave() { let type = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : RECORDING_TYPE.download; let fileName = arguments.length > 1 ? 
arguments[1] : undefined; return new Promise((resolve, reject) => { if (!this.isRecording) { this.player.debug.error(this.TAG_NAME, 'stop recording fail, isRecording is false '); return reject('stop recording fail, isRecording is false '); } if (this.bufferList.length === 0) { this.player.debug.error(this.TAG_NAME, 'stop recording fail, this.bufferList.length is 0 '); return reject('stop recording fail, this.bufferList.length is 0 '); } if (fileName) { this.setFileName(fileName); } const trakItemVideo = { id: 1, type: 'video', sps: this.sps, pps: this.pps, samples: this.bufferList, sequenceNumber: this.bufferList.length, length: 0, addSampleNum: 1, duration: 0, ...this.metaInfo }; const trakItemAudio = { id: 2, type: 'audio', sequenceNumber: this.audioBufferList.length, samples: this.audioBufferList, ...this.audioMetaInfo }; const trackList = [trakItemVideo]; if (trakItemAudio.samples.length > 0) { trackList.push(trakItemAudio); } this.player.debug.log(this.TAG_NAME, `trackList length is ${trackList.length}`); // console.error('trackList', trackList, this.totalByteLength, this.totalAudioByteLength, this.totalDuration, this.totalAudioDuration); const metaBox = MP4$3.generateInitSegment({ timescale: 1000, // flv 默认为1000 duration: this.totalDuration }, trackList, this.totalByteLength + this.totalAudioByteLength); this.player.debug.log(this.TAG_NAME, 'stop recording'); const blob = new Blob([metaBox], { 'type': 'application/octet-stream' }); if (type === RECORDING_TYPE.blob) { resolve(blob); this.player.emit(EVENTS.recordBlob, blob); } else { resolve(); const fileName = (this.fileName || now$2()) + '.' + FILE_SUFFIX.mp4; saveBlobToFile(fileName, blob); } this._reset(); this.player.emit(EVENTS.recording, false); }); } } function mergeArrayBuffer$2(bufferList) { const Cons = bufferList[0].constructor; return bufferList.reduce((pre, val) => { const merge = new Cons((pre.byteLength | 0) + (val.byteLength | 0)); merge.set(pre, 0); merge.set(val, pre.byteLength | 0); return merge; }, new Cons()); } class FlvRecorderLoader extends CommonLoader$1 { constructor(player) { super(player); this.TAG_NAME = 'FlvRecorderLoader'; this.player = player; this._init(); this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); this._init(); this.player.debug.log(this.TAG_NAME, 'destroy'); } _init() { this.hasAudio = false; this.hasVideo = false; this.startTime = null; this.currentTime = 0; this.prevTimestamp = 0; this.totalByteLength = 0; this.totalDuration = 0; this.flvMetaData = null; this.aacSequenceHeader = null; this.videoSequenceHeader = null; this.bufferList = []; } _reset() { super._reset(); this._init(); } startRecord() { const debug = this.player.debug; this._isRecording = true; this.player.emit(EVENTS.recording, true); debug.log(this.TAG_NAME, 'start recording'); this.player.emit(EVENTS.recordStart); this.startRecordingInterval(); } // override startRecordingInterval() { this.stopRecordingInterval(); this.recordingInterval = window.setInterval(() => { this.player.emit(EVENTS.recordingTimestamp, this.getTotalDuration()); }, 1000); } addMetaData(arrayBuffer) { this.flvMetaData = arrayBuffer; } addAACSequenceHeader(arrayBuffer) { this.aacSequenceHeader = arrayBuffer; } addVideoSequenceHeader(arrayBuffer) { this.videoSequenceHeader = arrayBuffer; } addVideo(arrayBuffer, ts) { this._setStartTime(ts); const newTs = this._getBufferTs(ts); this.hasVideo = true; this._createBufferItem(arrayBuffer, FLV_MEDIA_TYPE.video, newTs); } addAudio(arrayBuffer, ts) { this._setStartTime(ts); const newTs = 
this._getBufferTs(ts); this.hasAudio = true; this._createBufferItem(arrayBuffer, FLV_MEDIA_TYPE.audio, newTs); } _setStartTime(ts) { if (this.startTime === null && this._isRecording) { this.startTime = ts; this.player.debug.log(this.TAG_NAME, `_setStartTime is ${ts}`); } } _getBufferTs(ts) { if (ts > this.currentTime) { this.currentTime = ts; } let result = 0; if (this.startTime && ts >= this.startTime) { result = ts - this.startTime; } if (result > this.prevTimestamp) { this.prevTimestamp = result; } else { result = this.prevTimestamp; } return result; } _createBufferItem(uint8Array, type, ts) { const packet = this._createFlvPacket(uint8Array, type, ts); const tag = this._createFlvTag(packet); this.totalByteLength += tag.byteLength; this.bufferList.push(tag); } _createFlvTag(packet) { let PreviousTagSize = 11 + packet.header.length; let tagBuffer = new Uint8Array(PreviousTagSize + 4); tagBuffer[0] = packet.header.type; let dv = new DataView(tagBuffer.buffer); tagBuffer[1] = packet.header.length >> 16 & 0xff; tagBuffer[2] = packet.header.length >> 8 & 0xff; tagBuffer[3] = packet.header.length & 0xff; tagBuffer[4] = packet.header.timestamp >> 16 & 0xff; tagBuffer[5] = packet.header.timestamp >> 8 & 0xff; tagBuffer[6] = packet.header.timestamp & 0xff; tagBuffer[7] = packet.header.timestamp >> 24 & 0xff; tagBuffer[8] = 0; tagBuffer[9] = 0; tagBuffer[10] = 0; dv.setUint32(PreviousTagSize, PreviousTagSize); tagBuffer.set(packet.payload.subarray(0, packet.header.length), 11); return tagBuffer; } _createFlvPacket() { let payload = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; let type = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; let time = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; return { header: { length: payload ? payload.length : 0, timestamp: time, type: type }, payload: payload }; } stopRecordAndSave() { let type = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : RECORDING_TYPE.download; let fileName = arguments.length > 1 ? 
arguments[1] : undefined; return new Promise((resolve, reject) => { if (!this.isRecording) { this.player.debug.error(this.TAG_NAME, 'stop recording fail, isRecording is false '); return reject('stop recording fail, isRecording is false '); } if (this.bufferList.length === 0) { this.player.debug.error(this.TAG_NAME, 'stop recording fail, this.bufferList.length is 0 '); return reject('stop recording fail, this.bufferList.length is 0 '); } if (fileName) { this.setFileName(fileName); } const flvHeader = new Uint8Array([0x46, 0x4c, 0x56, // flv 0x01, 0x00, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00]); if (this.hasVideo) { flvHeader[4] |= 0b00000001; } if (this.hasAudio) { flvHeader[4] |= 0b00000100; } let tempBufferList = [flvHeader]; // script tag if (this.flvMetaData) { const packet = this._createFlvPacket(this.flvMetaData, FLV_MEDIA_TYPE.scriptData); const tag = this._createFlvTag(packet); tempBufferList.push(tag); } // video sequence header if (this.videoSequenceHeader) { const packet = this._createFlvPacket(this.videoSequenceHeader, FLV_MEDIA_TYPE.video); const tag = this._createFlvTag(packet); tempBufferList.push(tag); } // aac sequence header if (this.aacSequenceHeader) { const packet = this._createFlvPacket(this.aacSequenceHeader, FLV_MEDIA_TYPE.audio); const tag = this._createFlvTag(packet); tempBufferList.push(tag); } let bufferList = tempBufferList.concat(this.bufferList); const mergeBuffer = mergeArrayBuffer$2(bufferList); this.player.debug.log(this.TAG_NAME, 'stop recording'); const blob = new Blob([mergeBuffer], { 'type': 'application/octet-stream' }); if (type === RECORDING_TYPE.blob) { resolve(blob); this.player.emit(EVENTS.recordBlob, blob); } else { resolve(); const fileName = (this.fileName || now$2()) + '.' + FILE_SUFFIX.flv; saveBlobToFile(fileName, blob); } this._reset(); this.player.emit(EVENTS.recording, false); }); } getTotalDuration() { let result = 0; if (this.startTime !== null && this.currentTime !== null) { result = this.currentTime - this.startTime; } // round to seconds return Math.round(result / 1000); } getType() { return FILE_SUFFIX.flv; } // getToTalByteLength() { return this.totalByteLength; } } const mp3FrameParseStats = { init: 0, findFirstStartCode: 1, findSecondStartCode: 2 }; class Mp3FrameParseLoader extends Emitter { constructor(player) { super(); this.player = player; this.isDestroyed = false; this.reset(); } destroy() { this.isDestroyed = true; this.off(); this.reset(); } reset() { this.state = mp3FrameParseStats.init; this.tempBuffer = new Uint8Array(0); this.parsedOffset = 0; this.versionLayer = 0; } dispatch(data, ts) { let newBuffer = new Uint8Array(this.tempBuffer.length + data.length); newBuffer.set(this.tempBuffer, 0); newBuffer.set(data, this.tempBuffer.length); this.tempBuffer = newBuffer; while (1) { if (this.isDestroyed) { break; } if (this.state == mp3FrameParseStats.init) { let bf = false; while (this.tempBuffer.length - this.parsedOffset >= 2) { if (this.isDestroyed) { break; } // an MP3 frame header starts with 11 consecutive 1-bits if (this.tempBuffer[this.parsedOffset] != 0xFF) { this.parsedOffset++; continue; } if ((this.tempBuffer[this.parsedOffset + 1] & 0xE0) != 0xE0) { this.parsedOffset++; continue; } this.versionLayer = this.tempBuffer[this.parsedOffset + 1]; this.state = mp3FrameParseStats.findFirstStartCode; this.fisrtStartCodeOffset = this.parsedOffset; this.parsedOffset += 2; bf = true; break; } if (bf) { continue; } else { break; } } else if (this.state == mp3FrameParseStats.findFirstStartCode) { let bf = false; while (this.tempBuffer.length - this.parsedOffset >= 2)
{ if (this.isDestroyed) { break; } if (this.tempBuffer[this.parsedOffset] != 0xFF) { this.parsedOffset++; continue; } if (this.tempBuffer[this.parsedOffset + 1] != this.versionLayer) { this.parsedOffset++; continue; } this.state = mp3FrameParseStats.findSecondStartCode; this.secondStartCodeOffset = this.parsedOffset; this.parsedOffset += 2; bf = true; break; } if (bf) { continue; } else { break; } } else if (this.state == mp3FrameParseStats.findSecondStartCode) { let lastFrame = this.tempBuffer.slice(this.fisrtStartCodeOffset, this.secondStartCodeOffset); this.emit('data', lastFrame, ts); this.tempBuffer = this.tempBuffer.slice(this.secondStartCodeOffset); this.fisrtStartCodeOffset = 0; this.parsedOffset = 2; this.state = mp3FrameParseStats.findFirstStartCode; } } } } class WasmMp4Loader extends CommonLoader$1 { constructor(player) { super(player); this.TAG_NAME = 'recorderWasmMP4'; this._reset(); // config wasmMp4Recorder this.wasmMp4Recorder = new window.JessibucaProMp4Recorder({ debug: player._opt.debug, debugLevel: player._opt.debugLevel, debugUuid: player._opt.debugUuid, decoder: player._opt.wasmMp4RecorderDecoder }); this.mp3Demuxer = null; this.isG711 = false; player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); if (this.mp3Demuxer) { this.mp3Demuxer.destroy(); this.mp3Demuxer = null; } this.isG711 = false; this._reset(); this.player.debug.log(this.TAG_NAME, 'destroy'); } _reset() { super._reset(); this.cacheTrack = {}; this.audioCacheTrack = {}; this.totalDuration = 0; this.totalAudioDuration = 0; this.totalByteLength = 0; this.totalAudioByteLength = 0; this.hasAudio = false; this.hasVideo = false; } getType() { return FILE_SUFFIX.mp4; } isWasmMp4() { return true; } getTotalDuration() { return this.totalDuration / 1000; } // getToTalByteLength() { return this.totalByteLength + this.totalAudioByteLength; } startRecord() { const debug = this.player.debug; const audioInfo = this.player.getAudioInfo(); const videoInfo = this.player.getVideoInfo(); const params = {}; if (this.codecId) { const video = { type: this.codecId, width: videoInfo.width, height: videoInfo.height, extraData: this.metaInfo.avcc }; params.video = video; this.hasVideo = true; } if (audioInfo.encTypeCode) { this.isG711 = audioInfo.encTypeCode === AUDIO_ENC_CODE.ALAW || audioInfo.encTypeCode === AUDIO_ENC_CODE.MULAW; const audio = { type: audioInfo.encTypeCode, sampleRate: audioInfo.sampleRate, channels: audioInfo.channels, extraData: this.audioMetaInfo.extraData, depth: audioInfo.depth }; this.audioCodeId = audioInfo.encTypeCode; params.audio = audio; this.hasAudio = true; } this.wasmMp4Recorder.startRecord(params).then(() => { this._isRecording = true; this.player.emit(EVENTS.recording, true); debug.log(this.TAG_NAME, 'start recording'); this.player.emit(EVENTS.recordStart); this.startRecordingInterval(); }).catch(e => { debug.error(this.TAG_NAME, 'startRecord error', e); this.player.emitError(EVENTS.recordCreateError, e); }); } startRecordingInterval() { this.stopRecordingInterval(); this.recordingInterval = window.setInterval(() => { this.player.emit(EVENTS.recordingTimestamp, this.getTotalDuration()); }, 1000); } stopRecordAndSave() { let type = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : RECORDING_TYPE.download; let fileName = arguments.length > 1 ? 
arguments[1] : undefined; return new Promise((resolve, reject) => { if (!this.isRecording) { this.player.debug.error(this.TAG_NAME, 'stop recording fail, isRecording is false '); return reject('stop recording fail, isRecording is false '); } if (this.totalDuration === 0) { this.player.debug.error(this.TAG_NAME, 'stop recording fail, totalDuration is 0 '); return reject('stop recording fail, totalDuration is 0 '); } if (fileName) { this.setFileName(fileName); } this.wasmMp4Recorder.stopRecord().then(blob => { if (type === RECORDING_TYPE.blob) { resolve(blob); this.player.emit(EVENTS.recordBlob, blob); } else { resolve(); const suffix = this.isG711 ? FILE_SUFFIX.mov : FILE_SUFFIX.mp4; const fileName = (this.fileName || now$2()) + '.' + suffix; saveBlobToFile(fileName, blob); } }).catch(e => { this.player.debug.error(this.TAG_NAME, 'stopRecord error', e); reject(e); }).finally(() => { this._reset(); this.player.emit(EVENTS.recording, false); }); }); } handleAddAudioTrack(payload, dts) { if (this.audioCodeId === AUDIO_ENC_CODE.MP3) { if (!this.mp3Demuxer) { this.mp3Demuxer = new Mp3FrameParseLoader(this.player); this.mp3Demuxer.on('data', (audioBuffer, audioTs) => { this._handleAddAudioTrack(audioBuffer, audioTs); }); } this.mp3Demuxer.dispatch(payload, dts); } else { this._handleAddAudioTrack(payload, dts); } } _handleAddAudioTrack(payload, dts) { if (isFalse(this.hasAudio)) { return; } if (this.audioCacheTrack.id && dts >= this.audioCacheTrack.dts) { this.audioCacheTrack.duration = dts - this.audioCacheTrack.dts; this.totalAudioDuration += this.audioCacheTrack.duration; this.totalAudioByteLength += this.audioCacheTrack.payload.byteLength; this.wasmMp4Recorder.sendAudioFrame(this.audioCacheTrack.payload, this.audioCacheTrack.dts); } else { this.audioCacheTrack = {}; } this.audioCacheTrack = { id: 2, payload: payload, dts: dts }; } handleAddNaluTrack(payload, isIFrame, dts, cts) { if (isFalse(this.hasVideo)) { return; } if (this.cacheTrack.id && dts >= this.cacheTrack.dts) { this.cacheTrack.duration = dts - this.cacheTrack.dts; this.totalDuration += this.cacheTrack.duration; this.totalByteLength += this.cacheTrack.payload.byteLength; this.wasmMp4Recorder.sendVideoFrame(this.cacheTrack.payload, this.cacheTrack.isIFrame, this.cacheTrack.dts, this.cacheTrack.cts); } else { this.cacheTrack = {}; } this.cacheTrack = { id: 1, payload: payload, isIFrame: isIFrame, dts: dts, cts: cts }; } } class Recorder { constructor(player) { const Loader = Recorder.getLoaderFactory(player._opt); return new Loader(player); } static getLoaderFactory(opt) { if (opt.recordType === FILE_SUFFIX.mp4) { if (opt.useWasm || opt.useMSE || opt.useWCS) { if (window.JessibucaProMp4Recorder && opt.mp4RecordUseWasm) { return WasmMp4Loader; } return MP4RecorderLoader; } } else if (opt.recordType === FILE_SUFFIX.flv) { return FlvRecorderLoader; } // default use RecordRTC return RecordRTCLoader; } } function checkContinuation(uint8array, start, checkLength) { let array = uint8array; if (start + checkLength < array.length) { while (checkLength--) { if ((array[++start] & 0xC0) !== 0x80) return false; } return true; } else { return false; } } function decodeUTF8(uint8array) { let out = []; let input = uint8array; let i = 0; let length = uint8array.length; while (i < length) { if (input[i] < 0x80) { out.push(String.fromCharCode(input[i])); ++i; continue; } else if (input[i] < 0xC0) ; else if (input[i] < 0xE0) { if (checkContinuation(input, i, 1)) { let ucs4 = (input[i] & 0x1F) << 6 | input[i + 1] & 0x3F; if (ucs4 >= 0x80) { 
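// 2-byte UTF-8 sequence (110xxxxx 10xxxxxx): only code points U+0080..U+07FF are accepted here; smaller values would be overlong encodings and fall through to the U+FFFD replacement branch below.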
out.push(String.fromCharCode(ucs4 & 0xFFFF)); i += 2; continue; } } } else if (input[i] < 0xF0) { if (checkContinuation(input, i, 2)) { let ucs4 = (input[i] & 0xF) << 12 | (input[i + 1] & 0x3F) << 6 | input[i + 2] & 0x3F; if (ucs4 >= 0x800 && (ucs4 & 0xF800) !== 0xD800) { out.push(String.fromCharCode(ucs4 & 0xFFFF)); i += 3; continue; } } } else if (input[i] < 0xF8) { if (checkContinuation(input, i, 3)) { let ucs4 = (input[i] & 0x7) << 18 | (input[i + 1] & 0x3F) << 12 | (input[i + 2] & 0x3F) << 6 | input[i + 3] & 0x3F; if (ucs4 > 0x10000 && ucs4 < 0x110000) { ucs4 -= 0x10000; out.push(String.fromCharCode(ucs4 >>> 10 | 0xD800)); out.push(String.fromCharCode(ucs4 & 0x3FF | 0xDC00)); i += 4; continue; } } } out.push(String.fromCharCode(0xFFFD)); ++i; } return out.join(''); } let le = function () { let buf = new ArrayBuffer(2); new DataView(buf).setInt16(0, 256, true); // little-endian write return new Int16Array(buf)[0] === 256; // platform-spec read, if equal then LE }(); class AMF { static parseScriptData(arrayBuffer, dataOffset, dataSize) { let data = {}; try { let name = AMF.parseValue(arrayBuffer, dataOffset, dataSize); let value = AMF.parseValue(arrayBuffer, dataOffset + name.size, dataSize - name.size); data[name.data] = value.data; } catch (e) { console.error('AMF', e.toString()); } return data; } static parseObject(arrayBuffer, dataOffset, dataSize) { let name = AMF.parseString(arrayBuffer, dataOffset, dataSize); let value = AMF.parseValue(arrayBuffer, dataOffset + name.size, dataSize - name.size); let isObjectEnd = value.objectEnd; return { data: { name: name.data, value: value.data }, size: name.size + value.size, objectEnd: isObjectEnd }; } static parseVariable(arrayBuffer, dataOffset, dataSize) { return AMF.parseObject(arrayBuffer, dataOffset, dataSize); } static parseString(arrayBuffer, dataOffset, dataSize) { let v = new DataView(arrayBuffer, dataOffset, dataSize); let length = v.getUint16(0, !le); let str; if (length > 0) { str = decodeUTF8(new Uint8Array(arrayBuffer, dataOffset + 2, length)); } else { str = ''; } return { data: str, size: 2 + length }; } static parseLongString(arrayBuffer, dataOffset, dataSize) { let v = new DataView(arrayBuffer, dataOffset, dataSize); let length = v.getUint32(0, !le); let str; if (length > 0) { str = decodeUTF8(new Uint8Array(arrayBuffer, dataOffset + 4, length)); } else { str = ''; } return { data: str, size: 4 + length }; } static parseDate(arrayBuffer, dataOffset, dataSize) { let v = new DataView(arrayBuffer, dataOffset, dataSize); let timestamp = v.getFloat64(0, !le); let localTimeOffset = v.getInt16(8, !le); timestamp += localTimeOffset * 60 * 1000; // get UTC time return { data: new Date(timestamp), size: 8 + 2 }; } static parseValue(arrayBuffer, dataOffset, dataSize) { let v = new DataView(arrayBuffer, dataOffset, dataSize); let offset = 1; let type = v.getUint8(0); let value; let objectEnd = false; try { switch (type) { case 0: // Number(Double) type value = v.getFloat64(1, !le); offset += 8; break; case 1: { // Boolean type let b = v.getUint8(1); value = b ? 
true : false; offset += 1; break; } case 2: { // String type let amfstr = AMF.parseString(arrayBuffer, dataOffset + 1, dataSize - 1); value = amfstr.data; offset += amfstr.size; break; } case 3: { // Object(s) type value = {}; let terminal = 0; // workaround for malformed Objects which has missing ScriptDataObjectEnd if ((v.getUint32(dataSize - 4, !le) & 0x00FFFFFF) === 9) { terminal = 3; } while (offset < dataSize - 4) { // 4 === type(UI8) + ScriptDataObjectEnd(UI24) let amfobj = AMF.parseObject(arrayBuffer, dataOffset + offset, dataSize - offset - terminal); if (amfobj.objectEnd) break; value[amfobj.data.name] = amfobj.data.value; offset += amfobj.size; } if (offset <= dataSize - 3) { let marker = v.getUint32(offset - 1, !le) & 0x00FFFFFF; if (marker === 9) { offset += 3; } } break; } case 8: { // ECMA array type (Mixed array) value = {}; offset += 4; // ECMAArrayLength(UI32) let terminal = 0; // workaround for malformed MixedArrays which has missing ScriptDataObjectEnd if ((v.getUint32(dataSize - 4, !le) & 0x00FFFFFF) === 9) { terminal = 3; } while (offset < dataSize - 8) { // 8 === type(UI8) + ECMAArrayLength(UI32) + ScriptDataVariableEnd(UI24) let amfvar = AMF.parseVariable(arrayBuffer, dataOffset + offset, dataSize - offset - terminal); if (amfvar.objectEnd) break; value[amfvar.data.name] = amfvar.data.value; offset += amfvar.size; } if (offset <= dataSize - 3) { let marker = v.getUint32(offset - 1, !le) & 0x00FFFFFF; if (marker === 9) { offset += 3; } } break; } case 9: // ScriptDataObjectEnd value = undefined; offset = 1; objectEnd = true; break; case 10: { // Strict array type // ScriptDataValue[n]. NOTE: according to video_file_format_spec_v10_1.pdf value = []; let strictArrayLength = v.getUint32(1, !le); offset += 4; for (let i = 0; i < strictArrayLength; i++) { let val = AMF.parseValue(arrayBuffer, dataOffset + offset, dataSize - offset); value.push(val.data); offset += val.size; } break; } case 11: { // Date type let date = AMF.parseDate(arrayBuffer, dataOffset + 1, dataSize - 1); value = date.data; offset += date.size; break; } case 12: { // Long string type let amfLongStr = AMF.parseString(arrayBuffer, dataOffset + 1, dataSize - 1); value = amfLongStr.data; offset += amfLongStr.size; break; } default: // ignore and skip offset = dataSize; console.warn('AMF', 'Unsupported AMF value type ' + type); } } catch (e) { console.error('AMF', e.toString()); } return { data: value, size: offset, objectEnd: objectEnd }; } } function parseFlvScriptData(arrayBuffer) { return AMF.parseScriptData(arrayBuffer.buffer, arrayBuffer.byteOffset, arrayBuffer.byteLength); } class DecoderWorker { constructor(player) { this.player = player; this.destroyResolve = null; this.workerClearTimeout = null; this.workerUrl = null; let decoder = player._opt.decoder; this.decoderWorkerCloseTimeout = null; // wcs or mse if (isFalse(this.player._opt.useWasm)) { if (this.player._opt.demuxUseWorker) { // mse decoder audio + not audio if (this.player._opt.mseDecodeAudio || isFalse(this.player._opt.hasAudio)) { decoder = player._opt.decoderHardNotWasm; } else { decoder = player._opt.decoderHard; } } else { decoder = player._opt.decoderAudio; } } if (decoder.indexOf('http') === 0 && this.player._opt.isDecoderUseCDN) { const blob = new Blob([`importScripts("${decoder}")`], { "type": 'application/javascript' }); decoder = window.URL.createObjectURL(blob); this.workerUrl = decoder; // 必须要释放,不然每次调用内存都明显泄露内存 //chrome 83 file协议下如果直接释放,将会使WebWorker无法启动 this.workerClearTimeout = setTimeout(() => { 
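// Delayed revoke: the temporary blob: URL must be released to avoid leaking memory on every call, but revoking it immediately can prevent the Worker from starting under the file:// protocol (observed on Chrome 83), so it is cleared after URL_OBJECT_CLEAR_TIME.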
window.URL.revokeObjectURL(this.workerUrl); this.workerUrl = null; this.workerClearTimeout = null; }, URL_OBJECT_CLEAR_TIME); } this.decoderWorker = new Worker(decoder); this._initDecoderWorker(); player.debug.log('decoderWorker', `init and decoder url is ${decoder}`); player.on(EVENTS.visibilityChange, () => { this.updateWorkConfig({ key: 'visibility', value: player.visibility }); }); } destroy() { return new Promise((resolve, reject) => { if (this.player.loaded) { this.player.debug.log('decoderWorker', 'has loaded and post message to destroy'); if (this.decoderWorker) { this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.close }); this.destroyResolve = resolve; // 需要添加个防御如果worker 线程没有返回,则直接触发超时逻辑。 this.decoderWorkerCloseTimeout = setTimeout(() => { this.player.debug.warn('decoderWorker', 'send close but not response and destroy directly'); if (this.decoderWorkerCloseTimeout) { clearTimeout(this.decoderWorkerCloseTimeout); this.decoderWorkerCloseTimeout = null; } this._destroy(); setTimeout(() => { resolve(); }, 0); }, 2 * 1000); } else { this.player.debug.warn('decoderWorker', 'has loaded but decoderWorker is null and destroy directly'); this._destroy(); setTimeout(() => { resolve(); }, 0); } } else { this.player.debug.log('decoderWorker', 'has not loaded and destroy directly'); this._destroy(); setTimeout(() => { resolve(); }, 0); } }); } _destroy() { if (this.decoderWorkerCloseTimeout) { clearTimeout(this.decoderWorkerCloseTimeout); this.decoderWorkerCloseTimeout = null; } if (this.workerUrl) { window.URL.revokeObjectURL(this.workerUrl); this.workerUrl = null; } if (this.workerClearTimeout) { clearTimeout(this.workerClearTimeout); this.workerClearTimeout = null; } if (this.decoderWorker) { this.decoderWorker.terminate(); this.decoderWorker.onerror = null; this.decoderWorker.onmessageerror = null; this.decoderWorker.onmessage = null; this.decoderWorker = null; } this.player.debug.log(`decoderWorker`, 'destroy'); if (this.destroyResolve) { this.destroyResolve(); this.destroyResolve = null; } } _initDecoderWorker() { const { debug, events: { proxy } } = this.player; // this.decoderWorker.onerror = e => { this.player.debug.error(`decoderWorker`, 'onerror', e); this.player.emitError(EVENTS_ERROR.decoderWorkerInitError, e); }; this.decoderWorker.onmessageerror = e => { this.player.debug.error(`decoderWorker`, 'onmessageerror', e); }; this.decoderWorker.onmessage = event => { const msg = event.data; switch (msg.cmd) { case WORKER_CMD_TYPE.init: debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.init); // if (this.decoderWorker) { this._initWork(); } if (!this.player.loaded) { this.player.emit(EVENTS.load); } this.player.emit(EVENTS.decoderWorkerInit); break; case WORKER_CMD_TYPE.videoCode: debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.videoCode, msg.code); if (!this.player._times.decodeStart) { this.player._times.decodeStart = now$2(); } this.player.video.updateVideoInfo({ encTypeCode: msg.code }); break; case WORKER_CMD_TYPE.videoCodec: debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.videoCodec, msg.codecId); if (this.player.recorder) { this.player.recorder.initMetaData(msg.buffer, msg.codecId); } this.player.video.updateVideoInfo({ encTypeCode: msg.codecId }); break; case WORKER_CMD_TYPE.audioCode: debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.audioCode, msg.code); this.player.audio && this.player.audio.updateAudioInfo({ encTypeCode: msg.code }); break; case WORKER_CMD_TYPE.audioAACSequenceHeader: debug.log(`decoderWorker`, 'onmessage:', 
WORKER_CMD_TYPE.audioAACSequenceHeader); if (this.player.recorder) { this.player.recorder.initAudioAacExtraData(msg.buffer); } break; case WORKER_CMD_TYPE.initVideo: debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.initVideo, `width:${msg.w},height:${msg.h}`); if (isEmpty(msg.w) || isEmpty(msg.h)) { this.player.emitError(EVENTS_ERROR.videoInfoError, `video width ${msg.w} or height ${msg.h} is empty`); return; } this.player.video.updateVideoInfo({ width: msg.w, height: msg.h }); // just for canvas render if (!this.player._opt.openWebglAlignment && !isWebglRenderSupport(msg.w) && this.player.getRenderType() === RENDER_TYPE.canvas) { this.player.emitError(EVENTS_ERROR.webglAlignmentError); return; } this.player.video.initCanvasViewSize(); if (this.player._opt.playType === PLAY_TYPE.playbackTF) { this.player.video.initFps(); this.player.video.initVideoDelay(); } break; case WORKER_CMD_TYPE.initAudio: debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.initAudio, `channels:${msg.channels},sampleRate:${msg.sampleRate}`); if (msg.channels > AUDIO_CHANNEL_MAX) { this.player.emitError(EVENTS_ERROR.audioChannelError, `audio channel is ${msg.channels}, max is ${AUDIO_CHANNEL_MAX}`); return; } if (this.player.audio) { this.player.audio.updateAudioInfo(msg); if (this.player._opt.playType === PLAY_TYPE.player) { this.player.audio.initScriptNode(); } else if (this.player._opt.playType === PLAY_TYPE.playbackTF) { this.player.audio.initScriptNodeDelay(); } } break; case WORKER_CMD_TYPE.render: if (!this.player.video) { debug.warn('decoderWorker', 'onmessage render but video is null'); return; } // debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.render, `msg ts:${msg.ts}`); if (this.player.isPlayer()) { // un caught typeError: failed to construct 'VideoFrame': codedWidth muse be nonzero; if (isFalse(this.player.video.getHasInit())) { debug.warn('decoderWorker', 'onmessage render but video has not init'); return; } this.player.video.render(msg); this.player.handleRender(); this.player.emit(EVENTS.timeUpdate, msg.ts); this.player.updateStats({ dfps: true, buf: msg.delay }); if (!this.player._times.videoStart) { this.player._times.videoStart = now$2(); this.player.handlePlayToRenderTimes(); } } else if (this.player.isPlayback()) { this.player.updateStats({ dfps: true }); if (isFalse(this.player.playbackPause)) { if (this.player.playback.isUseLocalCalculateTime) { this.player.playback.increaseLocalTimestamp(); } if (this.player.playback.isUseFpsRender) { this.player.video.pushData(msg); } else { this.player.video.render$2(msg); } } else { // 暂停的时候,如果不需要清除缓存 if (!this.player.playback.isPlaybackPauseClearCache && this.player.playback.isCacheBeforeDecodeForFpsRender) { if (this.player.playback.isUseFpsRender) { this.player.video.pushData(msg); } } } } break; case WORKER_CMD_TYPE.videoNalu: if (this.player.recorder && this.player.recorder.isRecording && this.player._opt.recordType === FILE_SUFFIX.mp4) { this.player.recorder.handleAddNaluTrack(msg.buffer, msg.isIFrame, msg.ts, msg.cts); } break; case WORKER_CMD_TYPE.audioNalu: if (this.player.recorder && this.player.recorder.isRecording && this.player._opt.recordType === FILE_SUFFIX.mp4 && this.player.recorder.isWasmMp4()) { this.player.recorder.handleAddAudioTrack(msg.buffer, msg.ts); } break; case WORKER_CMD_TYPE.videoPayload: const { webcodecsDecoder, mseDecoder } = this.player; this.player.updateStats({ buf: msg.delay }); const uint8Array = new Uint8Array(msg.payload); if (this.player._opt.useWCS && !this.player._opt.useOffscreen) { 
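// WebCodecs path (useWCS without offscreen): decode the demuxed payload with the main-thread WebCodecs decoder; the useMSE branch below feeds the same payload to the MSE decoder instead.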
webcodecsDecoder.decodeVideo(uint8Array, msg.ts, msg.isIFrame, msg.cts); } else if (this.player._opt.useMSE) { mseDecoder.decodeVideo(uint8Array, msg.ts, msg.isIFrame, msg.cts); } break; case WORKER_CMD_TYPE.audioPayload: if (this.player._opt.useMSE) { const uint8Array = new Uint8Array(msg.payload); this.player.mseDecoder.decodeAudio(uint8Array, msg.ts, msg.cts); } break; case WORKER_CMD_TYPE.playAudio: // debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.playAudio, `msg ts:${msg.ts}`); // 只有在 playing 的时候。 // 或者 设置hasVideo 为false的情况 if (!this.player.audio) { debug.warn('decoderWorker', 'onmessage playAudio but audio is null'); return; } if (this.player.playing && this.player.audio || !this.player.video) { // 如果不解码video if (!this.player._opt.hasVideo) { this.player.handleRender(); } if (this.player._opt.playType === PLAY_TYPE.player) { this.player.audio.play(msg.buffer, msg.ts); } else if (this.player._opt.playType === PLAY_TYPE.playbackTF) { if (isFalse(this.player.playbackPause)) { this.player.audio.play(msg.buffer, msg.ts); } else { if (!this.player.playback.isPlaybackPauseClearCache && this.player.playback.isCacheBeforeDecodeForFpsRender) { if (this.player.playback.isUseFpsRender) { this.player.audio.play(msg.buffer, msg.ts); } } } } } break; case WORKER_CMD_TYPE.workerFetch: // debug.log(`decoderWorker`, `workerFetch: ${msg.type},${msg.value}`); if (msg.type === EVENTS.streamSuccess) { if (this.player.stream) { this.player.stream.emit(EVENTS.streamSuccess); } else { debug.warn('decoderWorker', `onmessage and workerFetch response stream success but stream is null`); } } else if (msg.type === EVENTS.streamRate) { this.player.emit(EVENTS.kBps, (msg.value / 1024).toFixed(2)); } else if (msg.type === EVENTS.streamEnd) { if (this.player) { if (msg.value === PLAYER_STREAM_TYPE.websocket) { this.player.emit(EVENTS.websocketClose, msg.msg); } if (this.player.stream) { this.player.stream.emit(EVENTS.streamEnd, msg.msg); } else { debug && debug.warn('decoderWorker', `onmessage and workerFetch response stream end but player.stream is null`); } } else { debug && debug.warn('decoderWorker', `onmessage and workerFetch response stream end but player is null`); } } else if (msg.type === EVENTS_ERROR.websocketError) { if (this.player && this.player.stream) { this.player.stream.emit(EVENTS_ERROR.websocketError, msg.value); } else { debug && debug.warn('decoderWorker', `onmessage and workerFetch response websocket error but stream is null`); } } else if (msg.type === EVENTS_ERROR.fetchError) { if (this.player && this.player.stream) { this.player.stream.emit(EVENTS_ERROR.fetchError, msg.value); } else { debug && debug.warn('decoderWorker', `onmessage and workerFetch response fetch error but stream is null`); } } else if (msg.type === EVENTS.streamAbps) { this.player.updateStats({ abps: msg.value }); } else if (msg.type === EVENTS.streamVbps) { if (!this.player._times.demuxStart) { this.player._times.demuxStart = now$2(); } this.player.updateStats({ vbps: msg.value }); } else if (msg.type === EVENTS.streamDts) { this.player.updateStats({ dts: msg.value }); } else if (msg.type === EVENTS.netBuf) { this.player.updateStats({ netBuf: msg.value }); } else if (msg.type === EVENTS.networkDelayTimeout) { this.player.emit(EVENTS.networkDelayTimeout, msg.value); } else if (msg.type === EVENTS.streamStats) { const obj = JSON.parse(msg.value); this.player.updateStats({ workerStats: obj }); } else if (msg.type === EVENTS.websocketOpen) { this.player.emit(EVENTS.websocketOpen); } break; case 
WORKER_CMD_TYPE.iframeIntervalTs: if (this.player) { this.player.videoIframeIntervalTs = msg.value; } break; case WORKER_CMD_TYPE.isDropping: if (this.player) { this.player.updateStats({ isDropping: true }); } break; case WORKER_CMD_TYPE.checkFirstIFrame: this.player.decoderCheckFirstIFrame(); break; case WORKER_CMD_TYPE.playbackStreamVideoFps: if (this.player && this.player.video) { this.player.video.setStreamFps(msg.value); } break; case WORKER_CMD_TYPE.wasmError: if (msg.message) { if (msg.message.indexOf(WASM_ERROR.invalidNalUnitSize) !== -1) { this.player.emitError(EVENTS_ERROR.wasmDecodeError, ''); } } break; case WORKER_CMD_TYPE.wasmDecodeVideoNoResponseError: this.player.emitError(EVENTS_ERROR.wasmDecodeVideoNoResponseError); break; case WORKER_CMD_TYPE.simdH264DecodeVideoWidthIsTooLarge: this.player.emitError(EVENTS_ERROR.simdH264DecodeVideoWidthIsTooLarge); break; case WORKER_CMD_TYPE.wasmWidthOrHeightChange: this.player.emitError(EVENTS_ERROR.wasmWidthOrHeightChange); break; case WORKER_CMD_TYPE.simdDecodeError: this.player.emitError(EVENTS_ERROR.simdDecodeError); break; case WORKER_CMD_TYPE.workerEnd: debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.workerEnd); break; case WORKER_CMD_TYPE.closeEnd: debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.closeEnd); this._destroy(); break; case WORKER_CMD_TYPE.tempStream: if (this.player) { this.player.pushTempStream(msg.buffer); } break; case WORKER_CMD_TYPE.videoSEI: if (this.player) { this.player.emit(EVENTS.videoSEI, { ts: msg.ts, data: new Uint8Array(msg.buffer) }); } break; case WORKER_CMD_TYPE.flvScriptData: if (this.player) { if (this.player.isRecordTypeFlv()) { const payloadCopy = new Uint8Array(msg.buffer); this.player.recorder.addMetaData(payloadCopy); } const payload = new Uint8Array(msg.buffer); const scriptObj = parseFlvScriptData(payload); if (scriptObj && scriptObj.onMetaData) { this.player.updateMetaData(scriptObj.onMetaData); } } break; case WORKER_CMD_TYPE.aacSequenceHeader: if (this.player && this.player.isRecordTypeFlv()) { const payloadCopy = new Uint8Array(msg.buffer); this.player.recorder.addAACSequenceHeader(payloadCopy, msg.ts); } break; case WORKER_CMD_TYPE.videoSequenceHeader: if (this.player && this.player.isRecordTypeFlv()) { const payloadCopy = new Uint8Array(msg.buffer); this.player.recorder.addVideoSequenceHeader(payloadCopy, msg.ts); } break; case WORKER_CMD_TYPE.flvBufferData: if (this.player && this.player.isRecordTypeFlv() && this.player.recording) { const payloadCopy = new Uint8Array(msg.buffer); if (msg.type === MEDIA_TYPE.video) { this.player.recorder.addVideo(payloadCopy, msg.ts); } else if (msg.type === MEDIA_TYPE.audio) { this.player.recorder.addAudio(payloadCopy, msg.ts); } } break; case WORKER_CMD_TYPE.mseHandle: if (this.player) { this.player.debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.mseHandle); this.player.video.$videoElement.srcObject = msg.mseHandle; } break; case WORKER_CMD_TYPE.mseFirstRenderTime: if (this.player) { this.player.debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.mseFirstRenderTime, msg.value); this.player._mseWorkerData.firstRenderTime = Number(msg.value); } break; case WORKER_CMD_TYPE.mseError: if (this.player) { this.player.debug.log(`decoderWorker`, 'onmessage:', WORKER_CMD_TYPE.mseError, msg.value, msg.msg); this.player.emitError(msg.value, msg.msg); } break; default: this.player[msg.cmd] && this.player[msg.cmd](msg); break; } }; } _initWork() { const opt = { debug: this.player._opt.debug, debugLevel: this.player._opt.debugLevel, 
debugUuid: this.player._opt.debugUuid, useOffscreen: this.player._opt.useOffscreen, useWCS: this.player._opt.useWCS, useMSE: this.player._opt.useMSE, videoBuffer: this.player._opt.videoBuffer, videoBufferDelay: this.player._opt.videoBufferDelay, openWebglAlignment: this.player._opt.openWebglAlignment, playType: this.player._opt.playType, hasAudio: this.player._opt.hasAudio, hasVideo: this.player._opt.hasVideo, playbackRate: 1, playbackForwardMaxRateDecodeIFrame: this.player._opt.playbackForwardMaxRateDecodeIFrame, playbackIsCacheBeforeDecodeForFpsRender: this.player._opt.playbackConfig.isCacheBeforeDecodeForFpsRender, sampleRate: this.player.audio && this.player.audio.audioContext && this.player.audio.audioContext.sampleRate || 0, audioBufferSize: this.player.audio && this.player.audio.getAudioBufferSize() || 1024, networkDelay: this.player._opt.networkDelay, visibility: this.player.visibility, useSIMD: this.player._opt.useSIMD, recordType: this.player._opt.recordType, checkFirstIFrame: this.player._opt.checkFirstIFrame, isM7sCrypto: this.player._opt.isM7sCrypto, isXorCrypto: this.player._opt.isXorCrypto, isSm4Crypto: this.player._opt.isSm4Crypto, sm4CryptoKey: this.player._opt.sm4CryptoKey, m7sCryptoAudio: this.player._opt.m7sCryptoAudio, isFlv: this.player._opt.isFlv, isFmp4: this.player._opt.isFmp4, isMpeg4: this.player._opt.isMpeg4, isTs: this.player._opt.isTs, isNakedFlow: this.player._opt.isNakedFlow, isHls265: this.player.isUseHls265(), isFmp4Private: this.player._opt.isFmp4Private, isEmitSEI: this.player._opt.isEmitSEI, isRecordTypeFlv: this.player.isRecordTypeFlv(), isWasmMp4: this.player.recorder && this.player.recorder.isWasmMp4() || false, isChrome: isChrome(), isDropSameTimestampGop: this.player._opt.isDropSameTimestampGop, mseDecodeAudio: this.player._opt.mseDecodeAudio, nakedFlowH265DemuxUseNew: this.player._opt.nakedFlowH265DemuxUseNew, mseDecoderUseWorker: this.player._opt.mseDecoderUseWorker, mseAutoCleanupMinBackwardDuration: this.player._opt.mseAutoCleanupMinBackwardDuration, mseAutoCleanupMaxBackwardDuration: this.player._opt.mseAutoCleanupMaxBackwardDuration, mseCorrectTimeDuration: this.player._opt.mseCorrectTimeDuration, mseCorrectAudioTimeDuration: this.player._opt.mseCorrectAudioTimeDuration }; this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.init, opt: JSON.stringify(opt) }); if (this.player._opt.isM7sCrypto) { this.updateWorkConfig({ key: 'cryptoKey', value: this.player._opt.cryptoKey }); this.updateWorkConfig({ key: 'cryptoIV', value: this.player._opt.cryptoIV }); } } decodeVideo(arrayBuffer, ts, isIFrame) { if (this.player._opt.playType === PLAY_TYPE.player) { if (this.player.isUseHls265()) { this._decodeVideoNoDelay(arrayBuffer, ts, isIFrame); } else { this._decodeVideo(arrayBuffer, ts, isIFrame); } } else if (this.player._opt.playType === PLAY_TYPE.playbackTF) { // if rate if (this.player.video.rate >= this.player._opt.playbackForwardMaxRateDecodeIFrame) { if (isIFrame) { this.player.debug.log(`decoderWorker`, `current rate is ${this.player.video.rate},only decode i frame`); this._decodeVideoNoDelay(arrayBuffer, ts, isIFrame); } } else { if (this.player.video.rate === 1) { this._decodeVideo(arrayBuffer, ts, isIFrame); } else { this._decodeVideoNoDelay(arrayBuffer, ts, isIFrame); } } } } _decodeVideo(arrayBuffer, ts, isIFrame) { const options = { type: MEDIA_TYPE.video, ts: Math.max(ts, 0), isIFrame }; // this.player.debug.log('decoderWorker', 'decodeVideo', options); this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.decode, buffer: arrayBuffer, 
options }, [arrayBuffer.buffer]); } /** * * @param arrayBuffer * @param ts * @private */ _decodeVideoNoDelay(arrayBuffer, ts, isIFrame) { this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.videoDecode, buffer: arrayBuffer, ts: Math.max(ts, 0), isIFrame }, [arrayBuffer.buffer]); } decodeAudio(arrayBuffer, ts) { if (this.player._opt.playType === PLAY_TYPE.player) { if (this.player._opt.useWCS) { this._decodeAudioNoDelay(arrayBuffer, ts); } else if (this.player._opt.useMSE) { this._decodeAudioNoDelay(arrayBuffer, ts); } else if (this.player.isUseHls265()) { this._decodeAudioNoDelay(arrayBuffer, ts); } else { this._decodeAudio(arrayBuffer, ts); } } else if (this.player._opt.playType === PLAY_TYPE.playbackTF) { // this._decodeAudioNoDelay(arrayBuffer, ts); if (this.player.video.rate === 1) { this._decodeAudio(arrayBuffer, ts); } else { this._decodeAudioNoDelay(arrayBuffer, ts); } } } // _decodeAudio(arrayBuffer, ts) { const options = { type: MEDIA_TYPE.audio, ts: Math.max(ts, 0) }; // this.player.debug.log('decoderWorker', 'decodeAudio',options); this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.decode, buffer: arrayBuffer, options }, [arrayBuffer.buffer]); } _decodeAudioNoDelay(arrayBuffer, ts) { // console.log('_decodeAudioNoDelay', ts); this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.audioDecode, buffer: arrayBuffer, ts: Math.max(ts, 0) }, [arrayBuffer.buffer]); } updateWorkConfig(config) { this.decoderWorker && this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.updateConfig, key: config.key, value: config.value }); } workerFetchStream(url) { const { _opt } = this.player; const opt = { protocol: _opt.protocol, isFlv: _opt.isFlv, isFmp4: _opt.isFmp4, isMpeg4: _opt.isMpeg4, isNakedFlow: _opt.isNakedFlow, isTs: _opt.isTs }; this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.fetchStream, url, opt: JSON.stringify(opt) }); } clearWorkBuffer() { let needClear = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : false; this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.clearBuffer, needClear }); } workerSendMessage(message) { this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.sendWsMessage, message }); } // updateVideoTimestamp(message) { this.decoderWorker.postMessage({ cmd: WORKER_SEND_TYPE.mseUpdateVideoTimestamp, message }); } } // a simple ajax var jsonType = 'application/json, text/javascript'; var htmlType = 'text/html'; var xmlTypeRE = /^(?:text|application)\/xml/i; var rheaders = /^(.*?):[ \t]*([^\r\n]*)\r?$/mg; var rurl = /^([\w.+-]+:)(?:\/\/(?:[^\/?#]*@|)([^\/?#:]*)(?::(\d+)|)|)/; var blankRE = /^\s*$/; // \s var lastModified = {}; var etag = {}; var responseHeadersString = ''; var responseHeaders; /* * default setting * */ var _settings = { type: "GET", beforeSend: noop$1, success: noop$1, error: noop$1, complete: noop$1, context: null, xhr: function () { return new window.XMLHttpRequest(); }, accepts: { json: jsonType, xml: 'application/xml, text/xml', html: htmlType, text: 'text/plain', '*': "*/".concat("*") }, crossDomain: false, timeout: 0, username: null, password: null, processData: true, promise: noop$1, contentType: 'application/x-www-form-urlencoded; charset=UTF-8' }; function noop$1() {} var ajax = function (url, options) { if (typeof url === 'object') { options = url; url = undefined; } options = options || {}; // var settings = extend({}, options); // for (var key in _settings) { if (settings[key] === undefined) { settings[key] = _settings[key]; } } // try { var q = {}; var promise = new Promise(function (resolve, reject) { q.resolve = resolve; q.reject = reject; }); promise.resolve = q.resolve; promise.reject = q.reject; settings.promise = promise; } catch (e) { // settings.promise = { resolve: noop$1, reject: noop$1 }; } // url var ajaxLocParts = rurl.exec(window.location.href.toLowerCase()) || []; settings.url = ((url || settings.url || window.location.href) + '').replace(/#.*$/, '').replace(/^\/\//, ajaxLocParts[1] + "//"); var cacheURL = settings.url; // cross domain if (!settings.crossDomain) { settings.crossDomain = /^([\w-]+:)?\/\/([^\/]+)/.test(settings.url) && RegExp.$2 !== window.location.href; } // var dataType = settings.dataType; // jsonp if (dataType === 'jsonp') { // var hasPlaceholder = /=\?/.test(settings.url); if (!hasPlaceholder) { var jsonpCallback = (settings.jsonp || 'callback') + '=?'; settings.url = appendQuery(settings.url, jsonpCallback); } return JSONP(settings); } // serializeData(settings); var mime = settings.accepts[dataType] || settings.accepts['*']; // mime var baseHeader = {}; // header /^([\w-]+:)\/\//.test(settings.url) ? RegExp.$1 : window.location.protocol; // protocol var xhr = _settings.xhr(); var abortTimeout; // X-Requested-With header // For cross-domain requests, seeing as conditions for a preflight are // akin to a jigsaw puzzle, we simply never set it to be sure. // (it can always be set on a per-request basis or even using ajaxSetup) // For same-domain requests, won't change header if already provided. if (!settings.crossDomain && !baseHeader['X-Requested-With']) { baseHeader['X-Requested-With'] = 'XMLHttpRequest'; } // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
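// Conditional requests: the Last-Modified / ETag values cached per URL above are replayed here so the server can answer 304 Not Modified.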
if (settings.ifModified) { if (lastModified[cacheURL]) { baseHeader['If-Modified-Since'] = lastModified[cacheURL]; } if (etag[cacheURL]) { baseHeader['If-None-Match'] = etag[cacheURL]; } } // mime if (mime) { // baseHeader['Accept'] = mime; if (mime.indexOf(',') > -1) { mime = mime.split(',', 2)[0]; } // xhr.overrideMimeType && xhr.overrideMimeType(mime); } // not get and not head var hasContent = !/^(?:GET|HEAD)$/.test(settings.type.toUpperCase()); // if (settings.data && hasContent && settings.contentType !== false || options.contentType) { baseHeader['Content-Type'] = settings.contentType; } // cache: default true if (settings.cache === false && !hasContent) { var rts = /([?&])_=[^&]*/; settings.url = rts.test(cacheURL) ? cacheURL.replace(rts, "$1_=" + now$1()) : cacheURL + (/\?/.test(cacheURL) ? "&" : "?") + "_=" + now$1(); } // headers settings.headers = extend(baseHeader, settings.headers || {}); // on ready state change xhr.onreadystatechange = function () { // readystate if (xhr.readyState === 4) { clearTimeout(abortTimeout); var result; var error = false; var isSuccess = xhr.status >= 200 && xhr.status < 300 || xhr.status === 304; // if (isSuccess) { responseHeadersString = xhr.getAllResponseHeaders(); if (settings.ifModified) { var modified = getResponseHeader('Last-Modified'); if (modified) { lastModified[cacheURL] = modified; } modified = getResponseHeader('etag'); if (modified) { etag[cacheURL] = modified; } } dataType = dataType || mimeToDataType(xhr.getResponseHeader('content-type')); result = xhr.responseText; try { // xml if (dataType === 'xml') { result = xhr.responseXML; } // json else if (dataType === 'json') { result = blankRE.test(result) ? null : JSON.parse(result); } } catch (e) { error = e; } if (error) { ajaxError(error, 'parseerror', xhr, settings); } else { ajaxSuccess(result, xhr, settings); } } else { ajaxError(null, 'error', xhr, settings); } } }; // async var async = 'async' in settings ? settings.async : true; // open xhr.open(settings.type, settings.url, async, settings.username, settings.password); // xhrFields if (settings.xhrFields) { for (var name in settings.xhrFields) { xhr[name] = settings.xhrFields[name]; } } // Override mime type if needed if (settings.mimeType && xhr.overrideMimeType) { xhr.overrideMimeType(settings.mimeType); } // set request header for (var name in settings.headers) { // Support: IE<9 // IE's ActiveXObject throws a 'Type Mismatch' exception when setting // request header to a null-value. // // To keep consistent with other XHR implementations, cast the value // to string and ignore `undefined`. if (settings.headers[name] !== undefined) { xhr.setRequestHeader(name, settings.headers[name] + ""); } } // before send if (ajaxBeforeSend(xhr, settings) === false) { xhr.abort(); return false; } // timeout if (settings.timeout > 0) { abortTimeout = window.setTimeout(function () { xhr.onreadystatechange = noop$1; xhr.abort(); ajaxError(null, 'timeout', xhr, settings); }, settings.timeout); } // send xhr.send(settings.data ? 
settings.data : null); // abort method settings.promise.abort = function () { xhr.abort(); }; // return settings.promise; }; /* * method get * */ ajax.get = function (url, data, success, dataType) { if (isFunction(data)) { dataType = dataType || success; success = data; data = undefined; } return ajax({ url: url, data: data, success: success, dataType: dataType }); }; /* * method post * * dataType: * */ ajax.post = function (url, data, success, dataType) { if (isFunction(data)) { dataType = dataType || success; success = data; data = undefined; } return ajax({ type: 'POST', url: url, data: data, success: success, dataType: dataType }); }; /* * method getJSON * */ ajax.getJSON = function (url, data, success) { if (isFunction(data)) { success = data; data = undefined; } return ajax({ url: url, data: data, success: success, dataType: 'json' }); }; /* * method ajaxSetup * */ ajax.ajaxSetup = function (target, settings) { return settings ? extend(extend(target, _settings), settings) : extend(_settings, target); }; /* * utils * * */ // triggers and extra global event ajaxBeforeSend that's like ajaxSend but cancelable function ajaxBeforeSend(xhr, settings) { var context = settings.context; // if (settings.beforeSend.call(context, xhr, settings) === false) { return false; } } // ajax success function ajaxSuccess(data, xhr, settings) { var context = settings.context; var status = 'success'; settings.success.call(context, data, status, xhr); settings.promise.resolve(data, status, xhr); ajaxComplete(status, xhr, settings); } // status: "success", "notmodified", "error", "timeout", "abort", "parsererror" function ajaxComplete(status, xhr, settings) { var context = settings.context; settings.complete.call(context, xhr, status); } // type: "timeout", "error", "abort", "parsererror" function ajaxError(error, type, xhr, settings) { var context = settings.context; settings.error.call(context, xhr, type, error); settings.promise.reject(xhr, type, error); ajaxComplete(type, xhr, settings); } function getResponseHeader(key) { var match; if (!responseHeaders) { responseHeaders = {}; while (match = rheaders.exec(responseHeadersString)) { responseHeaders[match[1].toLowerCase()] = match[2]; } match = responseHeaders[key.toLowerCase()]; } return match === null ? 
null : match; } // jsonp /* * tks: https://www.cnblogs.com/rubylouvre/archive/2011/02/13/1953087.html * */ function JSONP(options) { // var callbackName = options.jsonpCallback || 'jsonp' + now$1(); var script = window.document.createElement('script'); var abort = function () { // 设置 window.xxx = noop if (callbackName in window) { window[callbackName] = noop$1; } }; var xhr = { abort: abort }; var abortTimeout; var head = window.document.getElementsByTagName('head')[0] || window.document.documentElement; // ie8+ script.onerror = function (error) { _error(error); }; function _error(error) { window.clearTimeout(abortTimeout); xhr.abort(); ajaxError(error.type, xhr, error.type, options); _removeScript(); } window[callbackName] = function (data) { window.clearTimeout(abortTimeout); ajaxSuccess(data, xhr, options); _removeScript(); }; // serializeData(options); script.src = options.url.replace(/=\?/, '=' + callbackName); // script.src = appendQuery(script.src, '_=' + new Date().getTime()); // script.async = true; // script charset if (options.scriptCharset) { script.charset = options.scriptCharset; } // head.insertBefore(script, head.firstChild); // if (options.timeout > 0) { abortTimeout = window.setTimeout(function () { xhr.abort(); ajaxError('timeout', xhr, 'timeout', options); _removeScript(); }, options.timeout); } // remove script function _removeScript() { if (script.clearAttributes) { script.clearAttributes(); } else { script.onload = script.onreadystatechange = script.onerror = null; } if (script.parentNode) { script.parentNode.removeChild(script); } // script = null; delete window[callbackName]; } // add abort options.promise.abort = function () { xhr.abort(); }; // add xhr options.promise.xhr = xhr; return options.promise; } // mime to data type function mimeToDataType(mime) { return mime && (mime === htmlType ? 'html' : mime === jsonType ? 'json' : xmlTypeRE.test(mime) && 'xml') || 'text'; } // append query function appendQuery(url, query) { return (url + '&' + query).replace(/[&?]{1,2}/, '?'); } // serialize data function serializeData(options) { // formData if (isObject$1(options) && !isFormData(options.data) && options.processData) { options.data = param(options.data); } if (options.data && (!options.type || options.type.toUpperCase() === 'GET')) { options.url = appendQuery(options.url, options.data); } } // serialize function serialize(params, obj, traditional, scope) { var _isArray = isArray(obj); for (var key in obj) { var value = obj[key]; if (scope) { key = traditional ? scope : scope + '[' + (_isArray ? '' : key) + ']'; } // handle data in serializeArray format if (!scope && _isArray) { params.add(value.name, value.value); } else if (traditional ? 
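// --- JSONP note (comment only, behaviour read off the code above) ---
// JSONP() replaces the `=?` placeholder in options.url with the generated callback name,
// injects a <script> tag into <head>, and removes the script plus the global callback on
// success, error or timeout; abort() only neutralizes the global callback. mimeToDataType()
// maps a Content-Type onto the dataType used by the XHR path: htmlType/jsonType are matched
// exactly, anything matching xmlTypeRE yields 'xml', and everything else falls back to 'text'.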
_isArray(value) : isObject$1(value)) { serialize(params, value, traditional, key); } else { params.add(key, value); } } } // param function param(obj, traditional) { var params = []; // params.add = function (k, v) { this.push(encodeURIComponent(k) + '=' + encodeURIComponent(v)); }; serialize(params, obj, traditional); return params.join('&').replace('%20', '+'); } // extend function extend(target) { var slice = Array.prototype.slice; var args = slice.call(arguments, 1); // for (var i = 0, length = args.length; i < length; i++) { var source = args[i] || {}; for (var key in source) { if (source.hasOwnProperty(key) && source[key] !== undefined) { target[key] = source[key]; } } } return target; } // is object function isObject$1(obj) { var type = typeof obj; return type === 'function' || type === 'object' && !!obj; } // is formData function isFormData(obj) { return obj instanceof FormData; } // is array function isArray(value) { return Object.prototype.toString.call(value) === "[object Array]"; } // is function function isFunction(value) { return typeof value === "function"; } function now$1() { return new Date().getTime(); } function checkInt(value) { return parseInt(value) === value; } function checkInts(arrayish) { if (!checkInt(arrayish.length)) { return false; } for (var i = 0; i < arrayish.length; i++) { if (!checkInt(arrayish[i]) || arrayish[i] < 0 || arrayish[i] > 255) { return false; } } return true; } function coerceArray(arg, copy) { // ArrayBuffer view if (arg.buffer && arg.name === 'Uint8Array') { if (copy) { if (arg.slice) { arg = arg.slice(); } else { arg = Array.prototype.slice.call(arg); } } return arg; } // It's an array; check it is a valid representation of a byte if (Array.isArray(arg)) { if (!checkInts(arg)) { throw new Error('Array contains invalid value: ' + arg); } return new Uint8Array(arg); } // Something else, but behaves like an array (maybe a Buffer? Arguments?) 
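// Behaviour sketch for the serialization and byte-array helpers in this region
// (comments only, values derived from the surrounding code):
//   param({ a: 1, b: 'x y' })   -> 'a=1&b=x+y'
//   extend({ a: 1 }, { b: 2 })  -> { a: 1, b: 2 }   (mutates and returns the first argument)
//   checkInts([1, 2, 300])      -> false            (300 is outside the 0..255 byte range)
//   coerceArray([0, 255, 16])   -> Uint8Array [0, 255, 16]
//   coerceArray([0, 256])       -> throws 'Array contains invalid value: ...'
// Array-likes such as `arguments` fall through to the length/element check below.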
if (checkInt(arg.length) && checkInts(arg)) { return new Uint8Array(arg); } throw new Error('unsupported array-like object'); } function createArray(length) { return new Uint8Array(length); } function copyArray(sourceArray, targetArray, targetStart, sourceStart, sourceEnd) { if (sourceStart != null || sourceEnd != null) { if (sourceArray.slice) { sourceArray = sourceArray.slice(sourceStart, sourceEnd); } else { sourceArray = Array.prototype.slice.call(sourceArray, sourceStart, sourceEnd); } } targetArray.set(sourceArray, targetStart); } var convertUtf8 = function () { function toBytes(text) { var result = [], i = 0; text = encodeURI(text); while (i < text.length) { var c = text.charCodeAt(i++); // if it is a % sign, encode the following 2 bytes as a hex value if (c === 37) { result.push(parseInt(text.substr(i, 2), 16)); i += 2; // otherwise, just the actual byte } else { result.push(c); } } return coerceArray(result); } function fromBytes(bytes) { var result = [], i = 0; while (i < bytes.length) { var c = bytes[i]; if (c < 128) { result.push(String.fromCharCode(c)); i++; } else if (c > 191 && c < 224) { result.push(String.fromCharCode((c & 0x1f) << 6 | bytes[i + 1] & 0x3f)); i += 2; } else { result.push(String.fromCharCode((c & 0x0f) << 12 | (bytes[i + 1] & 0x3f) << 6 | bytes[i + 2] & 0x3f)); i += 3; } } return result.join(''); } return { toBytes: toBytes, fromBytes: fromBytes }; }(); var convertHex = function () { function toBytes(text) { var result = []; for (var i = 0; i < text.length; i += 2) { result.push(parseInt(text.substr(i, 2), 16)); } return result; } // http://ixti.net/development/javascript/2011/11/11/base64-encodedecode-of-utf8-in-browser-with-js.html var Hex = '0123456789abcdef'; function fromBytes(bytes) { var result = []; for (var i = 0; i < bytes.length; i++) { var v = bytes[i]; result.push(Hex[(v & 0xf0) >> 4] + Hex[v & 0x0f]); } return result.join(''); } return { toBytes: toBytes, fromBytes: fromBytes }; }(); // Number of rounds by keysize var numberOfRounds = { 16: 10, 24: 12, 32: 14 }; // Round constant words var rcon = [0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91]; // S-box and Inverse S-box (S is for Substitution) var S = [0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76, 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8, 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 
0xbd, 0x8b, 0x8a, 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16]; var Si = [0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e, 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92, 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06, 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73, 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b, 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4, 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef, 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d]; // Transformations for encryption var T1 = [0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d, 0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554, 0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d, 0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a, 0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87, 0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b, 0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea, 0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b, 0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a, 0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f, 0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108, 0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f, 0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e, 0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5, 0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d, 0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f, 0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e, 0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb, 0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce, 0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497, 0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c, 0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed, 0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b, 0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a, 0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16, 0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594, 0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81, 0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3, 0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a, 0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504, 0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163, 0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d, 0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f, 0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739, 0x93c4c457, 0x55a7a7f2, 
0xfc7e7e82, 0x7a3d3d47, 0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395, 0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f, 0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883, 0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c, 0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76, 0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e, 0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4, 0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6, 0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b, 0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7, 0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0, 0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25, 0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818, 0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72, 0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651, 0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21, 0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85, 0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa, 0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12, 0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0, 0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9, 0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133, 0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7, 0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920, 0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a, 0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17, 0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8, 0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11, 0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a]; var T2 = [0xa5c66363, 0x84f87c7c, 0x99ee7777, 0x8df67b7b, 0x0dfff2f2, 0xbdd66b6b, 0xb1de6f6f, 0x5491c5c5, 0x50603030, 0x03020101, 0xa9ce6767, 0x7d562b2b, 0x19e7fefe, 0x62b5d7d7, 0xe64dabab, 0x9aec7676, 0x458fcaca, 0x9d1f8282, 0x4089c9c9, 0x87fa7d7d, 0x15effafa, 0xebb25959, 0xc98e4747, 0x0bfbf0f0, 0xec41adad, 0x67b3d4d4, 0xfd5fa2a2, 0xea45afaf, 0xbf239c9c, 0xf753a4a4, 0x96e47272, 0x5b9bc0c0, 0xc275b7b7, 0x1ce1fdfd, 0xae3d9393, 0x6a4c2626, 0x5a6c3636, 0x417e3f3f, 0x02f5f7f7, 0x4f83cccc, 0x5c683434, 0xf451a5a5, 0x34d1e5e5, 0x08f9f1f1, 0x93e27171, 0x73abd8d8, 0x53623131, 0x3f2a1515, 0x0c080404, 0x5295c7c7, 0x65462323, 0x5e9dc3c3, 0x28301818, 0xa1379696, 0x0f0a0505, 0xb52f9a9a, 0x090e0707, 0x36241212, 0x9b1b8080, 0x3ddfe2e2, 0x26cdebeb, 0x694e2727, 0xcd7fb2b2, 0x9fea7575, 0x1b120909, 0x9e1d8383, 0x74582c2c, 0x2e341a1a, 0x2d361b1b, 0xb2dc6e6e, 0xeeb45a5a, 0xfb5ba0a0, 0xf6a45252, 0x4d763b3b, 0x61b7d6d6, 0xce7db3b3, 0x7b522929, 0x3edde3e3, 0x715e2f2f, 0x97138484, 0xf5a65353, 0x68b9d1d1, 0x00000000, 0x2cc1eded, 0x60402020, 0x1fe3fcfc, 0xc879b1b1, 0xedb65b5b, 0xbed46a6a, 0x468dcbcb, 0xd967bebe, 0x4b723939, 0xde944a4a, 0xd4984c4c, 0xe8b05858, 0x4a85cfcf, 0x6bbbd0d0, 0x2ac5efef, 0xe54faaaa, 0x16edfbfb, 0xc5864343, 0xd79a4d4d, 0x55663333, 0x94118585, 0xcf8a4545, 0x10e9f9f9, 0x06040202, 0x81fe7f7f, 0xf0a05050, 0x44783c3c, 0xba259f9f, 0xe34ba8a8, 0xf3a25151, 0xfe5da3a3, 0xc0804040, 0x8a058f8f, 0xad3f9292, 0xbc219d9d, 0x48703838, 0x04f1f5f5, 0xdf63bcbc, 0xc177b6b6, 0x75afdada, 0x63422121, 0x30201010, 0x1ae5ffff, 0x0efdf3f3, 0x6dbfd2d2, 0x4c81cdcd, 0x14180c0c, 0x35261313, 0x2fc3ecec, 0xe1be5f5f, 0xa2359797, 0xcc884444, 0x392e1717, 0x5793c4c4, 0xf255a7a7, 0x82fc7e7e, 0x477a3d3d, 0xacc86464, 0xe7ba5d5d, 0x2b321919, 0x95e67373, 0xa0c06060, 0x98198181, 0xd19e4f4f, 0x7fa3dcdc, 0x66442222, 0x7e542a2a, 0xab3b9090, 0x830b8888, 0xca8c4646, 0x29c7eeee, 0xd36bb8b8, 0x3c281414, 0x79a7dede, 0xe2bc5e5e, 0x1d160b0b, 0x76addbdb, 0x3bdbe0e0, 0x56643232, 0x4e743a3a, 0x1e140a0a, 0xdb924949, 0x0a0c0606, 0x6c482424, 0xe4b85c5c, 0x5d9fc2c2, 0x6ebdd3d3, 0xef43acac, 0xa6c46262, 0xa8399191, 0xa4319595, 0x37d3e4e4, 0x8bf27979, 0x32d5e7e7, 
0x438bc8c8, 0x596e3737, 0xb7da6d6d, 0x8c018d8d, 0x64b1d5d5, 0xd29c4e4e, 0xe049a9a9, 0xb4d86c6c, 0xfaac5656, 0x07f3f4f4, 0x25cfeaea, 0xafca6565, 0x8ef47a7a, 0xe947aeae, 0x18100808, 0xd56fbaba, 0x88f07878, 0x6f4a2525, 0x725c2e2e, 0x24381c1c, 0xf157a6a6, 0xc773b4b4, 0x5197c6c6, 0x23cbe8e8, 0x7ca1dddd, 0x9ce87474, 0x213e1f1f, 0xdd964b4b, 0xdc61bdbd, 0x860d8b8b, 0x850f8a8a, 0x90e07070, 0x427c3e3e, 0xc471b5b5, 0xaacc6666, 0xd8904848, 0x05060303, 0x01f7f6f6, 0x121c0e0e, 0xa3c26161, 0x5f6a3535, 0xf9ae5757, 0xd069b9b9, 0x91178686, 0x5899c1c1, 0x273a1d1d, 0xb9279e9e, 0x38d9e1e1, 0x13ebf8f8, 0xb32b9898, 0x33221111, 0xbbd26969, 0x70a9d9d9, 0x89078e8e, 0xa7339494, 0xb62d9b9b, 0x223c1e1e, 0x92158787, 0x20c9e9e9, 0x4987cece, 0xffaa5555, 0x78502828, 0x7aa5dfdf, 0x8f038c8c, 0xf859a1a1, 0x80098989, 0x171a0d0d, 0xda65bfbf, 0x31d7e6e6, 0xc6844242, 0xb8d06868, 0xc3824141, 0xb0299999, 0x775a2d2d, 0x111e0f0f, 0xcb7bb0b0, 0xfca85454, 0xd66dbbbb, 0x3a2c1616]; var T3 = [0x63a5c663, 0x7c84f87c, 0x7799ee77, 0x7b8df67b, 0xf20dfff2, 0x6bbdd66b, 0x6fb1de6f, 0xc55491c5, 0x30506030, 0x01030201, 0x67a9ce67, 0x2b7d562b, 0xfe19e7fe, 0xd762b5d7, 0xabe64dab, 0x769aec76, 0xca458fca, 0x829d1f82, 0xc94089c9, 0x7d87fa7d, 0xfa15effa, 0x59ebb259, 0x47c98e47, 0xf00bfbf0, 0xadec41ad, 0xd467b3d4, 0xa2fd5fa2, 0xafea45af, 0x9cbf239c, 0xa4f753a4, 0x7296e472, 0xc05b9bc0, 0xb7c275b7, 0xfd1ce1fd, 0x93ae3d93, 0x266a4c26, 0x365a6c36, 0x3f417e3f, 0xf702f5f7, 0xcc4f83cc, 0x345c6834, 0xa5f451a5, 0xe534d1e5, 0xf108f9f1, 0x7193e271, 0xd873abd8, 0x31536231, 0x153f2a15, 0x040c0804, 0xc75295c7, 0x23654623, 0xc35e9dc3, 0x18283018, 0x96a13796, 0x050f0a05, 0x9ab52f9a, 0x07090e07, 0x12362412, 0x809b1b80, 0xe23ddfe2, 0xeb26cdeb, 0x27694e27, 0xb2cd7fb2, 0x759fea75, 0x091b1209, 0x839e1d83, 0x2c74582c, 0x1a2e341a, 0x1b2d361b, 0x6eb2dc6e, 0x5aeeb45a, 0xa0fb5ba0, 0x52f6a452, 0x3b4d763b, 0xd661b7d6, 0xb3ce7db3, 0x297b5229, 0xe33edde3, 0x2f715e2f, 0x84971384, 0x53f5a653, 0xd168b9d1, 0x00000000, 0xed2cc1ed, 0x20604020, 0xfc1fe3fc, 0xb1c879b1, 0x5bedb65b, 0x6abed46a, 0xcb468dcb, 0xbed967be, 0x394b7239, 0x4ade944a, 0x4cd4984c, 0x58e8b058, 0xcf4a85cf, 0xd06bbbd0, 0xef2ac5ef, 0xaae54faa, 0xfb16edfb, 0x43c58643, 0x4dd79a4d, 0x33556633, 0x85941185, 0x45cf8a45, 0xf910e9f9, 0x02060402, 0x7f81fe7f, 0x50f0a050, 0x3c44783c, 0x9fba259f, 0xa8e34ba8, 0x51f3a251, 0xa3fe5da3, 0x40c08040, 0x8f8a058f, 0x92ad3f92, 0x9dbc219d, 0x38487038, 0xf504f1f5, 0xbcdf63bc, 0xb6c177b6, 0xda75afda, 0x21634221, 0x10302010, 0xff1ae5ff, 0xf30efdf3, 0xd26dbfd2, 0xcd4c81cd, 0x0c14180c, 0x13352613, 0xec2fc3ec, 0x5fe1be5f, 0x97a23597, 0x44cc8844, 0x17392e17, 0xc45793c4, 0xa7f255a7, 0x7e82fc7e, 0x3d477a3d, 0x64acc864, 0x5de7ba5d, 0x192b3219, 0x7395e673, 0x60a0c060, 0x81981981, 0x4fd19e4f, 0xdc7fa3dc, 0x22664422, 0x2a7e542a, 0x90ab3b90, 0x88830b88, 0x46ca8c46, 0xee29c7ee, 0xb8d36bb8, 0x143c2814, 0xde79a7de, 0x5ee2bc5e, 0x0b1d160b, 0xdb76addb, 0xe03bdbe0, 0x32566432, 0x3a4e743a, 0x0a1e140a, 0x49db9249, 0x060a0c06, 0x246c4824, 0x5ce4b85c, 0xc25d9fc2, 0xd36ebdd3, 0xacef43ac, 0x62a6c462, 0x91a83991, 0x95a43195, 0xe437d3e4, 0x798bf279, 0xe732d5e7, 0xc8438bc8, 0x37596e37, 0x6db7da6d, 0x8d8c018d, 0xd564b1d5, 0x4ed29c4e, 0xa9e049a9, 0x6cb4d86c, 0x56faac56, 0xf407f3f4, 0xea25cfea, 0x65afca65, 0x7a8ef47a, 0xaee947ae, 0x08181008, 0xbad56fba, 0x7888f078, 0x256f4a25, 0x2e725c2e, 0x1c24381c, 0xa6f157a6, 0xb4c773b4, 0xc65197c6, 0xe823cbe8, 0xdd7ca1dd, 0x749ce874, 0x1f213e1f, 0x4bdd964b, 0xbddc61bd, 0x8b860d8b, 0x8a850f8a, 0x7090e070, 0x3e427c3e, 0xb5c471b5, 0x66aacc66, 0x48d89048, 0x03050603, 0xf601f7f6, 0x0e121c0e, 
0x61a3c261, 0x355f6a35, 0x57f9ae57, 0xb9d069b9, 0x86911786, 0xc15899c1, 0x1d273a1d, 0x9eb9279e, 0xe138d9e1, 0xf813ebf8, 0x98b32b98, 0x11332211, 0x69bbd269, 0xd970a9d9, 0x8e89078e, 0x94a73394, 0x9bb62d9b, 0x1e223c1e, 0x87921587, 0xe920c9e9, 0xce4987ce, 0x55ffaa55, 0x28785028, 0xdf7aa5df, 0x8c8f038c, 0xa1f859a1, 0x89800989, 0x0d171a0d, 0xbfda65bf, 0xe631d7e6, 0x42c68442, 0x68b8d068, 0x41c38241, 0x99b02999, 0x2d775a2d, 0x0f111e0f, 0xb0cb7bb0, 0x54fca854, 0xbbd66dbb, 0x163a2c16]; var T4 = [0x6363a5c6, 0x7c7c84f8, 0x777799ee, 0x7b7b8df6, 0xf2f20dff, 0x6b6bbdd6, 0x6f6fb1de, 0xc5c55491, 0x30305060, 0x01010302, 0x6767a9ce, 0x2b2b7d56, 0xfefe19e7, 0xd7d762b5, 0xababe64d, 0x76769aec, 0xcaca458f, 0x82829d1f, 0xc9c94089, 0x7d7d87fa, 0xfafa15ef, 0x5959ebb2, 0x4747c98e, 0xf0f00bfb, 0xadadec41, 0xd4d467b3, 0xa2a2fd5f, 0xafafea45, 0x9c9cbf23, 0xa4a4f753, 0x727296e4, 0xc0c05b9b, 0xb7b7c275, 0xfdfd1ce1, 0x9393ae3d, 0x26266a4c, 0x36365a6c, 0x3f3f417e, 0xf7f702f5, 0xcccc4f83, 0x34345c68, 0xa5a5f451, 0xe5e534d1, 0xf1f108f9, 0x717193e2, 0xd8d873ab, 0x31315362, 0x15153f2a, 0x04040c08, 0xc7c75295, 0x23236546, 0xc3c35e9d, 0x18182830, 0x9696a137, 0x05050f0a, 0x9a9ab52f, 0x0707090e, 0x12123624, 0x80809b1b, 0xe2e23ddf, 0xebeb26cd, 0x2727694e, 0xb2b2cd7f, 0x75759fea, 0x09091b12, 0x83839e1d, 0x2c2c7458, 0x1a1a2e34, 0x1b1b2d36, 0x6e6eb2dc, 0x5a5aeeb4, 0xa0a0fb5b, 0x5252f6a4, 0x3b3b4d76, 0xd6d661b7, 0xb3b3ce7d, 0x29297b52, 0xe3e33edd, 0x2f2f715e, 0x84849713, 0x5353f5a6, 0xd1d168b9, 0x00000000, 0xeded2cc1, 0x20206040, 0xfcfc1fe3, 0xb1b1c879, 0x5b5bedb6, 0x6a6abed4, 0xcbcb468d, 0xbebed967, 0x39394b72, 0x4a4ade94, 0x4c4cd498, 0x5858e8b0, 0xcfcf4a85, 0xd0d06bbb, 0xefef2ac5, 0xaaaae54f, 0xfbfb16ed, 0x4343c586, 0x4d4dd79a, 0x33335566, 0x85859411, 0x4545cf8a, 0xf9f910e9, 0x02020604, 0x7f7f81fe, 0x5050f0a0, 0x3c3c4478, 0x9f9fba25, 0xa8a8e34b, 0x5151f3a2, 0xa3a3fe5d, 0x4040c080, 0x8f8f8a05, 0x9292ad3f, 0x9d9dbc21, 0x38384870, 0xf5f504f1, 0xbcbcdf63, 0xb6b6c177, 0xdada75af, 0x21216342, 0x10103020, 0xffff1ae5, 0xf3f30efd, 0xd2d26dbf, 0xcdcd4c81, 0x0c0c1418, 0x13133526, 0xecec2fc3, 0x5f5fe1be, 0x9797a235, 0x4444cc88, 0x1717392e, 0xc4c45793, 0xa7a7f255, 0x7e7e82fc, 0x3d3d477a, 0x6464acc8, 0x5d5de7ba, 0x19192b32, 0x737395e6, 0x6060a0c0, 0x81819819, 0x4f4fd19e, 0xdcdc7fa3, 0x22226644, 0x2a2a7e54, 0x9090ab3b, 0x8888830b, 0x4646ca8c, 0xeeee29c7, 0xb8b8d36b, 0x14143c28, 0xdede79a7, 0x5e5ee2bc, 0x0b0b1d16, 0xdbdb76ad, 0xe0e03bdb, 0x32325664, 0x3a3a4e74, 0x0a0a1e14, 0x4949db92, 0x06060a0c, 0x24246c48, 0x5c5ce4b8, 0xc2c25d9f, 0xd3d36ebd, 0xacacef43, 0x6262a6c4, 0x9191a839, 0x9595a431, 0xe4e437d3, 0x79798bf2, 0xe7e732d5, 0xc8c8438b, 0x3737596e, 0x6d6db7da, 0x8d8d8c01, 0xd5d564b1, 0x4e4ed29c, 0xa9a9e049, 0x6c6cb4d8, 0x5656faac, 0xf4f407f3, 0xeaea25cf, 0x6565afca, 0x7a7a8ef4, 0xaeaee947, 0x08081810, 0xbabad56f, 0x787888f0, 0x25256f4a, 0x2e2e725c, 0x1c1c2438, 0xa6a6f157, 0xb4b4c773, 0xc6c65197, 0xe8e823cb, 0xdddd7ca1, 0x74749ce8, 0x1f1f213e, 0x4b4bdd96, 0xbdbddc61, 0x8b8b860d, 0x8a8a850f, 0x707090e0, 0x3e3e427c, 0xb5b5c471, 0x6666aacc, 0x4848d890, 0x03030506, 0xf6f601f7, 0x0e0e121c, 0x6161a3c2, 0x35355f6a, 0x5757f9ae, 0xb9b9d069, 0x86869117, 0xc1c15899, 0x1d1d273a, 0x9e9eb927, 0xe1e138d9, 0xf8f813eb, 0x9898b32b, 0x11113322, 0x6969bbd2, 0xd9d970a9, 0x8e8e8907, 0x9494a733, 0x9b9bb62d, 0x1e1e223c, 0x87879215, 0xe9e920c9, 0xcece4987, 0x5555ffaa, 0x28287850, 0xdfdf7aa5, 0x8c8c8f03, 0xa1a1f859, 0x89898009, 0x0d0d171a, 0xbfbfda65, 0xe6e631d7, 0x4242c684, 0x6868b8d0, 0x4141c382, 0x9999b029, 0x2d2d775a, 0x0f0f111e, 0xb0b0cb7b, 0x5454fca8, 0xbbbbd66d, 
0x16163a2c]; // Transformations for decryption var T5 = [0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96, 0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393, 0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25, 0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f, 0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1, 0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6, 0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da, 0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844, 0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd, 0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4, 0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45, 0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94, 0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7, 0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a, 0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5, 0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c, 0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1, 0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a, 0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75, 0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051, 0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46, 0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff, 0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77, 0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb, 0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000, 0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e, 0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927, 0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a, 0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e, 0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16, 0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d, 0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8, 0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd, 0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34, 0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163, 0xd731dcca, 0x42638510, 0x13972240, 0x84c61120, 0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d, 0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0, 0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422, 0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef, 0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36, 0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4, 0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662, 0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5, 0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3, 0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b, 0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8, 0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6, 0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6, 0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0, 0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815, 0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f, 0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df, 0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f, 0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e, 0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713, 0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89, 0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c, 0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf, 0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86, 0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f, 0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541, 0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190, 0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742]; var T6 = [0x5051f4a7, 0x537e4165, 0xc31a17a4, 0x963a275e, 0xcb3bab6b, 0xf11f9d45, 0xabacfa58, 0x934be303, 0x552030fa, 0xf6ad766d, 0x9188cc76, 0x25f5024c, 0xfc4fe5d7, 0xd7c52acb, 0x80263544, 0x8fb562a3, 0x49deb15a, 0x6725ba1b, 0x9845ea0e, 0xe15dfec0, 0x02c32f75, 0x12814cf0, 0xa38d4697, 0xc66bd3f9, 0xe7038f5f, 0x9515929c, 0xebbf6d7a, 0xda955259, 0x2dd4be83, 0xd3587421, 0x2949e069, 0x448ec9c8, 0x6a75c289, 0x78f48e79, 
0x6b99583e, 0xdd27b971, 0xb6bee14f, 0x17f088ad, 0x66c920ac, 0xb47dce3a, 0x1863df4a, 0x82e51a31, 0x60975133, 0x4562537f, 0xe0b16477, 0x84bb6bae, 0x1cfe81a0, 0x94f9082b, 0x58704868, 0x198f45fd, 0x8794de6c, 0xb7527bf8, 0x23ab73d3, 0xe2724b02, 0x57e31f8f, 0x2a6655ab, 0x07b2eb28, 0x032fb5c2, 0x9a86c57b, 0xa5d33708, 0xf2302887, 0xb223bfa5, 0xba02036a, 0x5ced1682, 0x2b8acf1c, 0x92a779b4, 0xf0f307f2, 0xa14e69e2, 0xcd65daf4, 0xd50605be, 0x1fd13462, 0x8ac4a6fe, 0x9d342e53, 0xa0a2f355, 0x32058ae1, 0x75a4f6eb, 0x390b83ec, 0xaa4060ef, 0x065e719f, 0x51bd6e10, 0xf93e218a, 0x3d96dd06, 0xaedd3e05, 0x464de6bd, 0xb591548d, 0x0571c45d, 0x6f0406d4, 0xff605015, 0x241998fb, 0x97d6bde9, 0xcc894043, 0x7767d99e, 0xbdb0e842, 0x8807898b, 0x38e7195b, 0xdb79c8ee, 0x47a17c0a, 0xe97c420f, 0xc9f8841e, 0x00000000, 0x83098086, 0x48322bed, 0xac1e1170, 0x4e6c5a72, 0xfbfd0eff, 0x560f8538, 0x1e3daed5, 0x27362d39, 0x640a0fd9, 0x21685ca6, 0xd19b5b54, 0x3a24362e, 0xb10c0a67, 0x0f9357e7, 0xd2b4ee96, 0x9e1b9b91, 0x4f80c0c5, 0xa261dc20, 0x695a774b, 0x161c121a, 0x0ae293ba, 0xe5c0a02a, 0x433c22e0, 0x1d121b17, 0x0b0e090d, 0xadf28bc7, 0xb92db6a8, 0xc8141ea9, 0x8557f119, 0x4caf7507, 0xbbee99dd, 0xfda37f60, 0x9ff70126, 0xbc5c72f5, 0xc544663b, 0x345bfb7e, 0x768b4329, 0xdccb23c6, 0x68b6edfc, 0x63b8e4f1, 0xcad731dc, 0x10426385, 0x40139722, 0x2084c611, 0x7d854a24, 0xf8d2bb3d, 0x11aef932, 0x6dc729a1, 0x4b1d9e2f, 0xf3dcb230, 0xec0d8652, 0xd077c1e3, 0x6c2bb316, 0x99a970b9, 0xfa119448, 0x2247e964, 0xc4a8fc8c, 0x1aa0f03f, 0xd8567d2c, 0xef223390, 0xc787494e, 0xc1d938d1, 0xfe8ccaa2, 0x3698d40b, 0xcfa6f581, 0x28a57ade, 0x26dab78e, 0xa43fadbf, 0xe42c3a9d, 0x0d507892, 0x9b6a5fcc, 0x62547e46, 0xc2f68d13, 0xe890d8b8, 0x5e2e39f7, 0xf582c3af, 0xbe9f5d80, 0x7c69d093, 0xa96fd52d, 0xb3cf2512, 0x3bc8ac99, 0xa710187d, 0x6ee89c63, 0x7bdb3bbb, 0x09cd2678, 0xf46e5918, 0x01ec9ab7, 0xa8834f9a, 0x65e6956e, 0x7eaaffe6, 0x0821bccf, 0xe6ef15e8, 0xd9bae79b, 0xce4a6f36, 0xd4ea9f09, 0xd629b07c, 0xaf31a4b2, 0x312a3f23, 0x30c6a594, 0xc035a266, 0x37744ebc, 0xa6fc82ca, 0xb0e090d0, 0x1533a7d8, 0x4af10498, 0xf741ecda, 0x0e7fcd50, 0x2f1791f6, 0x8d764dd6, 0x4d43efb0, 0x54ccaa4d, 0xdfe49604, 0xe39ed1b5, 0x1b4c6a88, 0xb8c12c1f, 0x7f466551, 0x049d5eea, 0x5d018c35, 0x73fa8774, 0x2efb0b41, 0x5ab3671d, 0x5292dbd2, 0x33e91056, 0x136dd647, 0x8c9ad761, 0x7a37a10c, 0x8e59f814, 0x89eb133c, 0xeecea927, 0x35b761c9, 0xede11ce5, 0x3c7a47b1, 0x599cd2df, 0x3f55f273, 0x791814ce, 0xbf73c737, 0xea53f7cd, 0x5b5ffdaa, 0x14df3d6f, 0x867844db, 0x81caaff3, 0x3eb968c4, 0x2c382434, 0x5fc2a340, 0x72161dc3, 0x0cbce225, 0x8b283c49, 0x41ff0d95, 0x7139a801, 0xde080cb3, 0x9cd8b4e4, 0x906456c1, 0x617bcb84, 0x70d532b6, 0x74486c5c, 0x42d0b857]; var T7 = [0xa75051f4, 0x65537e41, 0xa4c31a17, 0x5e963a27, 0x6bcb3bab, 0x45f11f9d, 0x58abacfa, 0x03934be3, 0xfa552030, 0x6df6ad76, 0x769188cc, 0x4c25f502, 0xd7fc4fe5, 0xcbd7c52a, 0x44802635, 0xa38fb562, 0x5a49deb1, 0x1b6725ba, 0x0e9845ea, 0xc0e15dfe, 0x7502c32f, 0xf012814c, 0x97a38d46, 0xf9c66bd3, 0x5fe7038f, 0x9c951592, 0x7aebbf6d, 0x59da9552, 0x832dd4be, 0x21d35874, 0x692949e0, 0xc8448ec9, 0x896a75c2, 0x7978f48e, 0x3e6b9958, 0x71dd27b9, 0x4fb6bee1, 0xad17f088, 0xac66c920, 0x3ab47dce, 0x4a1863df, 0x3182e51a, 0x33609751, 0x7f456253, 0x77e0b164, 0xae84bb6b, 0xa01cfe81, 0x2b94f908, 0x68587048, 0xfd198f45, 0x6c8794de, 0xf8b7527b, 0xd323ab73, 0x02e2724b, 0x8f57e31f, 0xab2a6655, 0x2807b2eb, 0xc2032fb5, 0x7b9a86c5, 0x08a5d337, 0x87f23028, 0xa5b223bf, 0x6aba0203, 0x825ced16, 0x1c2b8acf, 0xb492a779, 0xf2f0f307, 0xe2a14e69, 0xf4cd65da, 0xbed50605, 0x621fd134, 0xfe8ac4a6, 0x539d342e, 
0x55a0a2f3, 0xe132058a, 0xeb75a4f6, 0xec390b83, 0xefaa4060, 0x9f065e71, 0x1051bd6e, 0x8af93e21, 0x063d96dd, 0x05aedd3e, 0xbd464de6, 0x8db59154, 0x5d0571c4, 0xd46f0406, 0x15ff6050, 0xfb241998, 0xe997d6bd, 0x43cc8940, 0x9e7767d9, 0x42bdb0e8, 0x8b880789, 0x5b38e719, 0xeedb79c8, 0x0a47a17c, 0x0fe97c42, 0x1ec9f884, 0x00000000, 0x86830980, 0xed48322b, 0x70ac1e11, 0x724e6c5a, 0xfffbfd0e, 0x38560f85, 0xd51e3dae, 0x3927362d, 0xd9640a0f, 0xa621685c, 0x54d19b5b, 0x2e3a2436, 0x67b10c0a, 0xe70f9357, 0x96d2b4ee, 0x919e1b9b, 0xc54f80c0, 0x20a261dc, 0x4b695a77, 0x1a161c12, 0xba0ae293, 0x2ae5c0a0, 0xe0433c22, 0x171d121b, 0x0d0b0e09, 0xc7adf28b, 0xa8b92db6, 0xa9c8141e, 0x198557f1, 0x074caf75, 0xddbbee99, 0x60fda37f, 0x269ff701, 0xf5bc5c72, 0x3bc54466, 0x7e345bfb, 0x29768b43, 0xc6dccb23, 0xfc68b6ed, 0xf163b8e4, 0xdccad731, 0x85104263, 0x22401397, 0x112084c6, 0x247d854a, 0x3df8d2bb, 0x3211aef9, 0xa16dc729, 0x2f4b1d9e, 0x30f3dcb2, 0x52ec0d86, 0xe3d077c1, 0x166c2bb3, 0xb999a970, 0x48fa1194, 0x642247e9, 0x8cc4a8fc, 0x3f1aa0f0, 0x2cd8567d, 0x90ef2233, 0x4ec78749, 0xd1c1d938, 0xa2fe8cca, 0x0b3698d4, 0x81cfa6f5, 0xde28a57a, 0x8e26dab7, 0xbfa43fad, 0x9de42c3a, 0x920d5078, 0xcc9b6a5f, 0x4662547e, 0x13c2f68d, 0xb8e890d8, 0xf75e2e39, 0xaff582c3, 0x80be9f5d, 0x937c69d0, 0x2da96fd5, 0x12b3cf25, 0x993bc8ac, 0x7da71018, 0x636ee89c, 0xbb7bdb3b, 0x7809cd26, 0x18f46e59, 0xb701ec9a, 0x9aa8834f, 0x6e65e695, 0xe67eaaff, 0xcf0821bc, 0xe8e6ef15, 0x9bd9bae7, 0x36ce4a6f, 0x09d4ea9f, 0x7cd629b0, 0xb2af31a4, 0x23312a3f, 0x9430c6a5, 0x66c035a2, 0xbc37744e, 0xcaa6fc82, 0xd0b0e090, 0xd81533a7, 0x984af104, 0xdaf741ec, 0x500e7fcd, 0xf62f1791, 0xd68d764d, 0xb04d43ef, 0x4d54ccaa, 0x04dfe496, 0xb5e39ed1, 0x881b4c6a, 0x1fb8c12c, 0x517f4665, 0xea049d5e, 0x355d018c, 0x7473fa87, 0x412efb0b, 0x1d5ab367, 0xd25292db, 0x5633e910, 0x47136dd6, 0x618c9ad7, 0x0c7a37a1, 0x148e59f8, 0x3c89eb13, 0x27eecea9, 0xc935b761, 0xe5ede11c, 0xb13c7a47, 0xdf599cd2, 0x733f55f2, 0xce791814, 0x37bf73c7, 0xcdea53f7, 0xaa5b5ffd, 0x6f14df3d, 0xdb867844, 0xf381caaf, 0xc43eb968, 0x342c3824, 0x405fc2a3, 0xc372161d, 0x250cbce2, 0x498b283c, 0x9541ff0d, 0x017139a8, 0xb3de080c, 0xe49cd8b4, 0xc1906456, 0x84617bcb, 0xb670d532, 0x5c74486c, 0x5742d0b8]; var T8 = [0xf4a75051, 0x4165537e, 0x17a4c31a, 0x275e963a, 0xab6bcb3b, 0x9d45f11f, 0xfa58abac, 0xe303934b, 0x30fa5520, 0x766df6ad, 0xcc769188, 0x024c25f5, 0xe5d7fc4f, 0x2acbd7c5, 0x35448026, 0x62a38fb5, 0xb15a49de, 0xba1b6725, 0xea0e9845, 0xfec0e15d, 0x2f7502c3, 0x4cf01281, 0x4697a38d, 0xd3f9c66b, 0x8f5fe703, 0x929c9515, 0x6d7aebbf, 0x5259da95, 0xbe832dd4, 0x7421d358, 0xe0692949, 0xc9c8448e, 0xc2896a75, 0x8e7978f4, 0x583e6b99, 0xb971dd27, 0xe14fb6be, 0x88ad17f0, 0x20ac66c9, 0xce3ab47d, 0xdf4a1863, 0x1a3182e5, 0x51336097, 0x537f4562, 0x6477e0b1, 0x6bae84bb, 0x81a01cfe, 0x082b94f9, 0x48685870, 0x45fd198f, 0xde6c8794, 0x7bf8b752, 0x73d323ab, 0x4b02e272, 0x1f8f57e3, 0x55ab2a66, 0xeb2807b2, 0xb5c2032f, 0xc57b9a86, 0x3708a5d3, 0x2887f230, 0xbfa5b223, 0x036aba02, 0x16825ced, 0xcf1c2b8a, 0x79b492a7, 0x07f2f0f3, 0x69e2a14e, 0xdaf4cd65, 0x05bed506, 0x34621fd1, 0xa6fe8ac4, 0x2e539d34, 0xf355a0a2, 0x8ae13205, 0xf6eb75a4, 0x83ec390b, 0x60efaa40, 0x719f065e, 0x6e1051bd, 0x218af93e, 0xdd063d96, 0x3e05aedd, 0xe6bd464d, 0x548db591, 0xc45d0571, 0x06d46f04, 0x5015ff60, 0x98fb2419, 0xbde997d6, 0x4043cc89, 0xd99e7767, 0xe842bdb0, 0x898b8807, 0x195b38e7, 0xc8eedb79, 0x7c0a47a1, 0x420fe97c, 0x841ec9f8, 0x00000000, 0x80868309, 0x2bed4832, 0x1170ac1e, 0x5a724e6c, 0x0efffbfd, 0x8538560f, 0xaed51e3d, 0x2d392736, 0x0fd9640a, 0x5ca62168, 0x5b54d19b, 0x362e3a24, 
0x0a67b10c, 0x57e70f93, 0xee96d2b4, 0x9b919e1b, 0xc0c54f80, 0xdc20a261, 0x774b695a, 0x121a161c, 0x93ba0ae2, 0xa02ae5c0, 0x22e0433c, 0x1b171d12, 0x090d0b0e, 0x8bc7adf2, 0xb6a8b92d, 0x1ea9c814, 0xf1198557, 0x75074caf, 0x99ddbbee, 0x7f60fda3, 0x01269ff7, 0x72f5bc5c, 0x663bc544, 0xfb7e345b, 0x4329768b, 0x23c6dccb, 0xedfc68b6, 0xe4f163b8, 0x31dccad7, 0x63851042, 0x97224013, 0xc6112084, 0x4a247d85, 0xbb3df8d2, 0xf93211ae, 0x29a16dc7, 0x9e2f4b1d, 0xb230f3dc, 0x8652ec0d, 0xc1e3d077, 0xb3166c2b, 0x70b999a9, 0x9448fa11, 0xe9642247, 0xfc8cc4a8, 0xf03f1aa0, 0x7d2cd856, 0x3390ef22, 0x494ec787, 0x38d1c1d9, 0xcaa2fe8c, 0xd40b3698, 0xf581cfa6, 0x7ade28a5, 0xb78e26da, 0xadbfa43f, 0x3a9de42c, 0x78920d50, 0x5fcc9b6a, 0x7e466254, 0x8d13c2f6, 0xd8b8e890, 0x39f75e2e, 0xc3aff582, 0x5d80be9f, 0xd0937c69, 0xd52da96f, 0x2512b3cf, 0xac993bc8, 0x187da710, 0x9c636ee8, 0x3bbb7bdb, 0x267809cd, 0x5918f46e, 0x9ab701ec, 0x4f9aa883, 0x956e65e6, 0xffe67eaa, 0xbccf0821, 0x15e8e6ef, 0xe79bd9ba, 0x6f36ce4a, 0x9f09d4ea, 0xb07cd629, 0xa4b2af31, 0x3f23312a, 0xa59430c6, 0xa266c035, 0x4ebc3774, 0x82caa6fc, 0x90d0b0e0, 0xa7d81533, 0x04984af1, 0xecdaf741, 0xcd500e7f, 0x91f62f17, 0x4dd68d76, 0xefb04d43, 0xaa4d54cc, 0x9604dfe4, 0xd1b5e39e, 0x6a881b4c, 0x2c1fb8c1, 0x65517f46, 0x5eea049d, 0x8c355d01, 0x877473fa, 0x0b412efb, 0x671d5ab3, 0xdbd25292, 0x105633e9, 0xd647136d, 0xd7618c9a, 0xa10c7a37, 0xf8148e59, 0x133c89eb, 0xa927eece, 0x61c935b7, 0x1ce5ede1, 0x47b13c7a, 0xd2df599c, 0xf2733f55, 0x14ce7918, 0xc737bf73, 0xf7cdea53, 0xfdaa5b5f, 0x3d6f14df, 0x44db8678, 0xaff381ca, 0x68c43eb9, 0x24342c38, 0xa3405fc2, 0x1dc37216, 0xe2250cbc, 0x3c498b28, 0x0d9541ff, 0xa8017139, 0x0cb3de08, 0xb4e49cd8, 0x56c19064, 0xcb84617b, 0x32b670d5, 0x6c5c7448, 0xb85742d0]; // Transformations for decryption key expansion var U1 = [0x00000000, 0x0e090d0b, 0x1c121a16, 0x121b171d, 0x3824342c, 0x362d3927, 0x24362e3a, 0x2a3f2331, 0x70486858, 0x7e416553, 0x6c5a724e, 0x62537f45, 0x486c5c74, 0x4665517f, 0x547e4662, 0x5a774b69, 0xe090d0b0, 0xee99ddbb, 0xfc82caa6, 0xf28bc7ad, 0xd8b4e49c, 0xd6bde997, 0xc4a6fe8a, 0xcaaff381, 0x90d8b8e8, 0x9ed1b5e3, 0x8ccaa2fe, 0x82c3aff5, 0xa8fc8cc4, 0xa6f581cf, 0xb4ee96d2, 0xbae79bd9, 0xdb3bbb7b, 0xd532b670, 0xc729a16d, 0xc920ac66, 0xe31f8f57, 0xed16825c, 0xff0d9541, 0xf104984a, 0xab73d323, 0xa57ade28, 0xb761c935, 0xb968c43e, 0x9357e70f, 0x9d5eea04, 0x8f45fd19, 0x814cf012, 0x3bab6bcb, 0x35a266c0, 0x27b971dd, 0x29b07cd6, 0x038f5fe7, 0x0d8652ec, 0x1f9d45f1, 0x119448fa, 0x4be30393, 0x45ea0e98, 0x57f11985, 0x59f8148e, 0x73c737bf, 0x7dce3ab4, 0x6fd52da9, 0x61dc20a2, 0xad766df6, 0xa37f60fd, 0xb16477e0, 0xbf6d7aeb, 0x955259da, 0x9b5b54d1, 0x894043cc, 0x87494ec7, 0xdd3e05ae, 0xd33708a5, 0xc12c1fb8, 0xcf2512b3, 0xe51a3182, 0xeb133c89, 0xf9082b94, 0xf701269f, 0x4de6bd46, 0x43efb04d, 0x51f4a750, 0x5ffdaa5b, 0x75c2896a, 0x7bcb8461, 0x69d0937c, 0x67d99e77, 0x3daed51e, 0x33a7d815, 0x21bccf08, 0x2fb5c203, 0x058ae132, 0x0b83ec39, 0x1998fb24, 0x1791f62f, 0x764dd68d, 0x7844db86, 0x6a5fcc9b, 0x6456c190, 0x4e69e2a1, 0x4060efaa, 0x527bf8b7, 0x5c72f5bc, 0x0605bed5, 0x080cb3de, 0x1a17a4c3, 0x141ea9c8, 0x3e218af9, 0x302887f2, 0x223390ef, 0x2c3a9de4, 0x96dd063d, 0x98d40b36, 0x8acf1c2b, 0x84c61120, 0xaef93211, 0xa0f03f1a, 0xb2eb2807, 0xbce2250c, 0xe6956e65, 0xe89c636e, 0xfa877473, 0xf48e7978, 0xdeb15a49, 0xd0b85742, 0xc2a3405f, 0xccaa4d54, 0x41ecdaf7, 0x4fe5d7fc, 0x5dfec0e1, 0x53f7cdea, 0x79c8eedb, 0x77c1e3d0, 0x65daf4cd, 0x6bd3f9c6, 0x31a4b2af, 0x3fadbfa4, 0x2db6a8b9, 0x23bfa5b2, 0x09808683, 0x07898b88, 0x15929c95, 0x1b9b919e, 0xa17c0a47, 0xaf75074c, 0xbd6e1051, 
0xb3671d5a, 0x99583e6b, 0x97513360, 0x854a247d, 0x8b432976, 0xd134621f, 0xdf3d6f14, 0xcd267809, 0xc32f7502, 0xe9105633, 0xe7195b38, 0xf5024c25, 0xfb0b412e, 0x9ad7618c, 0x94de6c87, 0x86c57b9a, 0x88cc7691, 0xa2f355a0, 0xacfa58ab, 0xbee14fb6, 0xb0e842bd, 0xea9f09d4, 0xe49604df, 0xf68d13c2, 0xf8841ec9, 0xd2bb3df8, 0xdcb230f3, 0xcea927ee, 0xc0a02ae5, 0x7a47b13c, 0x744ebc37, 0x6655ab2a, 0x685ca621, 0x42638510, 0x4c6a881b, 0x5e719f06, 0x5078920d, 0x0a0fd964, 0x0406d46f, 0x161dc372, 0x1814ce79, 0x322bed48, 0x3c22e043, 0x2e39f75e, 0x2030fa55, 0xec9ab701, 0xe293ba0a, 0xf088ad17, 0xfe81a01c, 0xd4be832d, 0xdab78e26, 0xc8ac993b, 0xc6a59430, 0x9cd2df59, 0x92dbd252, 0x80c0c54f, 0x8ec9c844, 0xa4f6eb75, 0xaaffe67e, 0xb8e4f163, 0xb6edfc68, 0x0c0a67b1, 0x02036aba, 0x10187da7, 0x1e1170ac, 0x342e539d, 0x3a275e96, 0x283c498b, 0x26354480, 0x7c420fe9, 0x724b02e2, 0x605015ff, 0x6e5918f4, 0x44663bc5, 0x4a6f36ce, 0x587421d3, 0x567d2cd8, 0x37a10c7a, 0x39a80171, 0x2bb3166c, 0x25ba1b67, 0x0f853856, 0x018c355d, 0x13972240, 0x1d9e2f4b, 0x47e96422, 0x49e06929, 0x5bfb7e34, 0x55f2733f, 0x7fcd500e, 0x71c45d05, 0x63df4a18, 0x6dd64713, 0xd731dcca, 0xd938d1c1, 0xcb23c6dc, 0xc52acbd7, 0xef15e8e6, 0xe11ce5ed, 0xf307f2f0, 0xfd0efffb, 0xa779b492, 0xa970b999, 0xbb6bae84, 0xb562a38f, 0x9f5d80be, 0x91548db5, 0x834f9aa8, 0x8d4697a3]; var U2 = [0x00000000, 0x0b0e090d, 0x161c121a, 0x1d121b17, 0x2c382434, 0x27362d39, 0x3a24362e, 0x312a3f23, 0x58704868, 0x537e4165, 0x4e6c5a72, 0x4562537f, 0x74486c5c, 0x7f466551, 0x62547e46, 0x695a774b, 0xb0e090d0, 0xbbee99dd, 0xa6fc82ca, 0xadf28bc7, 0x9cd8b4e4, 0x97d6bde9, 0x8ac4a6fe, 0x81caaff3, 0xe890d8b8, 0xe39ed1b5, 0xfe8ccaa2, 0xf582c3af, 0xc4a8fc8c, 0xcfa6f581, 0xd2b4ee96, 0xd9bae79b, 0x7bdb3bbb, 0x70d532b6, 0x6dc729a1, 0x66c920ac, 0x57e31f8f, 0x5ced1682, 0x41ff0d95, 0x4af10498, 0x23ab73d3, 0x28a57ade, 0x35b761c9, 0x3eb968c4, 0x0f9357e7, 0x049d5eea, 0x198f45fd, 0x12814cf0, 0xcb3bab6b, 0xc035a266, 0xdd27b971, 0xd629b07c, 0xe7038f5f, 0xec0d8652, 0xf11f9d45, 0xfa119448, 0x934be303, 0x9845ea0e, 0x8557f119, 0x8e59f814, 0xbf73c737, 0xb47dce3a, 0xa96fd52d, 0xa261dc20, 0xf6ad766d, 0xfda37f60, 0xe0b16477, 0xebbf6d7a, 0xda955259, 0xd19b5b54, 0xcc894043, 0xc787494e, 0xaedd3e05, 0xa5d33708, 0xb8c12c1f, 0xb3cf2512, 0x82e51a31, 0x89eb133c, 0x94f9082b, 0x9ff70126, 0x464de6bd, 0x4d43efb0, 0x5051f4a7, 0x5b5ffdaa, 0x6a75c289, 0x617bcb84, 0x7c69d093, 0x7767d99e, 0x1e3daed5, 0x1533a7d8, 0x0821bccf, 0x032fb5c2, 0x32058ae1, 0x390b83ec, 0x241998fb, 0x2f1791f6, 0x8d764dd6, 0x867844db, 0x9b6a5fcc, 0x906456c1, 0xa14e69e2, 0xaa4060ef, 0xb7527bf8, 0xbc5c72f5, 0xd50605be, 0xde080cb3, 0xc31a17a4, 0xc8141ea9, 0xf93e218a, 0xf2302887, 0xef223390, 0xe42c3a9d, 0x3d96dd06, 0x3698d40b, 0x2b8acf1c, 0x2084c611, 0x11aef932, 0x1aa0f03f, 0x07b2eb28, 0x0cbce225, 0x65e6956e, 0x6ee89c63, 0x73fa8774, 0x78f48e79, 0x49deb15a, 0x42d0b857, 0x5fc2a340, 0x54ccaa4d, 0xf741ecda, 0xfc4fe5d7, 0xe15dfec0, 0xea53f7cd, 0xdb79c8ee, 0xd077c1e3, 0xcd65daf4, 0xc66bd3f9, 0xaf31a4b2, 0xa43fadbf, 0xb92db6a8, 0xb223bfa5, 0x83098086, 0x8807898b, 0x9515929c, 0x9e1b9b91, 0x47a17c0a, 0x4caf7507, 0x51bd6e10, 0x5ab3671d, 0x6b99583e, 0x60975133, 0x7d854a24, 0x768b4329, 0x1fd13462, 0x14df3d6f, 0x09cd2678, 0x02c32f75, 0x33e91056, 0x38e7195b, 0x25f5024c, 0x2efb0b41, 0x8c9ad761, 0x8794de6c, 0x9a86c57b, 0x9188cc76, 0xa0a2f355, 0xabacfa58, 0xb6bee14f, 0xbdb0e842, 0xd4ea9f09, 0xdfe49604, 0xc2f68d13, 0xc9f8841e, 0xf8d2bb3d, 0xf3dcb230, 0xeecea927, 0xe5c0a02a, 0x3c7a47b1, 0x37744ebc, 0x2a6655ab, 0x21685ca6, 0x10426385, 0x1b4c6a88, 0x065e719f, 0x0d507892, 0x640a0fd9, 0x6f0406d4, 
0x72161dc3, 0x791814ce, 0x48322bed, 0x433c22e0, 0x5e2e39f7, 0x552030fa, 0x01ec9ab7, 0x0ae293ba, 0x17f088ad, 0x1cfe81a0, 0x2dd4be83, 0x26dab78e, 0x3bc8ac99, 0x30c6a594, 0x599cd2df, 0x5292dbd2, 0x4f80c0c5, 0x448ec9c8, 0x75a4f6eb, 0x7eaaffe6, 0x63b8e4f1, 0x68b6edfc, 0xb10c0a67, 0xba02036a, 0xa710187d, 0xac1e1170, 0x9d342e53, 0x963a275e, 0x8b283c49, 0x80263544, 0xe97c420f, 0xe2724b02, 0xff605015, 0xf46e5918, 0xc544663b, 0xce4a6f36, 0xd3587421, 0xd8567d2c, 0x7a37a10c, 0x7139a801, 0x6c2bb316, 0x6725ba1b, 0x560f8538, 0x5d018c35, 0x40139722, 0x4b1d9e2f, 0x2247e964, 0x2949e069, 0x345bfb7e, 0x3f55f273, 0x0e7fcd50, 0x0571c45d, 0x1863df4a, 0x136dd647, 0xcad731dc, 0xc1d938d1, 0xdccb23c6, 0xd7c52acb, 0xe6ef15e8, 0xede11ce5, 0xf0f307f2, 0xfbfd0eff, 0x92a779b4, 0x99a970b9, 0x84bb6bae, 0x8fb562a3, 0xbe9f5d80, 0xb591548d, 0xa8834f9a, 0xa38d4697]; var U3 = [0x00000000, 0x0d0b0e09, 0x1a161c12, 0x171d121b, 0x342c3824, 0x3927362d, 0x2e3a2436, 0x23312a3f, 0x68587048, 0x65537e41, 0x724e6c5a, 0x7f456253, 0x5c74486c, 0x517f4665, 0x4662547e, 0x4b695a77, 0xd0b0e090, 0xddbbee99, 0xcaa6fc82, 0xc7adf28b, 0xe49cd8b4, 0xe997d6bd, 0xfe8ac4a6, 0xf381caaf, 0xb8e890d8, 0xb5e39ed1, 0xa2fe8cca, 0xaff582c3, 0x8cc4a8fc, 0x81cfa6f5, 0x96d2b4ee, 0x9bd9bae7, 0xbb7bdb3b, 0xb670d532, 0xa16dc729, 0xac66c920, 0x8f57e31f, 0x825ced16, 0x9541ff0d, 0x984af104, 0xd323ab73, 0xde28a57a, 0xc935b761, 0xc43eb968, 0xe70f9357, 0xea049d5e, 0xfd198f45, 0xf012814c, 0x6bcb3bab, 0x66c035a2, 0x71dd27b9, 0x7cd629b0, 0x5fe7038f, 0x52ec0d86, 0x45f11f9d, 0x48fa1194, 0x03934be3, 0x0e9845ea, 0x198557f1, 0x148e59f8, 0x37bf73c7, 0x3ab47dce, 0x2da96fd5, 0x20a261dc, 0x6df6ad76, 0x60fda37f, 0x77e0b164, 0x7aebbf6d, 0x59da9552, 0x54d19b5b, 0x43cc8940, 0x4ec78749, 0x05aedd3e, 0x08a5d337, 0x1fb8c12c, 0x12b3cf25, 0x3182e51a, 0x3c89eb13, 0x2b94f908, 0x269ff701, 0xbd464de6, 0xb04d43ef, 0xa75051f4, 0xaa5b5ffd, 0x896a75c2, 0x84617bcb, 0x937c69d0, 0x9e7767d9, 0xd51e3dae, 0xd81533a7, 0xcf0821bc, 0xc2032fb5, 0xe132058a, 0xec390b83, 0xfb241998, 0xf62f1791, 0xd68d764d, 0xdb867844, 0xcc9b6a5f, 0xc1906456, 0xe2a14e69, 0xefaa4060, 0xf8b7527b, 0xf5bc5c72, 0xbed50605, 0xb3de080c, 0xa4c31a17, 0xa9c8141e, 0x8af93e21, 0x87f23028, 0x90ef2233, 0x9de42c3a, 0x063d96dd, 0x0b3698d4, 0x1c2b8acf, 0x112084c6, 0x3211aef9, 0x3f1aa0f0, 0x2807b2eb, 0x250cbce2, 0x6e65e695, 0x636ee89c, 0x7473fa87, 0x7978f48e, 0x5a49deb1, 0x5742d0b8, 0x405fc2a3, 0x4d54ccaa, 0xdaf741ec, 0xd7fc4fe5, 0xc0e15dfe, 0xcdea53f7, 0xeedb79c8, 0xe3d077c1, 0xf4cd65da, 0xf9c66bd3, 0xb2af31a4, 0xbfa43fad, 0xa8b92db6, 0xa5b223bf, 0x86830980, 0x8b880789, 0x9c951592, 0x919e1b9b, 0x0a47a17c, 0x074caf75, 0x1051bd6e, 0x1d5ab367, 0x3e6b9958, 0x33609751, 0x247d854a, 0x29768b43, 0x621fd134, 0x6f14df3d, 0x7809cd26, 0x7502c32f, 0x5633e910, 0x5b38e719, 0x4c25f502, 0x412efb0b, 0x618c9ad7, 0x6c8794de, 0x7b9a86c5, 0x769188cc, 0x55a0a2f3, 0x58abacfa, 0x4fb6bee1, 0x42bdb0e8, 0x09d4ea9f, 0x04dfe496, 0x13c2f68d, 0x1ec9f884, 0x3df8d2bb, 0x30f3dcb2, 0x27eecea9, 0x2ae5c0a0, 0xb13c7a47, 0xbc37744e, 0xab2a6655, 0xa621685c, 0x85104263, 0x881b4c6a, 0x9f065e71, 0x920d5078, 0xd9640a0f, 0xd46f0406, 0xc372161d, 0xce791814, 0xed48322b, 0xe0433c22, 0xf75e2e39, 0xfa552030, 0xb701ec9a, 0xba0ae293, 0xad17f088, 0xa01cfe81, 0x832dd4be, 0x8e26dab7, 0x993bc8ac, 0x9430c6a5, 0xdf599cd2, 0xd25292db, 0xc54f80c0, 0xc8448ec9, 0xeb75a4f6, 0xe67eaaff, 0xf163b8e4, 0xfc68b6ed, 0x67b10c0a, 0x6aba0203, 0x7da71018, 0x70ac1e11, 0x539d342e, 0x5e963a27, 0x498b283c, 0x44802635, 0x0fe97c42, 0x02e2724b, 0x15ff6050, 0x18f46e59, 0x3bc54466, 0x36ce4a6f, 0x21d35874, 0x2cd8567d, 0x0c7a37a1, 
0x017139a8, 0x166c2bb3, 0x1b6725ba, 0x38560f85, 0x355d018c, 0x22401397, 0x2f4b1d9e, 0x642247e9, 0x692949e0, 0x7e345bfb, 0x733f55f2, 0x500e7fcd, 0x5d0571c4, 0x4a1863df, 0x47136dd6, 0xdccad731, 0xd1c1d938, 0xc6dccb23, 0xcbd7c52a, 0xe8e6ef15, 0xe5ede11c, 0xf2f0f307, 0xfffbfd0e, 0xb492a779, 0xb999a970, 0xae84bb6b, 0xa38fb562, 0x80be9f5d, 0x8db59154, 0x9aa8834f, 0x97a38d46]; var U4 = [0x00000000, 0x090d0b0e, 0x121a161c, 0x1b171d12, 0x24342c38, 0x2d392736, 0x362e3a24, 0x3f23312a, 0x48685870, 0x4165537e, 0x5a724e6c, 0x537f4562, 0x6c5c7448, 0x65517f46, 0x7e466254, 0x774b695a, 0x90d0b0e0, 0x99ddbbee, 0x82caa6fc, 0x8bc7adf2, 0xb4e49cd8, 0xbde997d6, 0xa6fe8ac4, 0xaff381ca, 0xd8b8e890, 0xd1b5e39e, 0xcaa2fe8c, 0xc3aff582, 0xfc8cc4a8, 0xf581cfa6, 0xee96d2b4, 0xe79bd9ba, 0x3bbb7bdb, 0x32b670d5, 0x29a16dc7, 0x20ac66c9, 0x1f8f57e3, 0x16825ced, 0x0d9541ff, 0x04984af1, 0x73d323ab, 0x7ade28a5, 0x61c935b7, 0x68c43eb9, 0x57e70f93, 0x5eea049d, 0x45fd198f, 0x4cf01281, 0xab6bcb3b, 0xa266c035, 0xb971dd27, 0xb07cd629, 0x8f5fe703, 0x8652ec0d, 0x9d45f11f, 0x9448fa11, 0xe303934b, 0xea0e9845, 0xf1198557, 0xf8148e59, 0xc737bf73, 0xce3ab47d, 0xd52da96f, 0xdc20a261, 0x766df6ad, 0x7f60fda3, 0x6477e0b1, 0x6d7aebbf, 0x5259da95, 0x5b54d19b, 0x4043cc89, 0x494ec787, 0x3e05aedd, 0x3708a5d3, 0x2c1fb8c1, 0x2512b3cf, 0x1a3182e5, 0x133c89eb, 0x082b94f9, 0x01269ff7, 0xe6bd464d, 0xefb04d43, 0xf4a75051, 0xfdaa5b5f, 0xc2896a75, 0xcb84617b, 0xd0937c69, 0xd99e7767, 0xaed51e3d, 0xa7d81533, 0xbccf0821, 0xb5c2032f, 0x8ae13205, 0x83ec390b, 0x98fb2419, 0x91f62f17, 0x4dd68d76, 0x44db8678, 0x5fcc9b6a, 0x56c19064, 0x69e2a14e, 0x60efaa40, 0x7bf8b752, 0x72f5bc5c, 0x05bed506, 0x0cb3de08, 0x17a4c31a, 0x1ea9c814, 0x218af93e, 0x2887f230, 0x3390ef22, 0x3a9de42c, 0xdd063d96, 0xd40b3698, 0xcf1c2b8a, 0xc6112084, 0xf93211ae, 0xf03f1aa0, 0xeb2807b2, 0xe2250cbc, 0x956e65e6, 0x9c636ee8, 0x877473fa, 0x8e7978f4, 0xb15a49de, 0xb85742d0, 0xa3405fc2, 0xaa4d54cc, 0xecdaf741, 0xe5d7fc4f, 0xfec0e15d, 0xf7cdea53, 0xc8eedb79, 0xc1e3d077, 0xdaf4cd65, 0xd3f9c66b, 0xa4b2af31, 0xadbfa43f, 0xb6a8b92d, 0xbfa5b223, 0x80868309, 0x898b8807, 0x929c9515, 0x9b919e1b, 0x7c0a47a1, 0x75074caf, 0x6e1051bd, 0x671d5ab3, 0x583e6b99, 0x51336097, 0x4a247d85, 0x4329768b, 0x34621fd1, 0x3d6f14df, 0x267809cd, 0x2f7502c3, 0x105633e9, 0x195b38e7, 0x024c25f5, 0x0b412efb, 0xd7618c9a, 0xde6c8794, 0xc57b9a86, 0xcc769188, 0xf355a0a2, 0xfa58abac, 0xe14fb6be, 0xe842bdb0, 0x9f09d4ea, 0x9604dfe4, 0x8d13c2f6, 0x841ec9f8, 0xbb3df8d2, 0xb230f3dc, 0xa927eece, 0xa02ae5c0, 0x47b13c7a, 0x4ebc3774, 0x55ab2a66, 0x5ca62168, 0x63851042, 0x6a881b4c, 0x719f065e, 0x78920d50, 0x0fd9640a, 0x06d46f04, 0x1dc37216, 0x14ce7918, 0x2bed4832, 0x22e0433c, 0x39f75e2e, 0x30fa5520, 0x9ab701ec, 0x93ba0ae2, 0x88ad17f0, 0x81a01cfe, 0xbe832dd4, 0xb78e26da, 0xac993bc8, 0xa59430c6, 0xd2df599c, 0xdbd25292, 0xc0c54f80, 0xc9c8448e, 0xf6eb75a4, 0xffe67eaa, 0xe4f163b8, 0xedfc68b6, 0x0a67b10c, 0x036aba02, 0x187da710, 0x1170ac1e, 0x2e539d34, 0x275e963a, 0x3c498b28, 0x35448026, 0x420fe97c, 0x4b02e272, 0x5015ff60, 0x5918f46e, 0x663bc544, 0x6f36ce4a, 0x7421d358, 0x7d2cd856, 0xa10c7a37, 0xa8017139, 0xb3166c2b, 0xba1b6725, 0x8538560f, 0x8c355d01, 0x97224013, 0x9e2f4b1d, 0xe9642247, 0xe0692949, 0xfb7e345b, 0xf2733f55, 0xcd500e7f, 0xc45d0571, 0xdf4a1863, 0xd647136d, 0x31dccad7, 0x38d1c1d9, 0x23c6dccb, 0x2acbd7c5, 0x15e8e6ef, 0x1ce5ede1, 0x07f2f0f3, 0x0efffbfd, 0x79b492a7, 0x70b999a9, 0x6bae84bb, 0x62a38fb5, 0x5d80be9f, 0x548db591, 0x4f9aa883, 0x4697a38d]; function convertToInt32(bytes) { var result = []; for (var i = 0; i < bytes.length; i += 4) { 
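// Pack each group of four bytes big-endian into one 32-bit word (signed, since `<<` and `|`
// operate on 32-bit integers), e.g. [0x00, 0x00, 0x01, 0x00] -> 256 and
// [0x80, 0x00, 0x00, 0x00] -> -2147483648. Used below to turn key and block bytes into the
// word arrays that the AES key schedule and round functions operate on.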
result.push(bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]); } return result; } var AesJs = function (key) { if (!(this instanceof AesJs)) { throw Error('AES must be instanitated with `new`'); } Object.defineProperty(this, 'key', { value: coerceArray(key, true) }); this._prepare(); }; AesJs.prototype._prepare = function () { var rounds = numberOfRounds[this.key.length]; if (rounds == null) { throw new Error('invalid key size (must be 16, 24 or 32 bytes)'); } // encryption round keys this._Ke = []; // decryption round keys this._Kd = []; for (var i = 0; i <= rounds; i++) { this._Ke.push([0, 0, 0, 0]); this._Kd.push([0, 0, 0, 0]); } var roundKeyCount = (rounds + 1) * 4; var KC = this.key.length / 4; // convert the key into ints var tk = convertToInt32(this.key); // copy values into round key arrays var index; for (var i = 0; i < KC; i++) { index = i >> 2; this._Ke[index][i % 4] = tk[i]; this._Kd[rounds - index][i % 4] = tk[i]; } // key expansion (fips-197 section 5.2) var rconpointer = 0; var t = KC, tt; while (t < roundKeyCount) { tt = tk[KC - 1]; tk[0] ^= S[tt >> 16 & 0xFF] << 24 ^ S[tt >> 8 & 0xFF] << 16 ^ S[tt & 0xFF] << 8 ^ S[tt >> 24 & 0xFF] ^ rcon[rconpointer] << 24; rconpointer += 1; // key expansion (for non-256 bit) if (KC != 8) { for (var i = 1; i < KC; i++) { tk[i] ^= tk[i - 1]; } // key expansion for 256-bit keys is "slightly different" (fips-197) } else { for (var i = 1; i < KC / 2; i++) { tk[i] ^= tk[i - 1]; } tt = tk[KC / 2 - 1]; tk[KC / 2] ^= S[tt & 0xFF] ^ S[tt >> 8 & 0xFF] << 8 ^ S[tt >> 16 & 0xFF] << 16 ^ S[tt >> 24 & 0xFF] << 24; for (var i = KC / 2 + 1; i < KC; i++) { tk[i] ^= tk[i - 1]; } } // copy values into round key arrays var i = 0, r, c; while (i < KC && t < roundKeyCount) { r = t >> 2; c = t % 4; this._Ke[r][c] = tk[i]; this._Kd[rounds - r][c] = tk[i++]; t++; } } // inverse-cipher-ify the decryption round key (fips-197 section 5.3) for (var r = 1; r < rounds; r++) { for (var c = 0; c < 4; c++) { tt = this._Kd[r][c]; this._Kd[r][c] = U1[tt >> 24 & 0xFF] ^ U2[tt >> 16 & 0xFF] ^ U3[tt >> 8 & 0xFF] ^ U4[tt & 0xFF]; } } }; AesJs.prototype.encrypt = function (plaintext) { if (plaintext.length != 16) { throw new Error('invalid plaintext size (must be 16 bytes)'); } var rounds = this._Ke.length - 1; var a = [0, 0, 0, 0]; // convert plaintext to (ints ^ key) var t = convertToInt32(plaintext); for (var i = 0; i < 4; i++) { t[i] ^= this._Ke[0][i]; } // apply round transforms for (var r = 1; r < rounds; r++) { for (var i = 0; i < 4; i++) { a[i] = T1[t[i] >> 24 & 0xff] ^ T2[t[(i + 1) % 4] >> 16 & 0xff] ^ T3[t[(i + 2) % 4] >> 8 & 0xff] ^ T4[t[(i + 3) % 4] & 0xff] ^ this._Ke[r][i]; } t = a.slice(); } // the last round is special var result = createArray(16), tt; for (var i = 0; i < 4; i++) { tt = this._Ke[rounds][i]; result[4 * i] = (S[t[i] >> 24 & 0xff] ^ tt >> 24) & 0xff; result[4 * i + 1] = (S[t[(i + 1) % 4] >> 16 & 0xff] ^ tt >> 16) & 0xff; result[4 * i + 2] = (S[t[(i + 2) % 4] >> 8 & 0xff] ^ tt >> 8) & 0xff; result[4 * i + 3] = (S[t[(i + 3) % 4] & 0xff] ^ tt) & 0xff; } return result; }; AesJs.prototype.decrypt = function (ciphertext) { if (ciphertext.length != 16) { throw new Error('invalid ciphertext size (must be 16 bytes)'); } var rounds = this._Kd.length - 1; var a = [0, 0, 0, 0]; // convert plaintext to (ints ^ key) var t = convertToInt32(ciphertext); for (var i = 0; i < 4; i++) { t[i] ^= this._Kd[0][i]; } // apply round transforms for (var r = 1; r < rounds; r++) { for (var i = 0; i < 4; i++) { a[i] = T5[t[i] >> 24 & 0xff] ^ T6[t[(i + 3) % 4] 
>> 16 & 0xff] ^ T7[t[(i + 2) % 4] >> 8 & 0xff] ^ T8[t[(i + 1) % 4] & 0xff] ^ this._Kd[r][i]; } t = a.slice(); } // the last round is special var result = createArray(16), tt; for (var i = 0; i < 4; i++) { tt = this._Kd[rounds][i]; result[4 * i] = (Si[t[i] >> 24 & 0xff] ^ tt >> 24) & 0xff; result[4 * i + 1] = (Si[t[(i + 3) % 4] >> 16 & 0xff] ^ tt >> 16) & 0xff; result[4 * i + 2] = (Si[t[(i + 2) % 4] >> 8 & 0xff] ^ tt >> 8) & 0xff; result[4 * i + 3] = (Si[t[(i + 1) % 4] & 0xff] ^ tt) & 0xff; } return result; }; /** * Mode Of Operation - Electonic Codebook (ECB) */ var ModeOfOperationECB = function (key) { if (!(this instanceof ModeOfOperationECB)) { throw Error('AES must be instanitated with `new`'); } this.description = "Electronic Code Block"; this.name = "ecb"; this._aes = new AesJs(key); }; ModeOfOperationECB.prototype.encrypt = function (plaintext) { plaintext = coerceArray(plaintext); if (plaintext.length % 16 !== 0) { throw new Error('invalid plaintext size (must be multiple of 16 bytes)'); } var ciphertext = createArray(plaintext.length); var block = createArray(16); for (var i = 0; i < plaintext.length; i += 16) { copyArray(plaintext, block, 0, i, i + 16); block = this._aes.encrypt(block); copyArray(block, ciphertext, i); } return ciphertext; }; ModeOfOperationECB.prototype.decrypt = function (ciphertext) { ciphertext = coerceArray(ciphertext); if (ciphertext.length % 16 !== 0) { throw new Error('invalid ciphertext size (must be multiple of 16 bytes)'); } var plaintext = createArray(ciphertext.length); var block = createArray(16); for (var i = 0; i < ciphertext.length; i += 16) { copyArray(ciphertext, block, 0, i, i + 16); block = this._aes.decrypt(block); copyArray(block, plaintext, i); } return plaintext; }; /** * Mode Of Operation - Cipher Block Chaining (CBC) */ var ModeOfOperationCBC = function (key, iv) { if (!(this instanceof ModeOfOperationCBC)) { throw Error('AES must be instanitated with `new`'); } this.description = "Cipher Block Chaining"; this.name = "cbc"; if (!iv) { iv = createArray(16); } else if (iv.length != 16) { throw new Error('invalid initialation vector size (must be 16 bytes)'); } this._lastCipherblock = coerceArray(iv, true); this._aes = new AesJs(key); }; ModeOfOperationCBC.prototype.encrypt = function (plaintext) { plaintext = coerceArray(plaintext); if (plaintext.length % 16 !== 0) { throw new Error('invalid plaintext size (must be multiple of 16 bytes)'); } var ciphertext = createArray(plaintext.length); var block = createArray(16); for (var i = 0; i < plaintext.length; i += 16) { copyArray(plaintext, block, 0, i, i + 16); for (var j = 0; j < 16; j++) { block[j] ^= this._lastCipherblock[j]; } this._lastCipherblock = this._aes.encrypt(block); copyArray(this._lastCipherblock, ciphertext, i); } return ciphertext; }; ModeOfOperationCBC.prototype.decrypt = function (ciphertext) { ciphertext = coerceArray(ciphertext); if (ciphertext.length % 16 !== 0) { throw new Error('invalid ciphertext size (must be multiple of 16 bytes)'); } var plaintext = createArray(ciphertext.length); var block = createArray(16); for (var i = 0; i < ciphertext.length; i += 16) { copyArray(ciphertext, block, 0, i, i + 16); block = this._aes.decrypt(block); for (var j = 0; j < 16; j++) { plaintext[i + j] = block[j] ^ this._lastCipherblock[j]; } copyArray(ciphertext, this._lastCipherblock, 0, i, i + 16); } return plaintext; }; /** * Mode Of Operation - Cipher Feedback (CFB) */ var ModeOfOperationCFB = function (key, iv, segmentSize) { if (!(this instanceof ModeOfOperationCFB)) { throw 
Error('AES must be instanitated with `new`'); } this.description = "Cipher Feedback"; this.name = "cfb"; if (!iv) { iv = createArray(16); } else if (iv.length != 16) { throw new Error('invalid initialation vector size (must be 16 size)'); } if (!segmentSize) { segmentSize = 1; } this.segmentSize = segmentSize; this._shiftRegister = coerceArray(iv, true); this._aes = new AesJs(key); }; ModeOfOperationCFB.prototype.encrypt = function (plaintext) { if (plaintext.length % this.segmentSize != 0) { throw new Error('invalid plaintext size (must be segmentSize bytes)'); } var encrypted = coerceArray(plaintext, true); var xorSegment; for (var i = 0; i < encrypted.length; i += this.segmentSize) { xorSegment = this._aes.encrypt(this._shiftRegister); for (var j = 0; j < this.segmentSize; j++) { encrypted[i + j] ^= xorSegment[j]; } // Shift the register copyArray(this._shiftRegister, this._shiftRegister, 0, this.segmentSize); copyArray(encrypted, this._shiftRegister, 16 - this.segmentSize, i, i + this.segmentSize); } return encrypted; }; ModeOfOperationCFB.prototype.decrypt = function (ciphertext) { if (ciphertext.length % this.segmentSize != 0) { throw new Error('invalid ciphertext size (must be segmentSize bytes)'); } var plaintext = coerceArray(ciphertext, true); var xorSegment; for (var i = 0; i < plaintext.length; i += this.segmentSize) { xorSegment = this._aes.encrypt(this._shiftRegister); for (var j = 0; j < this.segmentSize; j++) { plaintext[i + j] ^= xorSegment[j]; } // Shift the register copyArray(this._shiftRegister, this._shiftRegister, 0, this.segmentSize); copyArray(ciphertext, this._shiftRegister, 16 - this.segmentSize, i, i + this.segmentSize); } return plaintext; }; /** * Mode Of Operation - Output Feedback (OFB) */ var ModeOfOperationOFB = function (key, iv) { if (!(this instanceof ModeOfOperationOFB)) { throw Error('AES must be instanitated with `new`'); } this.description = "Output Feedback"; this.name = "ofb"; if (!iv) { iv = createArray(16); } else if (iv.length != 16) { throw new Error('invalid initialation vector size (must be 16 bytes)'); } this._lastPrecipher = coerceArray(iv, true); this._lastPrecipherIndex = 16; this._aes = new AesJs(key); }; ModeOfOperationOFB.prototype.encrypt = function (plaintext) { var encrypted = coerceArray(plaintext, true); for (var i = 0; i < encrypted.length; i++) { if (this._lastPrecipherIndex === 16) { this._lastPrecipher = this._aes.encrypt(this._lastPrecipher); this._lastPrecipherIndex = 0; } encrypted[i] ^= this._lastPrecipher[this._lastPrecipherIndex++]; } return encrypted; }; // Decryption is symetric ModeOfOperationOFB.prototype.decrypt = ModeOfOperationOFB.prototype.encrypt; /** * Counter object for CTR common mode of operation */ var Counter = function (initialValue) { if (!(this instanceof Counter)) { throw Error('Counter must be instanitated with `new`'); } // We allow 0, but anything false-ish uses the default 1 if (initialValue !== 0 && !initialValue) { initialValue = 1; } if (typeof initialValue === 'number') { this._counter = createArray(16); this.setValue(initialValue); } else { this.setBytes(initialValue); } }; Counter.prototype.setValue = function (value) { if (typeof value !== 'number' || parseInt(value) != value) { throw new Error('invalid counter value (must be an integer)'); } // We cannot safely handle numbers beyond the safe range for integers if (value > Number.MAX_SAFE_INTEGER) { throw new Error('integer value out of safe range'); } for (var index = 15; index >= 0; --index) { this._counter[index] = value % 256; value = 
parseInt(value / 256); } }; Counter.prototype.setBytes = function (bytes) { bytes = coerceArray(bytes, true); if (bytes.length != 16) { throw new Error('invalid counter bytes size (must be 16 bytes)'); } this._counter = bytes; }; Counter.prototype.increment = function () { for (var i = 15; i >= 0; i--) { if (this._counter[i] === 255) { this._counter[i] = 0; } else { this._counter[i]++; break; } } }; /** * Mode Of Operation - Counter (CTR) */ var ModeOfOperationCTR = function (key, counter) { if (!(this instanceof ModeOfOperationCTR)) { throw Error('AES must be instanitated with `new`'); } this.description = "Counter"; this.name = "ctr"; if (!(counter instanceof Counter)) { counter = new Counter(counter); } this._counter = counter; this._remainingCounter = null; this._remainingCounterIndex = 16; this._aes = new AesJs(key); }; ModeOfOperationCTR.prototype.encrypt = function (plaintext) { var encrypted = coerceArray(plaintext, true); for (var i = 0; i < encrypted.length; i++) { if (this._remainingCounterIndex === 16) { this._remainingCounter = this._aes.encrypt(this._counter._counter); this._remainingCounterIndex = 0; this._counter.increment(); } encrypted[i] ^= this._remainingCounter[this._remainingCounterIndex++]; } return encrypted; }; // Decryption is symetric ModeOfOperationCTR.prototype.decrypt = ModeOfOperationCTR.prototype.encrypt; /////////////////////// // Padding // See:https://tools.ietf.org/html/rfc2315 function pkcs7pad(data) { data = coerceArray(data, true); var padder = 16 - data.length % 16; var result = createArray(data.length + padder); copyArray(data, result); for (var i = data.length; i < result.length; i++) { result[i] = padder; } return result; } function pkcs7strip(data) { data = coerceArray(data, true); if (data.length < 16) { throw new Error('PKCS#7 invalid length'); } var padder = data[data.length - 1]; if (padder > 16) { throw new Error('PKCS#7 padding byte out of range'); } var length = data.length - padder; for (var i = 0; i < padder; i++) { if (data[length + i] !== padder) { throw new Error('PKCS#7 invalid padding byte'); } } var result = createArray(length); copyArray(data, result, 0, 0, length); return result; } /////////////////////// // Exporting // The block cipher const aesjs = { AES: AesJs, Counter: Counter, ModeOfOperation: { ecb: ModeOfOperationECB, cbc: ModeOfOperationCBC, cfb: ModeOfOperationCFB, ofb: ModeOfOperationOFB, ctr: ModeOfOperationCTR }, utils: { hex: convertHex, utf8: convertUtf8 }, padding: { pkcs7: { pad: pkcs7pad, strip: pkcs7strip } }, _arrayTest: { coerceArray: coerceArray, createArray: createArray, copyArray: copyArray } }; function getNaluLength$2(data) { let length = data[3] | data[2] << 8 | data[1] << 16 | data[0] << 24; return length; } // aes-256-ctr 解密 function aes256ctrDecrypt(arrayBuffer, key, iv) { let isHevc = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false; key = new Uint8Array(key); iv = new Uint8Array(iv); const totalLength = arrayBuffer.byteLength; // 17(23)/27(39)[是否i帧] ,0/1(), 0,0,0, 0,0,0,0,[NALU],0,0,0,0,[NALU].... 
只需要解密NALU部分数据。 // 其中NALU部分需要跳过两个(nalu头 + 再加一个字节数据) let startIndex = 5; while (startIndex < totalLength) { let tempNaluLength = getNaluLength$2(arrayBuffer.slice(startIndex, startIndex + 4)); if (tempNaluLength > totalLength) { break; } let naluType = arrayBuffer[startIndex + 4]; // 这边只是判断了h264的加密规则,265的目前没有做处理 let needDecrypt = false; if (isHevc) { naluType = naluType >>> 1 & 0x3f; needDecrypt = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 16, 17, 18, 19, 20, 21].includes(naluType); } else { naluType = naluType & 0x1f; needDecrypt = naluType === 1 || naluType === 5; } // if (needDecrypt) { const tempNalu = arrayBuffer.slice(startIndex + 4 + 2, startIndex + 4 + tempNaluLength); let aesCtr = new aesjs.ModeOfOperation.ctr(key, new aesjs.Counter(iv)); const decryptMsg = aesCtr.decrypt(tempNalu); // release memory aesCtr = null; arrayBuffer.set(decryptMsg, startIndex + 4 + 2); } startIndex = startIndex + 4 + tempNaluLength; } return arrayBuffer; } function aes256ctrDecryptAacAudio(arrayBuffer, key, iv) { // const totalLength = arrayBuffer.byteLength; if (totalLength <= 30) { return arrayBuffer; } // const step = 30 + 2; const tempNalu = arrayBuffer.slice(step); let aesCtr = new aesjs.ModeOfOperation.ctr(key, new aesjs.Counter(iv)); const decryptMsg = aesCtr.decrypt(tempNalu); // release memory aesCtr = null; arrayBuffer.set(decryptMsg, step); return arrayBuffer; } function getM7SCryptoStreamKey(path) { return new Promise((resolve, reject) => { ajax.get(path).then(res => { resolve(res); }).catch(e => { reject(e); }); }); } /* eslint-disable no-bitwise, no-mixed-operators, complexity */ const DECRYPT = 0; const ROUND = 32; const BLOCK = 16; const Sbox = [0xd6, 0x90, 0xe9, 0xfe, 0xcc, 0xe1, 0x3d, 0xb7, 0x16, 0xb6, 0x14, 0xc2, 0x28, 0xfb, 0x2c, 0x05, 0x2b, 0x67, 0x9a, 0x76, 0x2a, 0xbe, 0x04, 0xc3, 0xaa, 0x44, 0x13, 0x26, 0x49, 0x86, 0x06, 0x99, 0x9c, 0x42, 0x50, 0xf4, 0x91, 0xef, 0x98, 0x7a, 0x33, 0x54, 0x0b, 0x43, 0xed, 0xcf, 0xac, 0x62, 0xe4, 0xb3, 0x1c, 0xa9, 0xc9, 0x08, 0xe8, 0x95, 0x80, 0xdf, 0x94, 0xfa, 0x75, 0x8f, 0x3f, 0xa6, 0x47, 0x07, 0xa7, 0xfc, 0xf3, 0x73, 0x17, 0xba, 0x83, 0x59, 0x3c, 0x19, 0xe6, 0x85, 0x4f, 0xa8, 0x68, 0x6b, 0x81, 0xb2, 0x71, 0x64, 0xda, 0x8b, 0xf8, 0xeb, 0x0f, 0x4b, 0x70, 0x56, 0x9d, 0x35, 0x1e, 0x24, 0x0e, 0x5e, 0x63, 0x58, 0xd1, 0xa2, 0x25, 0x22, 0x7c, 0x3b, 0x01, 0x21, 0x78, 0x87, 0xd4, 0x00, 0x46, 0x57, 0x9f, 0xd3, 0x27, 0x52, 0x4c, 0x36, 0x02, 0xe7, 0xa0, 0xc4, 0xc8, 0x9e, 0xea, 0xbf, 0x8a, 0xd2, 0x40, 0xc7, 0x38, 0xb5, 0xa3, 0xf7, 0xf2, 0xce, 0xf9, 0x61, 0x15, 0xa1, 0xe0, 0xae, 0x5d, 0xa4, 0x9b, 0x34, 0x1a, 0x55, 0xad, 0x93, 0x32, 0x30, 0xf5, 0x8c, 0xb1, 0xe3, 0x1d, 0xf6, 0xe2, 0x2e, 0x82, 0x66, 0xca, 0x60, 0xc0, 0x29, 0x23, 0xab, 0x0d, 0x53, 0x4e, 0x6f, 0xd5, 0xdb, 0x37, 0x45, 0xde, 0xfd, 0x8e, 0x2f, 0x03, 0xff, 0x6a, 0x72, 0x6d, 0x6c, 0x5b, 0x51, 0x8d, 0x1b, 0xaf, 0x92, 0xbb, 0xdd, 0xbc, 0x7f, 0x11, 0xd9, 0x5c, 0x41, 0x1f, 0x10, 0x5a, 0xd8, 0x0a, 0xc1, 0x31, 0x88, 0xa5, 0xcd, 0x7b, 0xbd, 0x2d, 0x74, 0xd0, 0x12, 0xb8, 0xe5, 0xb4, 0xb0, 0x89, 0x69, 0x97, 0x4a, 0x0c, 0x96, 0x77, 0x7e, 0x65, 0xb9, 0xf1, 0x09, 0xc5, 0x6e, 0xc6, 0x84, 0x18, 0xf0, 0x7d, 0xec, 0x3a, 0xdc, 0x4d, 0x20, 0x79, 0xee, 0x5f, 0x3e, 0xd7, 0xcb, 0x39, 0x48]; const CK = [0x00070e15, 0x1c232a31, 0x383f464d, 0x545b6269, 0x70777e85, 0x8c939aa1, 0xa8afb6bd, 0xc4cbd2d9, 0xe0e7eef5, 0xfc030a11, 0x181f262d, 0x343b4249, 0x50575e65, 0x6c737a81, 0x888f969d, 0xa4abb2b9, 0xc0c7ced5, 0xdce3eaf1, 0xf8ff060d, 0x141b2229, 0x30373e45, 0x4c535a61, 0x686f767d, 0x848b9299, 0xa0a7aeb5, 0xbcc3cad1, 0xd8dfe6ed, 0xf4fb0209, 
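/**
 * Layout assumed by the selective NALU decryptors in this file (aes256ctrDecrypt
 * above, sm4Decrypt and xorDecrypt further down) -- an English gloss of the
 * inline notes, for reference: the buffer is an FLV video tag body, i.e.
 *   byte 0      frame/codec byte (0x17/0x27 marks a key frame, per the 17(23)/27(39) note)
 *   byte 1      AVC/HEVC packet type
 *   bytes 2-4   composition time
 * followed by repeating [4-byte big-endian NALU length][NALU]. Only slice NAL
 * units are decrypted (H.264 types 1 and 5; the listed H.265 types), and within
 * each NALU the first two bytes are left in the clear before the cipher is
 * applied, so for a NALU of length L starting at offset s the decryptors
 * rewrite bytes s+4+2 .. s+4+L-1 in place -- mirroring startIndex = 5 and the
 * slice(startIndex + 4 + 2, ...) calls in the code.
 */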
0x10171e25, 0x2c333a41, 0x484f565d, 0x646b7279]; /** * 16 进制串转字节数组 */ function hexToArray(str) { const arr = []; for (let i = 0, len = str.length; i < len; i += 2) { arr.push(parseInt(str.substr(i, 2), 16)); } return arr; } /** * 字节数组转 16 进制串 */ function ArrayToHex(arr) { return arr.map(item => { item = item.toString(16); return item.length === 1 ? '0' + item : item; }).join(''); } /** * utf8 串转字节数组 */ function utf8ToArray(str) { const arr = []; for (let i = 0, len = str.length; i < len; i++) { const point = str.codePointAt(i); if (point <= 0x007f) { // 单字节,标量值:00000000 00000000 0zzzzzzz arr.push(point); } else if (point <= 0x07ff) { // 双字节,标量值:00000000 00000yyy yyzzzzzz arr.push(0xc0 | point >>> 6); // 110yyyyy(0xc0-0xdf) arr.push(0x80 | point & 0x3f); // 10zzzzzz(0x80-0xbf) } else if (point <= 0xD7FF || point >= 0xE000 && point <= 0xFFFF) { // 三字节:标量值:00000000 xxxxyyyy yyzzzzzz arr.push(0xe0 | point >>> 12); // 1110xxxx(0xe0-0xef) arr.push(0x80 | point >>> 6 & 0x3f); // 10yyyyyy(0x80-0xbf) arr.push(0x80 | point & 0x3f); // 10zzzzzz(0x80-0xbf) } else if (point >= 0x010000 && point <= 0x10FFFF) { // 四字节:标量值:000wwwxx xxxxyyyy yyzzzzzz i++; arr.push(0xf0 | point >>> 18 & 0x1c); // 11110www(0xf0-0xf7) arr.push(0x80 | point >>> 12 & 0x3f); // 10xxxxxx(0x80-0xbf) arr.push(0x80 | point >>> 6 & 0x3f); // 10yyyyyy(0x80-0xbf) arr.push(0x80 | point & 0x3f); // 10zzzzzz(0x80-0xbf) } else { // 五、六字节,暂时不支持 arr.push(point); throw new Error('input is not supported'); } } return arr; } /** * 字节数组转 utf8 串 */ function arrayToUtf8(arr) { const str = []; for (let i = 0, len = arr.length; i < len; i++) { if (arr[i] >= 0xf0 && arr[i] <= 0xf7) { // 四字节 str.push(String.fromCodePoint(((arr[i] & 0x07) << 18) + ((arr[i + 1] & 0x3f) << 12) + ((arr[i + 2] & 0x3f) << 6) + (arr[i + 3] & 0x3f))); i += 3; } else if (arr[i] >= 0xe0 && arr[i] <= 0xef) { // 三字节 str.push(String.fromCodePoint(((arr[i] & 0x0f) << 12) + ((arr[i + 1] & 0x3f) << 6) + (arr[i + 2] & 0x3f))); i += 2; } else if (arr[i] >= 0xc0 && arr[i] <= 0xdf) { // 双字节 str.push(String.fromCodePoint(((arr[i] & 0x1f) << 6) + (arr[i + 1] & 0x3f))); i++; } else { // 单字节 str.push(String.fromCodePoint(arr[i])); } } return str.join(''); } /** * 32 比特循环左移 */ function rotl(x, n) { const s = n & 31; return x << s | x >>> 32 - s; } /** * 非线性变换 */ function byteSub(a) { return (Sbox[a >>> 24 & 0xFF] & 0xFF) << 24 | (Sbox[a >>> 16 & 0xFF] & 0xFF) << 16 | (Sbox[a >>> 8 & 0xFF] & 0xFF) << 8 | Sbox[a & 0xFF] & 0xFF; } /** * 线性变换,加密/解密用 */ function l1(b) { return b ^ rotl(b, 2) ^ rotl(b, 10) ^ rotl(b, 18) ^ rotl(b, 24); } /** * 线性变换,生成轮密钥用 */ function l2(b) { return b ^ rotl(b, 13) ^ rotl(b, 23); } /** * 以一组 128 比特进行加密/解密操作 */ function sms4Crypt(input, output, roundKey) { const x = new Array(4); // 字节数组转成字数组(此处 1 字 = 32 比特) const tmp = new Array(4); for (let i = 0; i < 4; i++) { tmp[0] = input[4 * i] & 0xff; tmp[1] = input[4 * i + 1] & 0xff; tmp[2] = input[4 * i + 2] & 0xff; tmp[3] = input[4 * i + 3] & 0xff; x[i] = tmp[0] << 24 | tmp[1] << 16 | tmp[2] << 8 | tmp[3]; } // x[i + 4] = x[i] ^ l1(byteSub(x[i + 1] ^ x[i + 2] ^ x[i + 3] ^ roundKey[i])) for (let r = 0, mid; r < 32; r += 4) { mid = x[1] ^ x[2] ^ x[3] ^ roundKey[r + 0]; x[0] ^= l1(byteSub(mid)); // x[4] mid = x[2] ^ x[3] ^ x[0] ^ roundKey[r + 1]; x[1] ^= l1(byteSub(mid)); // x[5] mid = x[3] ^ x[0] ^ x[1] ^ roundKey[r + 2]; x[2] ^= l1(byteSub(mid)); // x[6] mid = x[0] ^ x[1] ^ x[2] ^ roundKey[r + 3]; x[3] ^= l1(byteSub(mid)); // x[7] } // 反序变换 for (let j = 0; j < 16; j += 4) { output[j] = x[3 - j / 4] >>> 24 & 0xff; output[j + 
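/**
 * Note on the SM4 core around this point, written out for clarity: each of the
 * 32 rounds computes
 *   X[i+4] = X[i] ^ L1( tau( X[i+1] ^ X[i+2] ^ X[i+3] ^ rk[i] ) ),  i = 0..31
 * where tau applies Sbox to every byte (byteSub), L1 is the data rotation and
 * L2 the key-schedule rotation. The ciphertext is the last four words in
 * reverse order, which is why the output loop indexes x[3 - j / 4]. Decryption
 * reuses the same rounds with the round keys reversed (see the cryptFlag
 * handling in sms4KeyExt below).
 */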
1] = x[3 - j / 4] >>> 16 & 0xff; output[j + 2] = x[3 - j / 4] >>> 8 & 0xff; output[j + 3] = x[3 - j / 4] & 0xff; } } /** * 密钥扩展算法 */ function sms4KeyExt(key, roundKey, cryptFlag) { const x = new Array(4); // 字节数组转成字数组(此处 1 字 = 32 比特) const tmp = new Array(4); for (let i = 0; i < 4; i++) { tmp[0] = key[0 + 4 * i] & 0xff; tmp[1] = key[1 + 4 * i] & 0xff; tmp[2] = key[2 + 4 * i] & 0xff; tmp[3] = key[3 + 4 * i] & 0xff; x[i] = tmp[0] << 24 | tmp[1] << 16 | tmp[2] << 8 | tmp[3]; } // 与系统参数做异或 x[0] ^= 0xa3b1bac6; x[1] ^= 0x56aa3350; x[2] ^= 0x677d9197; x[3] ^= 0xb27022dc; // roundKey[i] = x[i + 4] = x[i] ^ l2(byteSub(x[i + 1] ^ x[i + 2] ^ x[i + 3] ^ CK[i])) for (let r = 0, mid; r < 32; r += 4) { mid = x[1] ^ x[2] ^ x[3] ^ CK[r + 0]; roundKey[r + 0] = x[0] ^= l2(byteSub(mid)); // x[4] mid = x[2] ^ x[3] ^ x[0] ^ CK[r + 1]; roundKey[r + 1] = x[1] ^= l2(byteSub(mid)); // x[5] mid = x[3] ^ x[0] ^ x[1] ^ CK[r + 2]; roundKey[r + 2] = x[2] ^= l2(byteSub(mid)); // x[6] mid = x[0] ^ x[1] ^ x[2] ^ CK[r + 3]; roundKey[r + 3] = x[3] ^= l2(byteSub(mid)); // x[7] } // 解密时使用反序的轮密钥 if (cryptFlag === DECRYPT) { for (let r = 0, mid; r < 16; r++) { mid = roundKey[r]; roundKey[r] = roundKey[31 - r]; roundKey[31 - r] = mid; } } } function sm4(inArray, key, cryptFlag) { let { padding = 'pkcs#7', mode, iv = [], output = 'string' } = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; // 默认走 ECB 模式 if (mode === 'cbc') { // CBC 模式, if (typeof iv === 'string') iv = hexToArray(iv); if (iv.length !== 128 / 8) { // iv 不是 128 比特 throw new Error('iv is invalid'); } } // 检查 key if (typeof key === 'string') key = hexToArray(key); if (key.length !== 128 / 8) { // key 不是 128 比特 throw new Error('key is invalid'); } // 检查输入 if (typeof inArray === 'string') { if (cryptFlag !== DECRYPT) { // 加密,输入为 utf8 串 inArray = utf8ToArray(inArray); } else { // 解密,输入为 16 进制串 inArray = hexToArray(inArray); } } else { inArray = [...inArray]; } // 新增填充,sm4 是 16 个字节一个分组,所以统一走到 pkcs#7 if ((padding === 'pkcs#5' || padding === 'pkcs#7') && cryptFlag !== DECRYPT) { const paddingCount = BLOCK - inArray.length % BLOCK; for (let i = 0; i < paddingCount; i++) inArray.push(paddingCount); } // 生成轮密钥 const roundKey = new Array(ROUND); sms4KeyExt(key, roundKey, cryptFlag); const outArray = []; let lastVector = iv; let restLen = inArray.length; let point = 0; while (restLen >= BLOCK) { const input = inArray.slice(point, point + 16); const output = new Array(16); if (mode === 'cbc') { for (let i = 0; i < BLOCK; i++) { if (cryptFlag !== DECRYPT) { // 加密过程在组加密前进行异或 input[i] ^= lastVector[i]; } } } sms4Crypt(input, output, roundKey); for (let i = 0; i < BLOCK; i++) { if (mode === 'cbc') { if (cryptFlag === DECRYPT) { // 解密过程在组解密后进行异或 output[i] ^= lastVector[i]; } } outArray[point + i] = output[i]; } if (mode === 'cbc') { if (cryptFlag !== DECRYPT) { // 使用上一次输出作为加密向量 lastVector = output; } else { // 使用上一次输入作为解密向量 lastVector = input; } } restLen -= BLOCK; point += BLOCK; } // 去除填充,sm4 是 16 个字节一个分组,所以统一走到 pkcs#7 if ((padding === 'pkcs#5' || padding === 'pkcs#7') && cryptFlag === DECRYPT) { const len = outArray.length; const paddingCount = outArray[len - 1]; for (let i = 1; i <= paddingCount; i++) { if (outArray[len - i] !== paddingCount) throw new Error('padding is invalid'); } outArray.splice(len - paddingCount, paddingCount); } // 调整输出 if (output !== 'array') { if (cryptFlag !== DECRYPT) { // 加密,输出转 16 进制串 return ArrayToHex(outArray); } else { // 解密,输出转 utf8 串 return arrayToUtf8(outArray); } } else { return outArray; } } // module.exports = { // 
encrypt(inArray, key, options) { // return sm4(inArray, key, 1, options) // }, // // sm4.decrypt(frame, key, { mode: 'cbc', iv: key, output: 'array' }) // decrypt(inArray, key, options) { // return sm4(inArray, key, 0, options) // } // } function getNaluLength$1(data) { let length = data[3] | data[2] << 8 | data[1] << 16 | data[0] << 24; return length; } function sm4Decrypt(arrayBuffer, key) { let isHevc = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false; const totalLength = arrayBuffer.byteLength; // 17(23)/27(39)[是否i帧] ,0/1(), 0,0,0, 0,0,0,0,[NALU],0,0,0,0,[NALU].... 只需要解密NALU部分数据。 // 其中NALU部分需要跳过两个(nalu头 + 再加一个字节数据) let startIndex = 5; while (startIndex < totalLength) { let tempNaluLength = getNaluLength$1(arrayBuffer.slice(startIndex, startIndex + 4)); if (tempNaluLength > totalLength) { break; } // let naluType = arrayBuffer[startIndex + 4]; let needDecrypt = false; if (isHevc) { naluType = naluType >>> 1 & 0x3f; needDecrypt = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 16, 17, 18, 19, 20, 21].includes(naluType); } else { naluType = naluType & 0x1f; needDecrypt = naluType === 1 || naluType === 5; } // if (needDecrypt) { const tempNalu = arrayBuffer.slice(startIndex + 4 + 2, startIndex + 4 + tempNaluLength); const decryptMsg = sm4(tempNalu, key, 0, { padding: 'none', output: 'array' }); arrayBuffer.set(decryptMsg, startIndex + 4 + 2); } startIndex = startIndex + 4 + tempNaluLength; } return arrayBuffer; } function decrypt(arrayBuffer, key, iv) { // start with 2 index, for (let i = 2; i < arrayBuffer.length; ++i) { const ii = i - 2; const a = key[ii % key.length]; const b = iv[ii % iv.length]; arrayBuffer[i] = arrayBuffer[i] ^ (a ^ b); } return arrayBuffer; } function getNaluLength(data) { let length = data[3] | data[2] << 8 | data[1] << 16 | data[0] << 24; return length; } function xorDecrypt(arrayBuffer, key, iv) { let isHevc = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false; const totalLength = arrayBuffer.byteLength; // 17(23)/27(39)[是否i帧] ,0/1(), 0,0,0, 0,0,0,0,[NALU],0,0,0,0,[NALU].... 
只需要解密NALU部分数据。 // 其中NALU部分需要跳过两个(nalu头 + 再加一个字节数据) let startIndex = 5; while (startIndex < totalLength) { let tempNaluLength = getNaluLength(arrayBuffer.slice(startIndex, startIndex + 4)); if (tempNaluLength > totalLength) { break; } // let naluType = arrayBuffer[startIndex + 4]; let needDecrypt = false; if (isHevc) { naluType = naluType >>> 1 & 0x3f; needDecrypt = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 16, 17, 18, 19, 20, 21].includes(naluType); } else { naluType = naluType & 0x1f; needDecrypt = naluType === 1 || naluType === 5; } // if (needDecrypt) { const tempNalu = arrayBuffer.slice(startIndex + 4, startIndex + 4 + tempNaluLength); // 跳过2个字段 const decryptMsg = decrypt(tempNalu, key, iv); arrayBuffer.set(decryptMsg, startIndex + 4); } startIndex = startIndex + 4 + tempNaluLength; } return arrayBuffer; } class CommonLoader extends Emitter { constructor(player) { super(); this.TAG_NAME = 'CommonDemux'; this.player = player; this.stopId = null; this.firstTimestamp = null; this.startTimestamp = null; this.preDelayTimestamp = null; this.preLoopTimestamp = null; this.bufferStartDts = null; this.bufferStartLocalTs = null; this.preIframeTs = null; this.preFrameTs = null; this.preTimestamp = null; this.preTimestampDuration = 0; this.prevPayloadBufferSize = 0; this.isStreamTsMoreThanLocal = false; this.delay = -1; this.pushLatestDelay = -1; this.bufferList = []; this.historyIntervalDiffTimeList = []; this.playbackStreamFps = null; this.playbackStreamAudioFps = null; this.playbackStreamVideoFps = null; this.dropping = false; this.isPushDropping = false; this.nalUnitSize = null; this.initInterval(); this.player.debug.log('CommonDemux', 'init'); } destroy() { this.bufferList = []; this.historyIntervalDiffTimeList = []; this.playbackStreamFps = null; this.playbackStreamAudioFps = null; this.playbackStreamVideoFps = null; this.clearStopInterval(); this.firstTimestamp = null; this.startTimestamp = null; this.bufferStartDts = null; this.bufferStartLocalTs = null; this.preDelayTimestamp = null; this.preLoopTimestamp = null; this.preIframeTs = null; this.preTimestamp = null; this.preTimestampDuration = 0; this.prevPayloadBufferSize = 0; this.isStreamTsMoreThanLocal = false; this.delay = -1; this.pushLatestDelay = -1; this.dropping = false; this.isPushDropping = false; this.nalUnitSize = null; this.off(); this.player.debug.log('CommonDemux', 'destroy'); } isDropping() { return this.dropping || this.isPushDropping; } getDelay(timestamp, type) { if (!timestamp || !this.player.isDemuxDecodeFirstIIframeInit()) { return -1; } if (type === MEDIA_TYPE.audio) { return this.delay; } // if (this.preDelayTimestamp && this.preDelayTimestamp > timestamp) { if (this.preDelayTimestamp - timestamp > 1000) { this.player.debug.warn('CommonDemux', `getDelay() and preDelayTimestamp is ${this.preDelayTimestamp} > timestamp is ${timestamp} more than ${this.preDelayTimestamp - timestamp}ms and return ${this.delay}`); } this.preDelayTimestamp = timestamp; return this.delay; } if (!this.firstTimestamp) { this.firstTimestamp = timestamp; this.startTimestamp = Date.now(); this.delay = -1; } else { if (timestamp) { const localTimestamp = Date.now() - this.startTimestamp; const timeTimestamp = timestamp - this.firstTimestamp; // console.error(`localTimestamp is ${localTimestamp}, timeTimestamp is ${timeTimestamp}`) if (localTimestamp >= timeTimestamp) { this.isStreamTsMoreThanLocal = false; this.delay = localTimestamp - timeTimestamp; } else { // maybe some stream ts more large than local ts this.isStreamTsMoreThanLocal = true; this.delay = 
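/**
 * How the delay bookkeeping here behaves, as a small worked example with
 * arbitrarily chosen numbers: the first video timestamp, say ts = 1000, pins
 * firstTimestamp and startTimestamp. If a frame with ts = 1500 is handled
 * 800 ms of wall-clock time later, localTimestamp = 800 and timeTimestamp = 500,
 * so delay = 800 - 500 = 300 ms: buffered media is running 300 ms behind real
 * time. When the stream clock outruns the local clock the difference is still
 * stored as a positive delay, with isStreamTsMoreThanLocal flagging that case.
 */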
timeTimestamp - localTimestamp; } } } this.preDelayTimestamp = timestamp; return this.delay; } getDelayNotUpdateDelay(timestamp, type) { if (!timestamp || !this.player.isDemuxDecodeFirstIIframeInit()) { return -1; } if (type === MEDIA_TYPE.audio) { return this.pushLatestDelay; } if (this.preDelayTimestamp && this.preDelayTimestamp - timestamp > 1000) { this.player.debug.warn('CommonDemux', `getDelayNotUpdateDelay() and preDelayTimestamp is ${this.preDelayTimestamp} > timestamp is ${timestamp} more than ${this.preDelayTimestamp - timestamp}ms and return -1`); return -1; } if (!this.firstTimestamp) { return -1; } else { let delay = -1; if (timestamp) { const localTimestamp = Date.now() - this.startTimestamp; const timeTimestamp = timestamp - this.firstTimestamp; if (localTimestamp >= timeTimestamp) { delay = localTimestamp - timeTimestamp; } else { // maybe some stream ts more large than local ts delay = timeTimestamp - localTimestamp; } } return delay; } } resetDelay() { this.firstTimestamp = null; this.startTimestamp = null; this.delay = -1; this.dropping = false; } resetAllDelay() { this.resetDelay(); this.preDelayTimestamp = null; } // initInterval() { if (this.player.isUseHls265()) { this.player.debug.log('CommonDemux', `initInterval() and is hls and support hls265 so return`); return; } if (this.player.getStreamType().indexOf(PLAYER_STREAM_TYPE.worker) !== -1) { this.player.debug.log('CommonDemux', `initInterval() and is worker stream so return`); return; } if (this.player.isPlaybackCacheBeforeDecodeForFpsRender()) { this.player.debug.log('CommonDemux', `initInterval() and playback and playbackIsCacheBeforeDecodeForFpsRender is true so return`); return; } this.player.debug.log('CommonDemux', `setInterval()`); this._loop(); this.stopId = setInterval(() => { let nowTime = new Date().getTime(); if (!this.preLoopTimestamp) { this.preLoopTimestamp = nowTime; } const diffTime = nowTime - this.preLoopTimestamp; this.updateHistoryIntervalDiffTimeList(diffTime); if (diffTime > 100) { this.player.debug.warn('CommonDemux', `loop demux diff time is ${diffTime}`); } this._loop(); this.preLoopTimestamp = new Date().getTime(); }, DEMUX_LOOP_INTERVAL_TIMES); } clearStopInterval() { if (this.stopId) { clearInterval(this.stopId); this.stopId = null; } } updateHistoryIntervalDiffTimeList(diffTime) { if (this.historyIntervalDiffTimeList.length > 5) { this.historyIntervalDiffTimeList.shift(); } this.historyIntervalDiffTimeList.push(diffTime); } isHistoryIntervalDiffTimeAllLarge() { if (this.historyIntervalDiffTimeList.length < 5) { return false; } for (let i = 0; i < this.historyIntervalDiffTimeList.length; i++) { // less than 900ms if (this.historyIntervalDiffTimeList[i] < 900) { return false; } } return true; } // initPlaybackCacheLoop() { this.clearStopInterval(); const loop = () => { let data = null; if (this.bufferList.length) { data = this.bufferList.shift(); this._doDecoderDecode(data); } }; loop(); const fragDuration = Math.ceil(1000 / (this.playbackStreamFps * this.player.getPlaybackRate())); this.player.debug.log('CommonDemux', `initPlaybackCacheLoop() and fragDuration is ${fragDuration}, playbackStreamFps is ${this.playbackStreamFps}, playbackRate is ${this.player.getPlaybackRate()}`); this.stopId = setInterval(loop, fragDuration); } _loop() { let data; const videoBuffer = this.player._opt.videoBuffer; const videoBufferDelay = this.player._opt.videoBufferDelay; const isPlayer = this.player._opt.playType === PLAY_TYPE.player; if (this.bufferList.length) { if (this.isPushDropping) { 
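/**
 * Sketch of the drain policy implemented by initInterval()/_loop() here (a
 * summary, not a spec): every DEMUX_LOOP_INTERVAL_TIMES ms the loop inspects
 * bufferList. Playback, MSE playback or videoBuffer === 0 drains everything
 * immediately; otherwise frames are decoded while their delay stays above
 * videoBuffer, and once the delay exceeds videoBuffer + videoBufferDelay with
 * an I-frame available the loop switches to dropping until it reaches an
 * I-frame whose delay is back under getNotDroppingDelayTs().
 */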
this.player.debug.warn('CommonDemux', `_loop isPushDropping is true and bufferList length is ${this.bufferList.length}`); return; } if (this.dropping) { data = this.bufferList.shift(); this.player.debug.warn('CommonDemux', `_loop is dropping and data.ts is ${data.ts}, data.type is ${data.type}, data.isIFrame is ${data.isIFrame}, delay is ${this.delay} ,buffer list is ${this.bufferList.length}`); while (!data.isIFrame && this.bufferList.length) { data = this.bufferList.shift(); } const tempDelay = this.getDelayNotUpdateDelay(data.ts, data.type); // i frame // min delay if (data.isIFrame && tempDelay <= this.getNotDroppingDelayTs()) { this.player.debug.log('CommonDemux', `_loop data isIFrame is true and delay is ${this.delay}`); this.dropping = false; this._doDecoderDecode(data); this._decodeNext(data); } } else { if (this.player.isPlayback() || this.player.isPlayUseMSE() || videoBuffer === 0) { while (this.bufferList.length) { data = this.bufferList.shift(); this._doDecoderDecode(data); } } else { data = this.bufferList[0]; if (this.getDelay(data.ts, data.type) === -1) { this.player.debug.log('CommonDemux', `delay is -1 and data.ts is ${data.ts} data.type is ${data.type}`); this.bufferList.shift(); this._doDecoderDecode(data); this._decodeNext(data); } else if (this.delay > videoBufferDelay + videoBuffer && isPlayer) { if (this.hasIframeInBufferList()) { this.player.debug.warn('CommonDemux', `_loop delay is ${this.delay}, set dropping is true`); this.resetAllDelay(); this.dropping = true; this.player.updateStats({ isDropping: true }); } else { this.bufferList.shift(); this._doDecoderDecode(data); this._decodeNext(data); } } else { while (this.bufferList.length) { data = this.bufferList[0]; if (this.getDelay(data.ts, data.type) > videoBuffer) { this.bufferList.shift(); this._doDecoderDecode(data); } else { if (this.delay < 0) { this.player.debug.warn('CommonDemux', `_loop delay is ${this.delay} bufferList is ${this.bufferList}`); } break; } } } } } } else { if (this.delay !== -1) { this.player.debug.log('CommonDemux', `loop() bufferList is empty and reset delay`); } this.resetAllDelay(); } } // _doDecode(payload, type, ts, isIFrame) { let cts = arguments.length > 4 && arguments[4] !== undefined ? 
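/**
 * Routing note for _doDecode() just below (descriptive only): in live player
 * mode, frames go through pushBuffer() when MSE or WebCodecs decoding is
 * active, while the wasm path hands payloads straight to decoderWorker. In
 * playback mode the "only decode I-frames" and cache-before-decode options
 * decide whether a frame is buffered or decoded immediately, and
 * _doDecodeByHls() bypasses the buffer entirely by calling _doDecoderDecode().
 */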
arguments[4] : 0; const player = this.player; let options = { ts: ts, cts: cts, type: type, isIFrame: false }; if (this.player.isPlayer()) { if (type === MEDIA_TYPE.video) { if (player._opt.playType === PLAY_TYPE.player) { this.calcNetworkDelay(ts); } } // use offscreen if (player._opt.useWCS && !player._opt.useOffscreen) { if (type === MEDIA_TYPE.video) { options.isIFrame = isIFrame; } this.pushBuffer(payload, options); } else if (player._opt.useMSE) { // use mse if (type === MEDIA_TYPE.video) { options.isIFrame = isIFrame; } this.pushBuffer(payload, options); } else { // wasm if (type === MEDIA_TYPE.video) { player.decoderWorker && player.decoderWorker.decodeVideo(payload, ts, isIFrame); } else if (type === MEDIA_TYPE.audio) { if (player._opt.hasAudio) { player.decoderWorker && player.decoderWorker.decodeAudio(payload, ts); } } } } else if (this.player.isPlayback()) { if (type === MEDIA_TYPE.video) { options.isIFrame = isIFrame; } if (this.player.isPlaybackOnlyDecodeIFrame()) { // only decode video not decode audio if (type === MEDIA_TYPE.video && isIFrame) { this.pushBuffer(payload, options); } } else { // playback if (this.player.isPlaybackCacheBeforeDecodeForFpsRender()) { this.pushBuffer(payload, options); } else { if (this.player.getPlaybackRate() === 1) { this.pushBuffer(payload, options); } else { this.pushBuffer(payload, options, false); } } } } } // for hls _doDecodeByHls(payload, type, ts, isIFrame) { let cts = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : 0; let _isAudioAacCodecPacket = false; // decode aac header if (type === MEDIA_TYPE.audio && isAacCodecPacket(payload)) { this.player.debug.log('CommonDemux', `hls pushBuffer audio ts is ${ts}, isAacCodecPacket is true`); _isAudioAacCodecPacket = true; // flv recorder if (this.player.isRecordTypeFlv()) { const payloadCopy = new Uint8Array(payload); this.player.recorder.addAACSequenceHeader(payloadCopy, ts); } } let _isVideoSequenceHeader = false; // init video sequence header if (type === MEDIA_TYPE.video && isIFrame && isVideoSequenceHeader(payload)) { this.player.debug.log('CommonDemux', `hls pushBuffer video ts is ${ts}, isVideoSequenceHeader is true`); _isVideoSequenceHeader = true; // flv recorder if (this.player.isRecordTypeFlv()) { const payloadCopy = new Uint8Array(payload); this.player.recorder.addVideoSequenceHeader(payloadCopy, ts); } } // recording if (this.player.recording && isFalse(_isVideoSequenceHeader) && isFalse(_isAudioAacCodecPacket)) { this.handleRecording(payload, type, ts, isIFrame, cts); } if (type === MEDIA_TYPE.video) { this._doDecoderDecode({ ts: ts, cts: cts, payload: payload, type: MEDIA_TYPE.video, isIFrame: isIFrame }); } else if (type === MEDIA_TYPE.audio) { this._doDecoderDecode({ ts: ts, payload: payload, type: MEDIA_TYPE.audio }); } } // for fmp4 _doDecodeByFmp4(payload, type, ts, isIFrame) { let cts = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : 0; this._doDecode(payload, type, ts, isIFrame, cts); } _doDecodeByTs(payload, type, ts, isIFrame) { let cts = arguments.length > 4 && arguments[4] !== undefined ? 
arguments[4] : 0; this._doDecode(payload, type, ts, isIFrame, cts); } _decodeNext(data) { const ts = data.ts; if (this.bufferList.length === 0) { return; } let nextData = this.bufferList[0]; const diff = nextData.ts - ts; const isVideoAndNextAudio = data.type === MEDIA_TYPE.video && nextData.type === MEDIA_TYPE.audio; const isSqeHeader = data.type === MEDIA_TYPE.video && isVideoSequenceHeader(data.payload); // 如果 if (diff <= DEMUX_LOOP_INTERVAL_TIMES || isVideoAndNextAudio || isSqeHeader) { this.player.debug.log('CommonDemux', `decode data type is ${data.type} and ts is ${ts} next data type is ${nextData.type} ts is ${nextData.ts} diff is ${diff} and isVideoAndNextAudio is ${isVideoAndNextAudio} and isVideoSqeHeader is ${isSqeHeader}`); this.bufferList.shift(); this._doDecoderDecode(nextData); } } _doDecoderDecode(data) { const player = this.player; const { webcodecsDecoder, mseDecoder } = player; // player.debug.log('CommonDemux', `_doDecoderDecode data.type is ${data.type} data.ts is ${data.ts}`); if (this.player.isPlayer()) { this.player.updateStats({ buf: this.delay }); } if (data.type === MEDIA_TYPE.audio) { if (player._opt.hasAudio) { if (player._opt.useMSE && player._opt.mseDecodeAudio) { mseDecoder.decodeAudio(data.payload, data.ts); } else { player.decoderWorker && player.decoderWorker.decodeAudio(data.payload, data.ts); } } } else if (data.type === MEDIA_TYPE.video) { if (player._opt.isEmitSEI) { this.findSei(data.payload, data.ts); } if (player._opt.useWCS && !player._opt.useOffscreen) { webcodecsDecoder.decodeVideo(data.payload, data.ts, data.isIFrame, data.cts); } else if (player._opt.useMSE) { mseDecoder.decodeVideo(data.payload, data.ts, data.isIFrame, data.cts); } else { player.decoderWorker && player.decoderWorker.decodeVideo(data.payload, data.ts, data.isIFrame); } } } pushBuffer(payload, options) { let isPushBuffer = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : true; // this.player.debug.error('CommonDemux', ` // payload ts is ${options.ts} and // type is ${options.type} and isIFrame is ${options.isIFrame}`); const videoBuffer = this.player._opt.videoBuffer; const videoBufferDelay = this.player._opt.videoBufferDelay; const isPlayer = this.player.isPlayer(); // decode aac header if (options.type === MEDIA_TYPE.audio && isAacCodecPacket(payload)) { this.player.debug.log('CommonDemux', `pushBuffer() audio ts is ${options.ts}, isAacCodecPacket is true`); // flv recorder if (this.player.isRecordTypeFlv()) { const payloadCopy = new Uint8Array(payload); this.player.recorder.addAACSequenceHeader(payloadCopy, options.ts); } this._doDecoderDecode({ ts: options.ts, payload: payload, type: MEDIA_TYPE.audio }); return; } // init video sequence header if (options.type === MEDIA_TYPE.video && options.isIFrame && isVideoSequenceHeader(payload)) { this.player.debug.log('CommonDemux', `pushBuffer() video ts is ${options.ts}, isVideoSequenceHeader is true`); // flv recorder if (this.player.isRecordTypeFlv()) { const payloadCopy = new Uint8Array(payload); this.player.recorder.addVideoSequenceHeader(payloadCopy, options.ts); } this._doDecoderDecode({ ts: options.ts, payload: payload, type: MEDIA_TYPE.video, isIFrame: options.isIFrame, cts: options.cts }); return; } // recording if (this.player.recording) { this.handleRecording(payload, options.type, options.ts, options.isIFrame, options.cts); } if (isPlayer) { // for player if (this.preTimestampDuration > 0 && this.preTimestamp > 0 && options.type === MEDIA_TYPE.video) { const diff = options.ts - this.preTimestamp; const maxDiff = this.preTimestampDuration + this.preTimestampDuration / 2; if (diff >= maxDiff) { this.player.debug.log('CommonDemux', `pushBuffer() video ts is ${options.ts}, preTimestamp is ${this.preTimestamp}, diff is ${diff} and preTimestampDuration is ${this.preTimestampDuration} and maxDiff is ${maxDiff} maybe trigger black screen or flower screen`); } } // for player if (this.preTimestamp > 0 && options.ts < this.preTimestamp && options.type === MEDIA_TYPE.video && this.preTimestamp - options.ts > FRAME_TS_MAX_DIFF) { this.player.debug.warn('CommonDemux', `pushBuffer() video ts is ${options.ts}, preTimestamp is ${this.preTimestamp}, diff is ${this.preTimestamp - options.ts} more than ${FRAME_TS_MAX_DIFF} and resetAllDelay()`); this.resetAllDelay(); } // for player if (options.ts <= this.preTimestamp && this.preTimestamp > 0 && options.type === MEDIA_TYPE.video) { this.player.debug.warn('CommonDemux', `pushBuffer() video and isIFrame is ${options.isIFrame} and ts is ${options.ts} less than (or equal) preTimestamp is ${this.preTimestamp} and payloadBufferSize is ${payload.byteLength} and prevPayloadBufferSize is ${this.prevPayloadBufferSize}`); if (this.player._opt.isDropSameTimestampGop && isFalse(options.isIFrame) && // 非i帧数据 this.player.isDemuxDecodeFirstIIframeInit()) { const hasIframe = this.hasIframeInBufferList(); const isNotPushDropping = isFalse(this.isPushDropping); this.player.debug.log('CommonDemux', `pushBuffer(), isDropSameTimestampGop is true and hasIframe is ${hasIframe} and isNotPushDropping is ${isNotPushDropping} and next drop buffer`); if (hasIframe && isNotPushDropping) { this.dropBuffer$2(); } else { this.clearBuffer(true); } return; } } // for player if (this.player.isDemuxDecodeFirstIIframeInit()) { let pushLatestDelay = this.getDelayNotUpdateDelay(options.ts, options.type); this.pushLatestDelay = pushLatestDelay; const maxDelay = videoBufferDelay + videoBuffer; 
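/**
 * Threshold arithmetic used right below, with numbers picked only for
 * illustration: if videoBuffer = 1000 ms and videoBufferDelay = 1000 ms then
 * maxDelay = 2000 ms. When the would-be delay of the incoming frame
 * (pushLatestDelay) exceeds 2000 ms while the tracked delay is still positive
 * and under 2000 ms, and an I-frame is already buffered, dropBuffer$2() jumps
 * the buffer forward to a buffered I-frame instead of letting latency grow.
 */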
if (this.player._opt.useMSE) { if (pushLatestDelay > maxDelay && this.delay < maxDelay && this.delay > 0) { if (this.hasIframeInBufferList() && this.isPushDropping === false) { this.player.debug.warn('CommonDemux', `useMSE, pushLatestDelay is ${pushLatestDelay} > ${videoBufferDelay + videoBuffer}, bufferList is ${this.bufferList.length}, delay is ${this.delay} and dropBuffer$2()`); this.dropBuffer$2(); } } } else { if (pushLatestDelay > maxDelay && this.delay < maxDelay && this.delay > 0) { if (this.hasIframeInBufferList() && this.isPushDropping === false) { this.player.debug.warn('CommonDemux', `useWCS, pushLatestDelay is ${pushLatestDelay} > ${videoBufferDelay + videoBuffer},bufferList is ${this.bufferList.length}, delay is ${this.delay} and dropBuffer$2()`); this.dropBuffer$2(); } } } // for player if (this.isHistoryIntervalDiffTimeAllLarge() && isFalse(this.player.visibility)) { if (this.player._opt.useMSE) { if (this.hasIframeInBufferList() && this.isPushDropping === false) { this.player.debug.warn('CommonDemux', `useMSE, page visibility is false and history interval diff is ${this.historyIntervalDiffTimeList.join(',')} and bufferList is ${this.bufferList.length}, delay is ${this.delay} and dropBuffer$2()`); this.dropBuffer$2(); } } else { if (this.hasIframeInBufferList() && this.isPushDropping === false) { this.player.debug.warn('CommonDemux', `useWCS, page visibility is false and history interval diff is ${this.historyIntervalDiffTimeList.join(',')} and bufferList is ${this.bufferList.length}, delay is ${this.delay} and dropBuffer$2()`); this.dropBuffer$2(); } } } } if (options.type === MEDIA_TYPE.video) { if (this.preTimestamp > 0) { this.preTimestampDuration = options.ts - this.preTimestamp; } this.prevPayloadBufferSize = payload.byteLength; this.preTimestamp = options.ts; } } if (isPushBuffer) { // 音频 if (options.type === MEDIA_TYPE.audio) { // this.player.debug.warn('CommonDemux', `pushBuffer audio ts is ${options.ts}`); this.bufferList.push({ ts: options.ts, payload: payload, type: MEDIA_TYPE.audio }); } else if (options.type === MEDIA_TYPE.video) { // this.player.debug.error('CommonDemux', `pushBuffer video ts is ${options.ts}`); this.bufferList.push({ ts: options.ts, cts: options.cts, payload: payload, type: MEDIA_TYPE.video, isIFrame: options.isIFrame }); } } else { if (options.type === MEDIA_TYPE.video) { this._doDecoderDecode({ ts: options.ts, cts: options.cts, payload: payload, type: MEDIA_TYPE.video, isIFrame: options.isIFrame }); } else if (options.type === MEDIA_TYPE.audio) { this._doDecoderDecode({ ts: options.ts, payload: payload, type: MEDIA_TYPE.audio }); } } // for playback if (this.player.isPlaybackCacheBeforeDecodeForFpsRender()) { if (isEmpty(this.playbackStreamVideoFps) || isEmpty(this.playbackStreamAudioFps)) { let playbackStreamVideoFps = this.playbackStreamVideoFps; let playbackStreamAudioFps = this.playbackStreamAudioFps; if (isEmpty(this.playbackStreamVideoFps)) { playbackStreamVideoFps = calcStreamFpsByBufferList(this.bufferList, MEDIA_TYPE.video); if (playbackStreamVideoFps > 0) { this.playbackStreamVideoFps = playbackStreamVideoFps; if (this.player.video) { this.player.video.setStreamFps(this.playbackStreamVideoFps); } if (playbackStreamAudioFps) { this.playbackStreamFps = playbackStreamVideoFps + playbackStreamAudioFps; } else { this.playbackStreamFps = playbackStreamVideoFps; } if (isFalse(this.player._opt.hasAudio)) { this.player.debug.log(this.TAG_NAME, 'playbackCacheBeforeDecodeForFpsRender,_opt.hasAudio is false and set streamAudioFps is 0'); 
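/**
 * Playback cache-before-decode path around here, summarized (hedged: the fps
 * estimator calcStreamFpsByBufferList is defined elsewhere in this bundle):
 * video and audio frame rates are estimated from the buffered timestamps, their
 * sum becomes playbackStreamFps, and initPlaybackCacheLoop() then drains one
 * buffered frame every Math.ceil(1000 / (playbackStreamFps * playbackRate)) ms,
 * e.g. roughly 20 ms for a stream whose estimated total rate is 25 fps played
 * at 2x. If the buffer grows past 40 frames (60 with audio) before an estimate
 * is available, a fallback rate is assumed.
 */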
this.playbackStreamAudioFps = 0; } this.initPlaybackCacheLoop(); } } if (isEmpty(this.playbackStreamAudioFps)) { playbackStreamAudioFps = calcStreamFpsByBufferList(this.bufferList, MEDIA_TYPE.audio); if (playbackStreamAudioFps > 0) { this.playbackStreamAudioFps = playbackStreamAudioFps; if (playbackStreamVideoFps) { this.playbackStreamFps = playbackStreamVideoFps + playbackStreamAudioFps; } else { this.playbackStreamFps = playbackStreamAudioFps; } this.initPlaybackCacheLoop(); } } if (isEmpty(this.playbackStreamVideoFps) && isEmpty(this.playbackStreamAudioFps)) { const tsList = this.bufferList.map(item => { return { type: item.type, ts: item.ts }; }); this.player.debug.log('CommonDemux', `playbackCacheBeforeDecodeForFpsRender, calc streamAudioFps is ${playbackStreamAudioFps}, streamVideoFps is ${playbackStreamVideoFps}, bufferListLength is ${this.bufferList.length} and ts list is ${JSON.stringify(tsList)}`); } const hasAudio = this.getAudioBufferLength() > 0; const maxBufferLength = hasAudio ? 60 : 40; if (this.bufferList.length >= maxBufferLength) { this.debug.warn('CommonDemux', `playbackCacheBeforeDecodeForFpsRender, bufferListLength is ${this.bufferList.length} more than ${maxBufferLength}, and hasAudio is ${hasAudio} an set streamFps is 25`); this.playbackStreamVideoFps = playbackStreamVideoFps; if (this.player.video) { this.player.video.setStreamFps(this.playbackStreamVideoFps); } if (hasAudio) { this.playbackStreamAudioFps = 25; this.playbackStreamFps = this.playbackStreamVideoFps + this.playbackStreamAudioFps; } else { this.playbackStreamFps = this.playbackStreamVideoFps; } this.initPlaybackCacheLoop(); } } } } dropBuffer$2() { if (this.bufferList.length > 0) { let iFrameIndex = this.bufferList.findIndex(bufferItem => { return isTrue(bufferItem.isIFrame) && bufferItem.type === MEDIA_TYPE.video; }); if (this.isAllIframeInBufferList()) { for (let i = 0; i < this.bufferList.length; i++) { const bufferItem = this.bufferList[i]; const tempDelay = this.getDelayNotUpdateDelay(bufferItem.ts, bufferItem.type); if (tempDelay >= this.getNotDroppingDelayTs()) { this.player.debug.log('CommonDemux', `dropBuffer$2() isAllIframeInBufferList() is true, and index is ${i} and tempDelay is ${tempDelay} and notDroppingDelayTs is ${this.getNotDroppingDelayTs()}`); iFrameIndex = i; break; } } } if (iFrameIndex >= 0) { this.isPushDropping = true; this.player.updateStats({ isDropping: true }); // old bufferList length const bufferListLength = this.bufferList.length; this.bufferList = this.bufferList.slice(iFrameIndex); const iFrameItem = this.bufferList.shift(); this.resetAllDelay(); this.getDelay(iFrameItem.ts, iFrameItem.type); this._doDecoderDecode(iFrameItem); this.isPushDropping = false; this.player.debug.log('CommonDemux', `dropBuffer$2() iFrameIndex is ${iFrameIndex},and old bufferList length is ${bufferListLength} ,and new bufferList length is ${this.bufferList.length} and new delay is ${this.delay} `); } else { this.isPushDropping = false; } } if (this.bufferList.length === 0) { this.isPushDropping = false; } } clearBuffer() { let needClear = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : false; this.player.debug.log('CommonDemux', `clearBuffer,buffer length is ${this.bufferList.length}, need clear is ${needClear} and _opt.checkFirstIFrame is ${this.player._opt.checkFirstIFrame}`); if (needClear) { this.bufferList = []; } if (this.player.isPlayer()) { this.resetAllDelay(); if (isTrue(this.player._opt.checkFirstIFrame)) { this.dropping = true; this.player.updateStats({ isDropping: true }); } } // waiting for i frame this.player.decoderCheckFirstIFrame(); } calcNetworkDelay(dts) { // if (!(this.player.isDemuxDecodeFirstIIframeInit() && dts > 0)) { return; } if (this.bufferStartDts === null) { this.bufferStartDts = dts; this.bufferStartLocalTs = now$2(); } else { // if (dts < this.bufferStartDts) { this.player.debug.warn('CommonDemux', `calcNetworkDelay dts is ${dts} and bufferStartDts is ${this.bufferStartDts}`); this.bufferStartDts = dts; this.bufferStartLocalTs = now$2(); } } let diff1 = dts - this.bufferStartDts; let localDiff = now$2() - this.bufferStartLocalTs; let delay = localDiff > diff1 ? localDiff - diff1 : 0; // just for if (delay > this.player._opt.networkDelay && this.player._opt.playType === PLAY_TYPE.player) { this.player.debug.warn('CommonDemux', `delay is more than networkDelay and now dts:${dts},start dts is ${this.bufferStartDts}, vs start is ${diff1},local diff is ${localDiff} ,delay is ${delay}, _opt.networkDelay is ${this.player._opt.networkDelay}`); this.player.emit(EVENTS.networkDelayTimeout, delay); } this.player.updateStats({ netBuf: delay }); } calcIframeIntervalTimestamp(ts) { if (this.preIframeTs === null) { this.preIframeTs = ts; } else { if (this.preIframeTs < ts) { const intervalTimestamp = ts - this.preIframeTs; if (this.player) { this.player.videoIframeIntervalTs = intervalTimestamp; } // post 到主线程里面去。 this.preIframeTs = ts; } } } // 计算流的fps calcBufferFps(ts) {} getNotDroppingDelayTs() { return this.player._opt.videoBuffer + this.player._opt.videoBufferDelay / 2; } getMaxDelayTs() { return this.player._opt.videoBuffer + this.player._opt.videoBufferDelay; } getPushLatestDelay() { return this.pushLatestDelay; } getVideoBufferLength() { let result = 0; this.bufferList.forEach(item => { if (item.type === MEDIA_TYPE.video) { result += 1; } }); return result; } getAudioBufferLength() { let result = 0; this.bufferList.forEach(item => { if (item.type === MEDIA_TYPE.audio) { result += 1; } }); return result; } hasIframeInBufferList() { return this.bufferList.some(item => { return item.type === MEDIA_TYPE.video && item.isIFrame; }); } isAllIframeInBufferList() { const videoBufferLength = this.getVideoBufferLength(); let iFrameLength = 0; this.bufferList.forEach(item => { if (item.type === MEDIA_TYPE.video && item.isIFrame) { iFrameLength += 1; } }); return videoBufferLength === iFrameLength; } getInputByteLength() { return 0; } getIsStreamTsMoreThanLocal() { return this.isStreamTsMoreThanLocal; } close() {} reset() {} findSei(payload, ts) { let isHevc = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false; let size = 4; if (isNotEmpty(this.nalUnitSize)) { size = this.nalUnitSize; } const units = parseAvcC(payload.slice(5), size); if (isFalse(isHevc)) { const videoInfo = this.player.getVideoInfo(); if (videoInfo && videoInfo.encType) { isHevc = videoInfo.encType === VIDEO_ENC_TYPE_SHOW.h265; } } units.forEach(unit => { const type = isHevc ? 
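/**
 * NAL unit type extraction used by findSei() here, spelled out (standard
 * bitstream layout, not project-specific): for H.264 the type is the low five
 * bits of the first NALU byte (SEI = 6); for H.265 it is bits 1..6, i.e.
 * (byte >>> 1) & 0x3f, with prefix/suffix SEI being types 39 and 40. Matching
 * units are re-emitted unparsed through the videoSEI event with their timestamp.
 */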
unit[0] >>> 1 & 0x3f : unit[0] & 0x1f; if (isHevc && (type === H265_NAL_TYPE.suffixSei || type === H265_NAL_TYPE.prefixSei) || isFalse(isHevc) && type === H264_NAL_TYPE.kSliceSEI) { this.player.emit(EVENTS.videoSEI, { ts, data: unit }); } }); } handleRecording(payload, type, ts, isIFrame, cts) { if (this.player.isRecordTypeFlv()) { const payloadCopy = new Uint8Array(payload); if (type === MEDIA_TYPE.video) { this.player.recorder.addVideo(payloadCopy, ts); } else if (type === MEDIA_TYPE.audio) { this.player.recorder.addAudio(payloadCopy, ts); } } else if (this.player.isRecordTypeMp4()) { const payloadCopy = new Uint8Array(payload); if (this.player.recorder.isWasmMp4()) { // wasm mp4 录制 if (type === MEDIA_TYPE.video) { // remove video tag header(1) + CompositionTime(4) this.player.recorder.handleAddNaluTrack(payloadCopy.slice(5), isIFrame, ts, cts); } else if (type === MEDIA_TYPE.audio) { const payloadCopy = new Uint8Array(payload); // aac need remove 2 // other need remove 1 this.player.recorder.handleAddAudioTrack(isAAC(payloadCopy) ? payloadCopy.slice(2) : payloadCopy.slice(1), ts); } } else { // 普通的mp4录制。 if (type === MEDIA_TYPE.video) { this.player.recorder.handleAddNaluTrack(payloadCopy.slice(5), isIFrame, ts, cts); } } } } updateNalUnitSize(payload) { const videoCodec = payload[0] & 0x0F; this.player.video.updateVideoInfo({ encTypeCode: videoCodec }); const isHevc = videoCodec === VIDEO_ENC_CODE.h265; this.nalUnitSize = getUnitSizeFromVideoSequenceHeader(payload, isHevc); this.player.debug.log(this.TAG_NAME, `demux() isVideoSequenceHeader is true and isHevc is ${isHevc} and nalUnitSize is ${this.nalUnitSize}`); } cryptoPayload(payload, isIFrame) { let payloadBuffer = payload; let player = this.player; if (player._opt.isM7sCrypto) { if (player._opt.cryptoKey && player._opt.cryptoKey.byteLength > 0 && player._opt.cryptoIV && player._opt.cryptoIV.byteLength > 0) { const videoInfo = this.player.video.getVideoInfo(); if (videoInfo.encTypeCode) { payloadBuffer = aes256ctrDecrypt(payload, player._opt.cryptoKey, player._opt.cryptoIV, videoInfo.encTypeCode === VIDEO_ENC_CODE.h265); } else { player.debug.warn(this.TAG_NAME, `videoInfo.encTypeCode is ${videoInfo.encTypeCode}`); } } else { player.debug.error(this.TAG_NAME, `isM7sCrypto cryptoKey.length is ${player._opt.cryptoKey && player._opt.cryptoKey.byteLength} or cryptoIV.length is ${player._opt.cryptoIV && player._opt.cryptoIV.byteLength} null`); } } else if (player._opt.isSm4Crypto) { if (player._opt.sm4CryptoKey && isIFrame) { payloadBuffer = sm4Decrypt(payload, player._opt.sm4CryptoKey); } else { if (!player._opt.sm4CryptoKey) { player.debug.error(this.TAG_NAME, `isSm4Crypto opt.sm4CryptoKey is null`); } } } else if (player._opt.isXorCrypto) { if (player._opt.cryptoKey && player._opt.cryptoKey.byteLength > 0 && player._opt.cryptoIV && player._opt.cryptoIV.byteLength > 0) { const videoInfo = this.player.video.getVideoInfo(); payloadBuffer = xorDecrypt(payload, player._opt.cryptoKey, player._opt.cryptoIV, videoInfo.encTypeCode === VIDEO_ENC_CODE.h265); } else { player.debug.error(this.TAG_NAME, `isXorCrypto opt.xorCryptoKey is null`); } } return payloadBuffer; } cryptoPayloadAudio(payload) { let payloadBuffer = payload; let player = this.player; if (player._opt.isM7sCrypto) { if (player._opt.cryptoKey && player._opt.cryptoKey.byteLength > 0 && player._opt.cryptoIV && player._opt.cryptoIV.byteLength > 0) { const codecId = payload[0] >> 4; if (codecId === AUDIO_ENC_CODE.AAC) { payloadBuffer = aes256ctrDecryptAacAudio(payload, 
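/**
 * Scheme selection in cryptoPayload()/cryptoPayloadAudio() nearby, summarized:
 * isM7sCrypto applies the AES-256-CTR NALU decryptor with _opt.cryptoKey and
 * _opt.cryptoIV (and aes256ctrDecryptAacAudio for AAC audio); isSm4Crypto runs
 * sm4Decrypt with _opt.sm4CryptoKey on key frames only; isXorCrypto applies the
 * lightweight XOR decryptor. A hypothetical configuration enabling the first
 * scheme might look like { isM7sCrypto: true, cryptoKey: <Uint8Array>,
 * cryptoIV: <Uint8Array> } -- field names taken from the checks above, with
 * values supplied by the stream server.
 */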
player._opt.cryptoKey, player._opt.cryptoIV); } } else { player.debug.error(this.TAG_NAME, `isM7sCrypto cryptoKey.length is ${player._opt.cryptoKey && player._opt.cryptoKey.byteLength} or cryptoIV.length is ${player._opt.cryptoIV && player._opt.cryptoIV.byteLength} null`); } } return payloadBuffer; } _decodeEnhancedH265Video(payload, dts) { const flags = payload[0]; const frameTypeEx = flags & 0x30; const packetEx = flags & 0x0F; const codecId = payload.slice(1, 5); const tmp = new ArrayBuffer(4); const tmp32 = new Uint32Array(tmp); // 'av01' 'hvc1' const isAV1 = String.fromCharCode(codecId[0]) == 'a'; if (packetEx === PACKET_TYPE_EX.PACKET_TYPE_SEQ_START) { if (frameTypeEx === FRAME_TYPE_EX.FT_KEY) { // header video info const extraData = payload.slice(5); if (isAV1) ; else { const payloadBuffer = new Uint8Array(5 + extraData.length); payloadBuffer.set([0x1c, 0x00, 0x00, 0x00, 0x00], 0); payloadBuffer.set(extraData, 5); this.updateNalUnitSize(payloadBuffer); this.player.debug.log(this.TAG_NAME, `demux() isVideoSequenceHeader(enhancedH265) is true and nalUnitSize is ${this.nalUnitSize}`); this._doDecode(payloadBuffer, MEDIA_TYPE.video, 0, true, 0); } } } else if (packetEx === PACKET_TYPE_EX.PACKET_TYPE_FRAMES) { let payloadBuffer = payload; let cts = 0; const isIFrame = frameTypeEx === FRAME_TYPE_EX.FT_KEY; if (isIFrame) { this.calcIframeIntervalTimestamp(dts); } if (isAV1) ; else { // h265 tmp32[0] = payload[4]; tmp32[1] = payload[3]; tmp32[2] = payload[2]; tmp32[3] = 0; cts = tmp32[0]; const data = payload.slice(8); payloadBuffer = hevcEncoderNalePacketNotLength(data, isIFrame); payloadBuffer = this.cryptoPayload(payloadBuffer, isIFrame); this._doDecode(payloadBuffer, MEDIA_TYPE.video, dts, isIFrame, cts); } } else if (packetEx === PACKET_TYPE_EX.PACKET_TYPE_FRAMESX) { const isIFrame = frameTypeEx === FRAME_TYPE_EX.FT_KEY; const data = payload.slice(5); if (isIFrame) { this.calcIframeIntervalTimestamp(dts); } let payloadBuffer = hevcEncoderNalePacketNotLength(data, isIFrame); payloadBuffer = this.cryptoPayload(payloadBuffer, isIFrame); this._doDecode(payloadBuffer, MEDIA_TYPE.video, dts, isIFrame, 0); } } _isEnhancedH265Header(flags) { return (flags & FRAME_HEADER_EX) === FRAME_HEADER_EX; } } var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
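/**
 * Enhanced-FLV (extended header) video handling in _decodeEnhancedH265Video()
 * above, summarized from the code: when the FRAME_HEADER_EX bit is set, the low
 * nibble of the first byte is the packet type and bits 4..5 the frame type,
 * while bytes 1..4 carry a four-character codec id ('hvc1' for HEVC, 'av01' for
 * AV1 -- the AV1 branches are currently empty). A sequence-start packet wraps
 * the extradata into a synthetic 0x1c-prefixed record before decoding; coded
 * frame packets read a 3-byte composition time (FRAMESX packets carry none) and
 * are re-packed via hevcEncoderNalePacketNotLength() and decrypted before decode.
 */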
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; const U32 = Symbol(32); const U16 = Symbol(16); const U8 = Symbol(8); class OPut { constructor(g) { this.g = g; this.consumed = 0; if (g) this.need = g.next().value; } setG(g) { this.g = g; this.demand(g.next().value, true); } consume() { if (this.buffer && this.consumed) { this.buffer.copyWithin(0, this.consumed); this.buffer = this.buffer.subarray(0, this.buffer.length - this.consumed); this.consumed = 0; } } demand(n, consume) { if (consume) this.consume(); this.need = n; return this.flush(); } read(need) { return __awaiter(this, void 0, void 0, function* () { if (this.lastReadPromise) { yield this.lastReadPromise; } return this.lastReadPromise = new Promise((resolve, reject) => { var _a; this.reject = reject; this.resolve = (data) => { delete this.lastReadPromise; delete this.resolve; delete this.need; resolve(data); }; const result = this.demand(need, true); if (!result) (_a = this.pull) === null || _a === void 0 ? void 0 : _a.call(this, need); //已饥饿,等待数据 }); }); } readU32() { return this.read(U32); } readU16() { return this.read(U16); } readU8() { return this.read(U8); } close() { var _a; if (this.g) this.g.return(); if (this.buffer) this.buffer.subarray(0, 0); (_a = this.reject) === null || _a === void 0 ? void 0 : _a.call(this, new Error('EOF')); delete this.lastReadPromise; } flush() { if (!this.buffer || !this.need) return; let returnValue = null; const unread = this.buffer.subarray(this.consumed); let n = 0; const notEnough = (x) => unread.length < (n = x); if (typeof this.need === 'number') { if (notEnough(this.need)) return; returnValue = unread.subarray(0, n); } else if (this.need === U32) { if (notEnough(4)) return; returnValue = (unread[0] << 24) | (unread[1] << 16) | (unread[2] << 8) | unread[3]; } else if (this.need === U16) { if (notEnough(2)) return; returnValue = (unread[0] << 8) | unread[1]; } else if (this.need === U8) { if (notEnough(1)) return; returnValue = unread[0]; } else if (!('buffer' in this.need)) { if (notEnough(this.need.byteLength)) return; new Uint8Array(this.need).set(unread.subarray(0, n)); returnValue = this.need; } else if ('byteOffset' in this.need) { if (notEnough(this.need.byteLength - this.need.byteOffset)) return; new Uint8Array(this.need.buffer, this.need.byteOffset).set(unread.subarray(0, n)); returnValue = this.need; } else if (this.g) { this.g.throw(new Error('Unsupported type')); return; } this.consumed += n; if (this.g) this.demand(this.g.next(returnValue).value, true); else if (this.resolve) this.resolve(returnValue); return returnValue; } write(value) { if (value instanceof Uint8Array) { this.malloc(value.length).set(value); } else if ('buffer' in value) { this.malloc(value.byteLength).set(new Uint8Array(value.buffer, value.byteOffset, value.byteLength)); } else { this.malloc(value.byteLength).set(new Uint8Array(value)); } if (this.g || this.resolve) this.flush(); //富余,需要等到饥饿 else return new Promise((resolve) => this.pull = resolve); } writeU32(value) { this.malloc(4).set([(value >> 24) & 0xff, (value >> 16) & 0xff, (value >> 8) & 0xff, value & 0xff]); this.flush(); } writeU16(value) { this.malloc(2).set([(value >> 8) & 0xff, value & 0xff]); this.flush(); } writeU8(value) { this.malloc(1)[0] = value; this.flush(); } malloc(size) { if (this.buffer) { const l = this.buffer.length; const nl = l + size; if (nl <= this.buffer.buffer.byteLength - this.buffer.byteOffset) { this.buffer = new 
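/**
 * Illustrative sketch of how the OPut reader here is fed (inferred from this
 * class and the FLV demuxer below, not an official API doc): the generator
 * yields how many bytes it needs next and is resumed once write() has buffered
 * at least that much.
 *
 *   const input = new OPut(function* () {
 *     const header = yield 9;      // e.g. an FLV file header
 *     while (true) {
 *       const tag = yield 15;      // previous tag size + tag header
 *       // ... compute the payload length, then: const payload = yield length;
 *     }
 *   }());
 *   input.write(chunkFromNetwork); // resumes the generator as data arrives
 */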
Uint8Array(this.buffer.buffer, this.buffer.byteOffset, nl); } else { const n = new Uint8Array(nl); n.set(this.buffer); this.buffer = n; } return this.buffer.subarray(l, nl); } else { this.buffer = new Uint8Array(size); return this.buffer; } } } OPut.U32 = U32; OPut.U16 = U16; OPut.U8 = U8; class FlvLoader extends CommonLoader { constructor(player) { super(player); this.TAG_NAME = 'FlvDemux'; this.input = new OPut(this.demux()); player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); this.input = null; this.player.debug.log(this.TAG_NAME, 'destroy'); } dispatch(data) { if (this.input) { this.input.write(data); } else { this.player && this.player.debug.warn(this.TAG_NAME, 'dispatch() this.input is null'); } // this.player.debug.log(this.TAG_NAME, `this.input.buffer.length is ${this.input.buffer.length},byteLength is ${this.input.buffer.byteLength}`); } *demux() { yield 9; const tmp = new ArrayBuffer(4); const tmp8 = new Uint8Array(tmp); const tmp32 = new Uint32Array(tmp); const player = this.player; while (true) { if (!this.input) { return; } tmp8[3] = 0; const t = yield 15; const type = t[4]; tmp8[0] = t[7]; tmp8[1] = t[6]; tmp8[2] = t[5]; const length = tmp32[0]; tmp8[0] = t[10]; tmp8[1] = t[9]; tmp8[2] = t[8]; tmp8[3] = t[11]; let ts = tmp32[0]; const payload = (yield length).slice(); if (!player) { return; } switch (type) { case FLV_MEDIA_TYPE.audio: if (player._opt.hasAudio) { player.updateStats({ abps: payload.byteLength }); if (payload.byteLength > 0) { let payloadBuffer = payload; if (isTrue(this.player._opt.m7sCryptoAudio)) { payloadBuffer = this.cryptoPayloadAudio(payload); } this._doDecode(payloadBuffer, MEDIA_TYPE.audio, ts); } } break; case FLV_MEDIA_TYPE.video: // console.log('flv loader demux', payload); if (player._opt.hasVideo && payload.length >= 6) { let dts = ts; player.updateStats({ vbps: payload.byteLength, dts: dts }); if (!player._times.demuxStart) { player._times.demuxStart = now$2(); } const flags = payload[0]; if (this._isEnhancedH265Header(flags)) { this._decodeEnhancedH265Video(payload, dts); } else { const codecId = flags & 0x0F; const frameType = flags >> 4 & 0x0F; let isIFrame = frameType === VIDEO_FRAME_TYPE.keyFrame; const isH265 = codecId === VIDEO_ENC_CODE.h265; const isH264 = codecId === VIDEO_ENC_CODE.h264; if (isFalse(isH265 || isH264)) { this.player.debug.warn(this.TAG_NAME, `demux() codecId is ${codecId} and ignore`); return; } if (isIFrame) { this.calcIframeIntervalTimestamp(ts); if (this.nalUnitSize === null && isVideoSequenceHeader(payload)) { this.updateNalUnitSize(payload); } } // cts tmp32[0] = payload[4]; tmp32[1] = payload[3]; tmp32[2] = payload[2]; tmp32[3] = 0; let cts = tmp32[0]; let payloadBuffer = this.cryptoPayload(payload, isIFrame); this._doDecode(payloadBuffer, MEDIA_TYPE.video, ts, isIFrame, cts); } } else { if (payload.length < 6) { player.debug.warn(this.TAG_NAME, `payload.length is ${payload.length} less than 6 and ignore`); } } break; case FLV_MEDIA_TYPE.scriptData: if (this.player.isRecordTypeFlv()) { const payloadCopy = new Uint8Array(payload); this.player.recorder.addMetaData(payloadCopy); } const scriptObj = parseFlvScriptData(payload); if (scriptObj && scriptObj.onMetaData) { player.updateMetaData(scriptObj.onMetaData); } break; default: player.debug.log(this.TAG_NAME, `demux() type is ${type}`); break; } } } close() { this.input = null; } getInputByteLength() { let result = 0; if (this.input && this.input.buffer) { result = this.input.buffer.byteLength; } return result; } } class M7sLoader extends 
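/**
 * Byte map behind the FLV demux generator above, for reference (standard FLV
 * tag layout): each `yield 15` returns the 4-byte PreviousTagSize plus the
 * 11-byte tag header, so t[4] is the tag type (8 audio, 9 video, 18 script
 * data), t[5..7] the big-endian payload length, t[8..10] the timestamp with
 * t[11] as its extension byte, and the payload itself is fetched with
 * `yield length`. The Uint32Array shuffling above just reassembles those
 * big-endian fields on a little-endian host.
 */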
CommonLoader { constructor(player) { super(player); this.TAG_NAME = 'M7sDemux'; player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); this.player.debug.log(this.TAG_NAME, 'destroy'); } dispatch(data) { const player = this.player; const dv = new DataView(data); const type = dv.getUint8(0); const ts = dv.getUint32(1, false); const tmp = new ArrayBuffer(4); const tmp32 = new Uint32Array(tmp); switch (type) { case MEDIA_TYPE.audio: if (player._opt.hasAudio) { const payload = new Uint8Array(data, 5); player.updateStats({ abps: payload.byteLength }); if (payload.byteLength > 0) { this._doDecode(payload, type, ts); } } break; case MEDIA_TYPE.video: if (player._opt.hasVideo) { if (!player._times.demuxStart) { player._times.demuxStart = now$2(); } if (dv.byteLength >= 5 + 6) { const payload = new Uint8Array(data, 5); let dts = ts; player.updateStats({ vbps: payload.byteLength, dts: dts }); const flag = payload[0]; if (this._isEnhancedH265Header(flag)) { this._decodeEnhancedH265Video(payload, ts); } else { const isIFrame = dv.getUint8(5) >> 4 === 1; if (isIFrame) { this.calcIframeIntervalTimestamp(ts); } tmp32[0] = payload[4]; tmp32[1] = payload[3]; tmp32[2] = payload[2]; tmp32[3] = 0; let cts = tmp32[0]; let payloadBuffer = this.cryptoPayload(payload, isIFrame); this._doDecode(payloadBuffer, type, ts, isIFrame, cts); } } else { this.player.debug.warn(this.TAG_NAME, 'dispatch', 'dv byteLength is', dv.byteLength, 'and return'); } } break; } } } class WebTransportLoader extends FlvLoader { constructor(player) { super(player); player.debug.log('WebTransportDemux', 'init'); } destroy() { this.player.debug.log('WebTransportDemux', 'destroy'); super.destroy(); } } var defineProperty = createCommonjsModule(function (module) { function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } module.exports = _defineProperty, module.exports.__esModule = true, module.exports["default"] = module.exports; }); var _defineProperty$1 = unwrapExports(defineProperty); class NakedFlowLoader extends CommonLoader { constructor(player) { super(player); _defineProperty$1(this, "TAG_NAME", 'NakedFlowDemux'); this.lastBuf = null; this.vps = null; this.sps = null; this.pps = null; this.streamVideoType = null; this.streamAudioType = null; this.tempNaluBufferList = new Uint8Array(0); this.localDts = 0; this.isSendSeqHeader = false; this.isSendAACSeqHeader = false; player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); this.lastBuf = null; this.vps = null; this.sps = null; this.pps = null; this.streamVideoType = null; this.streamAudioType = null; this.tempNaluBufferList = new Uint8Array(0); this.localDts = 0; this.localAudioDts = 0; this.isSendSeqHeader = false; this.isSendAACSeqHeader = false; this.player.debug.log(this.TAG_NAME, 'destroy'); } // dispatch(data) { this.player; const uint8Array = new Uint8Array(data); // player.debug.log(this.TAG_NAME, 'dispatch', uint8Array.byteLength); // const naluArray = this.extractNALu(uint8Array); // console.log('naluArray', naluArray); this.extractNALu$2(uint8Array); // this.addNaluToBuffer(uint8Array); // this.handleNALu(uint8Array); } addNaluToBuffer(nalu) { const len = nalu.byteLength + this.tempNaluBufferList.byteLength; const newBuffer = new Uint8Array(len); newBuffer.set(this.tempNaluBufferList, 0); newBuffer.set(nalu, this.tempNaluBufferList.byteLength); this.tempNaluBufferList = newBuffer; 
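// Note on M7sLoader.dispatch() above (a format sketch, not part of this bundle):
// it expects a tiny framing on top of FLV-style tag bodies, namely
//   byte 0      media type (MEDIA_TYPE.audio / MEDIA_TYPE.video)
//   bytes 1..4  big-endian uint32 timestamp
//   bytes 5..   the FLV tag payload
// A hedged sketch of building such a packet (buildM7sPacket is illustrative,
// not an API of this bundle):
//
//   function buildM7sPacket(type, ts, body) {
//     const packet = new Uint8Array(5 + body.byteLength);
//     const dv = new DataView(packet.buffer);
//     dv.setUint8(0, type);
//     dv.setUint32(1, ts, false);   // big-endian, matching getUint32(1, false)
//     packet.set(body, 5);
//     return packet.buffer;         // dispatch() wraps the ArrayBuffer in a DataView
//   }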
//console.log('addNaluToBuffer byteLength is', this.tempNaluBufferList.byteLength); } downloadNakedFlowFile() { const blob = new Blob([this.tempNaluBufferList]); try { const oa = document.createElement('a'); oa.href = window.URL.createObjectURL(blob); oa.download = Date.now() + '.h264'; oa.click(); window.URL.revokeObjectURL(oa.href); } catch (e) { console.error('downloadTempNalu', e); } } // getNaluDts() { const nakedFlowFps = this.player._opt.nakedFlowFps; // console.log(`nakedFlowFps is ${nakedFlowFps} and dts is ${(1000 / nakedFlowFps)}`); this.localDts = this.localDts + parseInt(1000 / nakedFlowFps); return this.localDts; } getNaluAudioDts() { const audioContextSampleRate = this.player.audio.audioContext.sampleRate; const audioBufferSize = this.player.audio.audioBufferSize; // dts return this.localDts + parseInt(audioBufferSize / audioContextSampleRate * 1000); } // extractNALu(buffer) { let i = 0, length = buffer.byteLength, value, state = 0, result = [], lastIndex; while (i < length) { value = buffer[i++]; // Annex-B格式使用start code进行分割,start code为0x000001或0x00000001, // SPS/PPS作为一般NALU单元以start code作为分隔符的方式放在文件或者直播流的头部。 // finding 3 or 4-byte start codes (00 00 01 OR 00 00 00 01) switch (state) { case 0: if (value === 0) { state = 1; } break; case 1: if (value === 0) { state = 2; } else { state = 0; } break; case 2: case 3: if (value === 0) { state = 3; } else if (value === 1 && i < length) { buffer[i] & 0x1f; if (lastIndex) { result.push(buffer.subarray(lastIndex, i - state - 1)); // console.error('lastUnitType', lastUnitType); } lastIndex = i; state = 0; } else { state = 0; } break; } } if (lastIndex) { result.push(buffer.subarray(lastIndex, length)); // console.error('lastUnitType', lastUnitType); } return result; } extractNALu$2(buffer) { let typedArray = null; if (!buffer || buffer.byteLength < 1) return; if (this.lastBuf) { typedArray = new Uint8Array(buffer.byteLength + this.lastBuf.length); typedArray.set(this.lastBuf); typedArray.set(new Uint8Array(buffer), this.lastBuf.length); } else { typedArray = new Uint8Array(buffer); } let lastNalEndPos = 0; let b1 = -1; // byte before one let b2 = -2; // byte before two const nalStartPos = new Array(); for (let i = 0; i < typedArray.length; i += 2) { const b_0 = typedArray[i]; const b_1 = typedArray[i + 1]; if (b1 == 0 && b_0 == 0 && b_1 == 0) { nalStartPos.push(i - 1); } else if (b_1 == 1 && b_0 == 0 && b1 == 0 && b2 == 0) { nalStartPos.push(i - 2); } b2 = b_0; b1 = b_1; } if (nalStartPos.length > 1) { for (let i = 0; i < nalStartPos.length - 1; ++i) { const naluItem = typedArray.subarray(nalStartPos[i], nalStartPos[i + 1] + 1); this.handleNALu(naluItem); //console.log('nakedFlowDemuxer.lastBuf nalType', this.lastBuf.byteLength); lastNalEndPos = nalStartPos[i + 1]; } } else { lastNalEndPos = nalStartPos[0]; } if (lastNalEndPos != 0 && lastNalEndPos < typedArray.length) { this.lastBuf = typedArray.subarray(lastNalEndPos); } else { if (!!!this.lastBuf) { this.lastBuf = typedArray; } const _newBuf = new Uint8Array(this.lastBuf.length + buffer.byteLength); _newBuf.set(this.lastBuf); _newBuf.set(new Uint8Array(buffer), this.lastBuf.length); this.lastBuf = _newBuf; } } handleNALu(nalu) { if (nalu.byteLength < 4) { this.player.debug.warn(this.TAG_NAME, `handleNALu nalu byteLength is ${nalu.byteLength} <= 4`); return; } // 0001 去掉前4个字节(start code) nalu = nalu.slice(4); this.handleVideoNalu(nalu); } handleVideoNalu(nalu) { const uint8Array = new Uint8Array(nalu); // todo:这边首帧必须是sps/pps/vps 不能是P帧或者I帧,这个方法会检测失败的。 if (!this.streamVideoType) { 
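// Note on the raw-stream ("naked flow") demuxer above: the input carries no
// container timestamps, so getNaluDts() synthesizes a DTS from the configured
// nakedFlowFps, advancing roughly 1000 / fps milliseconds per access unit
// (40 ms at 25 fps). extractNALu / extractNALu$2 split the byte stream on
// Annex-B start codes (0x000001 or 0x00000001), keep the incomplete tail in
// this.lastBuf for the next dispatch() call, and handleNALu() strips the
// 4-byte start code before the unit is classified by its NAL header:
//
//   H.264: nal_unit_type = nalu[0] & 0x1f
//   H.265: nal_unit_type = (nalu[0] >> 1) & 0x3f
//
// SPS/PPS (and VPS for H.265) arrive as ordinary NAL units at the head of the
// stream, which is why the code requires them before any frame is forwarded.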
this.streamVideoType = checkNaluType(uint8Array); } if (this.streamVideoType === VIDEO_ENC_TYPE_SHOW.h264) { const tempNalu = this.handleAddNaluStartCode(uint8Array); const naluList = this.extractNALu(tempNalu); if (naluList.length === 0) { this.player.debug.warn(this.TAG_NAME, 'handleVideoNalu', 'naluList.length === 0'); return; } const newNaluList = []; naluList.forEach(naluItem => { const nalType = getAvcSeqHeadType(naluItem); if (nalType === H264_NAL_TYPE.pps || nalType === H264_NAL_TYPE.sps) { this.handleVideoH264Nalu(naluItem); } else { if (isNotAvcSeqHead(nalType)) { newNaluList.push(naluItem); } } }); if (newNaluList.length === 1) { this.handleVideoH264Nalu(newNaluList[0]); } else { const isSameNaluType = isSameAvcNaluType(newNaluList); if (isSameNaluType) { const naluType = getAvcSeqHeadType(newNaluList[0]); const isIFrame = isAvcNaluIFrame(naluType); this.handleVideoH264NaluList(newNaluList, isIFrame, naluType); } else { newNaluList.forEach(naluItem => { this.handleVideoH264Nalu(naluItem); }); } } } else if (this.streamVideoType === VIDEO_ENC_TYPE_SHOW.h265) { if (this.player._opt.nakedFlowH265DemuxUseNew) { const tempNalu = this.handleAddNaluStartCode(uint8Array); const naluList = this.extractNALu(tempNalu); if (naluList.length === 0) { this.player.debug.warn(this.TAG_NAME, 'handleVideoNalu', 'h265 naluList.length === 0'); return; } const newNaluList = []; naluList.forEach(naluItem => { const nalType = getHevcSeqHeadType(naluItem); if (nalType === H265_NAL_TYPE.pps || nalType === H265_NAL_TYPE.sps || nalType === H265_NAL_TYPE.vps) { this.handleVideoH265Nalu(naluItem); } else { if (isNotHevcSeqHead(nalType)) { newNaluList.push(naluItem); } } }); if (newNaluList.length === 1) { this.handleVideoH265Nalu(newNaluList[0]); } else { const isSameNaluType = isSameHevcNaluType(newNaluList); if (isSameNaluType) { const naluType = getHevcSeqHeadType(newNaluList[0]); const isIFrame = isHevcNaluIFrame(naluType); this.handleVideoH265NaluList(newNaluList, isIFrame, naluType); } else { newNaluList.forEach(naluItem => { this.handleVideoH265Nalu(naluItem); }); } } } else { const naluType = getHevcSeqHeadType(uint8Array); if (naluType === H265_NAL_TYPE.pps) { this.extractH265PPS(uint8Array); } else { this.handleVideoH265Nalu(uint8Array); } } } else { this.player.debug.error(this.TAG_NAME, ` this.streamVideoType is null`); } } extractH264PPS(nalu) { const tempNalu = this.handleAddNaluStartCode(nalu); const naluList = this.extractNALu(tempNalu); naluList.forEach(naluItem => { const nalType = getAvcSeqHeadType(naluItem); if (isHvcSEIType(nalType)) { this.extractH264SEI(naluItem); } else { this.handleVideoH264Nalu(naluItem); } }); } extractH265PPS(nalu) { const tempNalu = this.handleAddNaluStartCode(nalu); const naluList = this.extractNALu(tempNalu); naluList.forEach(naluItem => { const nalType = getHevcSeqHeadType(naluItem); if (isHevcSEIType(nalType)) { this.extractH265SEI(naluItem); } else { this.handleVideoH265Nalu(naluItem); } }); } extractH264SEI(nalu) { const tempNalu = this.handleAddNaluStartCode(nalu); const naluList = this.extractNALu(tempNalu); naluList.forEach(naluItem => { this.handleVideoH264Nalu(naluItem); }); } extractH265SEI(nalu) { const tempNalu = this.handleAddNaluStartCode(nalu); const naluList = this.extractNALu(tempNalu); //console.log('extractH265SEI', naluList); naluList.forEach(naluItem => { this.handleVideoH265Nalu(naluItem); }); } handleAddNaluStartCode(nalu) { const prefix = [0, 0, 0, 1]; const newNalu = new Uint8Array(nalu.length + prefix.length); newNalu.set(prefix); 
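// Note on the H.264/H.265 paths above (a sketch of the assumed flow, not an
// authoritative description): parameter sets are collected first, then packed
// by avcEncoderConfigurationRecord$2 / hevcEncoderConfigurationRecord$2 into an
// FLV-style "sequence header" video packet that is decoded once at dts 0;
// only after that are slices wrapped by avcEncoderNalePacket /
// hevcEncoderNalePacket (presumably length-prefixed AVCC/HVCC payloads) and
// pushed through _preDoDecode with a synthesized dts:
//
//   SPS + PPS (+ VPS for H.265)  ->  sequence header packet, isIFrame = true, dts 0
//   IDR / non-IDR slices         ->  regular packets, dts from getNaluDts()
//
// handleAddNaluStartCode() simply re-prefixes a bare NAL unit with
// 00 00 00 01 so it can be re-scanned by extractNALu().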
newNalu.set(nalu, prefix.length); return newNalu; } handleAudioAACNalu(nalu) { if (!nalu || nalu.byteLength < 1) return; if (!this.streamAudioType) { this.streamAudioType = AUDIO_ENC_CODE_SHOW.AAC; } let uint8Array = new Uint8Array(nalu); // 需要取出7个字节的ADTS头。 const accADTSHeader = uint8Array.slice(0, 7); // 移除掉 ADTS头 uint8Array = uint8Array.slice(7); if (!this.isSendAACSeqHeader) { // 七个字节的adts头; // aac 格式 // 需要先发序列帧。然后再发后续的 // 序列帧:asc 序列 // 后续的帧,0 变成 1 就行了。 // 0: Main profile // 1: Low Complexity profile(LC) // 2: Scalable Sampling Rate profile(SSR) const profile = (accADTSHeader[2] & 0xc0) >> 6; // const profile = 0; // const sampleRate = (+audioInfo.sampleRate); const sampleRate = (accADTSHeader[2] & 0x3C) >> 2; // const channel = (+audioInfo.channel); const channel = (accADTSHeader[2] & 0x1) << 2 | (accADTSHeader[3] & 0xc0) >> 6; const config1 = profile << 3 | (sampleRate & 0xe) >> 1; const config2 = (sampleRate & 0x1) << 7 | channel << 3; // 0xAF >> 4 === 10 const temp = [0xAF, 0x00, config1, config2]; const arrayBuffer = new Uint8Array(temp); this.isSendAACSeqHeader = true; this._doDecode(arrayBuffer, MEDIA_TYPE.audio, 0, false, 0); } const dts = this.getNaluAudioDts(); const arrayBuffer = new Uint8Array(uint8Array.length + 2); arrayBuffer.set([0xAF, 0x01], 0); arrayBuffer.set(uint8Array, 2); this._doDecode(arrayBuffer, MEDIA_TYPE.audio, dts, false, 0); } handleAudioG711ANalu(nalu) { if (!nalu || nalu.byteLength < 1) return; if (!this.streamAudioType) { this.streamAudioType = AUDIO_ENC_CODE_SHOW.ALAW; } let uint8Array = new Uint8Array(nalu); const dts = this.getNaluAudioDts(); const arrayBuffer = new Uint8Array(uint8Array.length + 1); arrayBuffer.set([7 << 4 | 1 << 1], 0); arrayBuffer.set(uint8Array, 1); this._doDecode(arrayBuffer, MEDIA_TYPE.audio, dts, false, 0); } handleAudioG711UNalu(nalu) { if (!nalu || nalu.byteLength < 1) return; if (!this.streamAudioType) { this.streamAudioType = AUDIO_ENC_CODE_SHOW.MULAW; } let uint8Array = new Uint8Array(nalu); const dts = this.getNaluAudioDts(); const arrayBuffer = new Uint8Array(uint8Array.length + 1); arrayBuffer.set([8 << 4 | 1 << 1], 0); arrayBuffer.set(uint8Array, 1); this._doDecode(arrayBuffer, MEDIA_TYPE.audio, dts, false, 0); } handleVideoH264Nalu(nalu) { const nalType = getAvcSeqHeadType(nalu); // this.player.debug.log(this.TAG_NAME, `handleVideoH264Nalu anlType is ${nalType}, nalu[0] is ${nalu[0]}`); switch (nalType) { case H264_NAL_TYPE.sps: this.sps = nalu; break; case H264_NAL_TYPE.pps: this.pps = nalu; break; } if (!this.isSendSeqHeader) { if (this.sps && this.pps) { this.isSendSeqHeader = true; const seqHeader = avcEncoderConfigurationRecord$2({ sps: this.sps, pps: this.pps }); this._doDecode(seqHeader, MEDIA_TYPE.video, 0, true, 0); this.sps = null; this.pps = null; } } else { if (this.sps && this.pps) { const seqHeader = avcEncoderConfigurationRecord$2({ sps: this.sps, pps: this.pps }); const dts = this.getNaluDts(); this._doDecode(seqHeader, MEDIA_TYPE.video, dts, true, 0); this.sps = null; this.pps = null; } if (isNotAvcSeqHead(nalType)) { if (!this.player._times.demuxStart) { this.player._times.demuxStart = now$2(); } const isIFrame = isAvcNaluIFrame(nalType); const dts = this.getNaluDts(); const packet = avcEncoderNalePacket(nalu, isIFrame); this._preDoDecode(packet, MEDIA_TYPE.video, dts, isIFrame, 0); } else { this.player.debug.warn(this.TAG_NAME, `handleVideoH264Nalu is avc seq head nalType is ${nalType}`); } } } handleVideoH264NaluList(naluList, isIFrame, naluType) { if (this.isSendSeqHeader) { if 
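// Note on the audio handlers above (descriptive only): handleAudioAACNalu()
// converts ADTS-framed AAC into FLV-style audio tags. The first packet is an
// AAC sequence header
//   [0xAF, 0x00, config1, config2]
// where 0xA is the AAC codec id and 0xF the 44 kHz / 16-bit / stereo flags of
// the FLV AudioTagHeader; every later frame is sent as
//   [0xAF, 0x01, <raw AAC frame, 7-byte ADTS header removed>].
// config1/config2 are rebuilt from the ADTS header bits:
//   profile     = (adts[2] & 0xC0) >> 6   // ADTS stores audioObjectType - 1
//   freqIndex   = (adts[2] & 0x3C) >> 2
//   channelConf = ((adts[2] & 0x01) << 2) | ((adts[3] & 0xC0) >> 6)
// The G.711 handlers do the same framing with FLV sound formats 7 (A-law) and
// 8 (mu-law) in the high nibble of the first byte.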
(!this.player._times.demuxStart) { this.player._times.demuxStart = now$2(); } const dts = this.getNaluDts(); const newNalu = naluList.reduce((pre, cur) => { const nalu2 = addNaleHeaderLength(pre); const nalu3 = addNaleHeaderLength(cur); const nalu4 = new Uint8Array(nalu2.byteLength + nalu3.byteLength); nalu4.set(nalu2, 0); nalu4.set(nalu3, nalu2.byteLength); return nalu4; }); const packet = avcEncoderNalePacketNotLength(newNalu, isIFrame); this._preDoDecode(packet, MEDIA_TYPE.video, dts, isIFrame, 0); } else { this.player.debug.warn(this.TAG_NAME, `handleVideoH264NaluList isSendSeqHeader is false`); } } handleVideoH265Nalu(nalu) { const nalType = getHevcSeqHeadType(nalu); switch (nalType) { case H265_NAL_TYPE.vps: this.vps = nalu; break; case H265_NAL_TYPE.sps: this.sps = nalu; break; case H265_NAL_TYPE.pps: this.pps = nalu; break; } if (!this.isSendSeqHeader) { if (this.vps && this.sps && this.pps) { this.isSendSeqHeader = true; const seqHeader = hevcEncoderConfigurationRecord$2({ vps: this.vps, sps: this.sps, pps: this.pps }); this._doDecode(seqHeader, MEDIA_TYPE.video, 0, true, 0); this.vps = null; this.sps = null; this.pps = null; } } else { if (this.vps && this.sps && this.pps) { const seqHeader = hevcEncoderConfigurationRecord$2({ vps: this.vps, sps: this.sps, pps: this.pps }); const dts = this.getNaluDts(); this._doDecode(seqHeader, MEDIA_TYPE.video, dts, true, 0); this.vps = null; this.sps = null; this.pps = null; } if (isNotHevcSeqHead(nalType)) { if (!this.player._times.demuxStart) { this.player._times.demuxStart = now$2(); } const isIFrame = isHevcNaluIFrame(nalType); const dts = this.getNaluDts(); const packet = hevcEncoderNalePacket(nalu, isIFrame); this._preDoDecode(packet, MEDIA_TYPE.video, dts, isIFrame, 0); } } } handleVideoH265NaluList(naluList, isIFrame, naluType) { if (this.isSendSeqHeader) { if (!this.player._times.demuxStart) { this.player._times.demuxStart = now$2(); } const dts = this.getNaluDts(); const newNalu = naluList.reduce((pre, cur) => { const nalu2 = addNaleHeaderLength(pre); const nalu3 = addNaleHeaderLength(cur); const nalu4 = new Uint8Array(nalu2.byteLength + nalu3.byteLength); nalu4.set(nalu2, 0); nalu4.set(nalu3, nalu2.byteLength); return nalu4; }); const packet = hevcEncoderNalePacketNotLength(newNalu, isIFrame); this._preDoDecode(packet, MEDIA_TYPE.video, dts, isIFrame, 0); } else { this.player.debug.warn(this.TAG_NAME, `handleVideoH265NaluList isSendSeqHeader is false`); } } _preDoDecode(packet, type, dts, isIFrame, cts) { this.player.updateStats({ vbps: packet.byteLength, dts: dts }); if (isIFrame) { this.calcIframeIntervalTimestamp(dts); } this._doDecode(packet, MEDIA_TYPE.video, dts, isIFrame, cts); } getInputByteLength() { let result = 0; if (this.lastBuf) { result = this.lastBuf.byteLength; } return result; } } class EmptyLoader extends CommonLoader { constructor(player) { super(player); this.player = player; player.debug.log('EmptyDemux', 'init'); } destroy() { super.destroy(); this.player.debug.log('EmptyDemux', 'destroy'); } } var mp4box = createCommonjsModule(function (module, exports) { // file:src/log.js /* * Copyright (c) 2012-2013. 
Telecom ParisTech/TSI/MM/GPAC Cyril Concolato * License: BSD-3-Clause (see LICENSE file) */ var Log = function () { var start = new Date(); var LOG_LEVEL_ERROR = 4; var LOG_LEVEL_WARNING = 3; var LOG_LEVEL_INFO = 2; var LOG_LEVEL_DEBUG = 1; var log_level = LOG_LEVEL_ERROR; var logObject = { setLogLevel: function (level) { if (level == this.debug) log_level = LOG_LEVEL_DEBUG;else if (level == this.info) log_level = LOG_LEVEL_INFO;else if (level == this.warn) log_level = LOG_LEVEL_WARNING;else if (level == this.error) log_level = LOG_LEVEL_ERROR;else log_level = LOG_LEVEL_ERROR; }, debug: function (module, msg) { if (console.debug === undefined) { console.debug = console.log; } if (LOG_LEVEL_DEBUG >= log_level) { console.debug("[" + Log.getDurationString(new Date() - start, 1000) + "]", "[" + module + "]", msg); } }, log: function (module, msg) { this.debug(module.msg); }, info: function (module, msg) { if (LOG_LEVEL_INFO >= log_level) { console.info("[" + Log.getDurationString(new Date() - start, 1000) + "]", "[" + module + "]", msg); } }, warn: function (module, msg) { if (LOG_LEVEL_WARNING >= log_level) { console.warn("[" + Log.getDurationString(new Date() - start, 1000) + "]", "[" + module + "]", msg); } }, error: function (module, msg) { if (LOG_LEVEL_ERROR >= log_level) { console.error("[" + Log.getDurationString(new Date() - start, 1000) + "]", "[" + module + "]", msg); } } }; return logObject; }(); /* Helper function to print a duration value in the form H:MM:SS.MS */ Log.getDurationString = function (duration, _timescale) { var neg; /* Helper function to print a number on a fixed number of digits */ function pad(number, length) { var str = '' + number; var a = str.split('.'); while (a[0].length < length) { a[0] = '0' + a[0]; } return a.join('.'); } if (duration < 0) { neg = true; duration = -duration; } else { neg = false; } var timescale = _timescale || 1; var duration_sec = duration / timescale; var hours = Math.floor(duration_sec / 3600); duration_sec -= hours * 3600; var minutes = Math.floor(duration_sec / 60); duration_sec -= minutes * 60; var msec = duration_sec * 1000; duration_sec = Math.floor(duration_sec); msec -= duration_sec * 1000; msec = Math.floor(msec); return (neg ? "-" : "") + hours + ":" + pad(minutes, 2) + ":" + pad(duration_sec, 2) + "." 
+ pad(msec, 3); }; /* Helper function to stringify HTML5 TimeRanges objects */ Log.printRanges = function (ranges) { var length = ranges.length; if (length > 0) { var str = ""; for (var i = 0; i < length; i++) { if (i > 0) str += ","; str += "[" + Log.getDurationString(ranges.start(i)) + "," + Log.getDurationString(ranges.end(i)) + "]"; } return str; } else { return "(empty)"; } }; { exports.Log = Log; } // file:src/stream.js var MP4BoxStream = function (arrayBuffer) { if (arrayBuffer instanceof ArrayBuffer) { this.buffer = arrayBuffer; this.dataview = new DataView(arrayBuffer); } else { throw "Needs an array buffer"; } this.position = 0; }; /************************************************************************* Common API between MultiBufferStream and SimpleStream *************************************************************************/ MP4BoxStream.prototype.getPosition = function () { return this.position; }; MP4BoxStream.prototype.getEndPosition = function () { return this.buffer.byteLength; }; MP4BoxStream.prototype.getLength = function () { return this.buffer.byteLength; }; MP4BoxStream.prototype.seek = function (pos) { var npos = Math.max(0, Math.min(this.buffer.byteLength, pos)); this.position = isNaN(npos) || !isFinite(npos) ? 0 : npos; return true; }; MP4BoxStream.prototype.isEos = function () { return this.getPosition() >= this.getEndPosition(); }; /************************************************************************* Read methods, simimar to DataStream but simpler *************************************************************************/ MP4BoxStream.prototype.readAnyInt = function (size, signed) { var res = 0; if (this.position + size <= this.buffer.byteLength) { switch (size) { case 1: if (signed) { res = this.dataview.getInt8(this.position); } else { res = this.dataview.getUint8(this.position); } break; case 2: if (signed) { res = this.dataview.getInt16(this.position); } else { res = this.dataview.getUint16(this.position); } break; case 3: if (signed) { throw "No method for reading signed 24 bits values"; } else { res = this.dataview.getUint8(this.position) << 16; res |= this.dataview.getUint8(this.position + 1) << 8; res |= this.dataview.getUint8(this.position + 2); } break; case 4: if (signed) { res = this.dataview.getInt32(this.position); } else { res = this.dataview.getUint32(this.position); } break; case 8: if (signed) { throw "No method for reading signed 64 bits values"; } else { res = this.dataview.getUint32(this.position) << 32; res |= this.dataview.getUint32(this.position + 4); } break; default: throw "readInt method not implemented for size: " + size; } this.position += size; return res; } else { throw "Not enough bytes in buffer"; } }; MP4BoxStream.prototype.readUint8 = function () { return this.readAnyInt(1, false); }; MP4BoxStream.prototype.readUint16 = function () { return this.readAnyInt(2, false); }; MP4BoxStream.prototype.readUint24 = function () { return this.readAnyInt(3, false); }; MP4BoxStream.prototype.readUint32 = function () { return this.readAnyInt(4, false); }; MP4BoxStream.prototype.readUint64 = function () { return this.readAnyInt(8, false); }; MP4BoxStream.prototype.readString = function (length) { if (this.position + length <= this.buffer.byteLength) { var s = ""; for (var i = 0; i < length; i++) { s += String.fromCharCode(this.readUint8()); } return s; } else { throw "Not enough bytes in buffer"; } }; MP4BoxStream.prototype.readCString = function () { var arr = []; while (true) { var b = this.readUint8(); if (b !== 0) { arr.push(b); } 
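// A minimal usage sketch of MP4BoxStream (illustrative; arrayBuffer stands for
// any ArrayBuffer holding ISO-BMFF data): every box starts with a 32-bit
// big-endian size followed by a 4-character type, so top-level boxes can be
// walked with the methods defined above:
//
//   const stream = new MP4BoxStream(arrayBuffer);
//   while (!stream.isEos()) {
//     const size = stream.readUint32();             // box size, includes the 8-byte header
//     const type = stream.readString(4);            // e.g. 'ftyp', 'moov', 'mdat'
//     stream.seek(stream.getPosition() + size - 8); // skip the box body
//   }
//
// (In ISO-BMFF a size of 0 means "to end of file" and a size of 1 means a
// 64-bit largesize follows; this sketch ignores both cases.)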
else { break; } } return String.fromCharCode.apply(null, arr); }; MP4BoxStream.prototype.readInt8 = function () { return this.readAnyInt(1, true); }; MP4BoxStream.prototype.readInt16 = function () { return this.readAnyInt(2, true); }; MP4BoxStream.prototype.readInt32 = function () { return this.readAnyInt(4, true); }; MP4BoxStream.prototype.readInt64 = function () { return this.readAnyInt(8, false); }; MP4BoxStream.prototype.readUint8Array = function (length) { var arr = new Uint8Array(length); for (var i = 0; i < length; i++) { arr[i] = this.readUint8(); } return arr; }; MP4BoxStream.prototype.readInt16Array = function (length) { var arr = new Int16Array(length); for (var i = 0; i < length; i++) { arr[i] = this.readInt16(); } return arr; }; MP4BoxStream.prototype.readUint16Array = function (length) { var arr = new Int16Array(length); for (var i = 0; i < length; i++) { arr[i] = this.readUint16(); } return arr; }; MP4BoxStream.prototype.readUint32Array = function (length) { var arr = new Uint32Array(length); for (var i = 0; i < length; i++) { arr[i] = this.readUint32(); } return arr; }; MP4BoxStream.prototype.readInt32Array = function (length) { var arr = new Int32Array(length); for (var i = 0; i < length; i++) { arr[i] = this.readInt32(); } return arr; }; { exports.MP4BoxStream = MP4BoxStream; } // file:src/DataStream.js /** DataStream reads scalars, arrays and structs of data from an ArrayBuffer. It's like a file-like DataView on steroids. @param {ArrayBuffer} arrayBuffer ArrayBuffer to read from. @param {?Number} byteOffset Offset from arrayBuffer beginning for the DataStream. @param {?Boolean} endianness DataStream.BIG_ENDIAN or DataStream.LITTLE_ENDIAN (the default). */ var DataStream = function (arrayBuffer, byteOffset, endianness) { this._byteOffset = byteOffset || 0; if (arrayBuffer instanceof ArrayBuffer) { this.buffer = arrayBuffer; } else if (typeof arrayBuffer == "object") { this.dataView = arrayBuffer; if (byteOffset) { this._byteOffset += byteOffset; } } else { this.buffer = new ArrayBuffer(arrayBuffer || 0); } this.position = 0; this.endianness = endianness == null ? DataStream.LITTLE_ENDIAN : endianness; }; DataStream.prototype = {}; DataStream.prototype.getPosition = function () { return this.position; }; /** Internal function to resize the DataStream buffer when required. @param {number} extra Number of bytes to add to the buffer allocation. @return {null} */ DataStream.prototype._realloc = function (extra) { if (!this._dynamicSize) { return; } var req = this._byteOffset + this.position + extra; var blen = this._buffer.byteLength; if (req <= blen) { if (req > this._byteLength) { this._byteLength = req; } return; } if (blen < 1) { blen = 1; } while (req > blen) { blen *= 2; } var buf = new ArrayBuffer(blen); var src = new Uint8Array(this._buffer); var dst = new Uint8Array(buf, 0, src.length); dst.set(src); this.buffer = buf; this._byteLength = req; }; /** Internal function to trim the DataStream buffer when required. Used for stripping out the extra bytes from the backing buffer when the virtual byteLength is smaller than the buffer byteLength (happens after growing the buffer with writes and not filling the extra space completely). @return {null} */ DataStream.prototype._trimAlloc = function () { if (this._byteLength == this._buffer.byteLength) { return; } var buf = new ArrayBuffer(this._byteLength); var dst = new Uint8Array(buf); var src = new Uint8Array(this._buffer, 0, dst.length); dst.set(src); this.buffer = buf; }; /** Big-endian const to use as default endianness. 
@type {boolean} */ DataStream.BIG_ENDIAN = false; /** Little-endian const to use as default endianness. @type {boolean} */ DataStream.LITTLE_ENDIAN = true; /** Virtual byte length of the DataStream backing buffer. Updated to be max of original buffer size and last written size. If dynamicSize is false is set to buffer size. @type {number} */ DataStream.prototype._byteLength = 0; /** Returns the byte length of the DataStream object. @type {number} */ Object.defineProperty(DataStream.prototype, 'byteLength', { get: function () { return this._byteLength - this._byteOffset; } }); /** Set/get the backing ArrayBuffer of the DataStream object. The setter updates the DataView to point to the new buffer. @type {Object} */ Object.defineProperty(DataStream.prototype, 'buffer', { get: function () { this._trimAlloc(); return this._buffer; }, set: function (v) { this._buffer = v; this._dataView = new DataView(this._buffer, this._byteOffset); this._byteLength = this._buffer.byteLength; } }); /** Set/get the byteOffset of the DataStream object. The setter updates the DataView to point to the new byteOffset. @type {number} */ Object.defineProperty(DataStream.prototype, 'byteOffset', { get: function () { return this._byteOffset; }, set: function (v) { this._byteOffset = v; this._dataView = new DataView(this._buffer, this._byteOffset); this._byteLength = this._buffer.byteLength; } }); /** Set/get the backing DataView of the DataStream object. The setter updates the buffer and byteOffset to point to the DataView values. @type {Object} */ Object.defineProperty(DataStream.prototype, 'dataView', { get: function () { return this._dataView; }, set: function (v) { this._byteOffset = v.byteOffset; this._buffer = v.buffer; this._dataView = new DataView(this._buffer, this._byteOffset); this._byteLength = this._byteOffset + v.byteLength; } }); /** Sets the DataStream read/write position to given position. Clamps between 0 and DataStream length. @param {number} pos Position to seek to. @return {null} */ DataStream.prototype.seek = function (pos) { var npos = Math.max(0, Math.min(this.byteLength, pos)); this.position = isNaN(npos) || !isFinite(npos) ? 0 : npos; }; /** Returns true if the DataStream seek pointer is at the end of buffer and there's no more data to read. @return {boolean} True if the seek pointer is at the end of the buffer. */ DataStream.prototype.isEof = function () { return this.position >= this._byteLength; }; /** Maps a Uint8Array into the DataStream buffer. Nice for quickly reading in data. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} Uint8Array to the DataStream backing buffer. */ DataStream.prototype.mapUint8Array = function (length) { this._realloc(length * 1); var arr = new Uint8Array(this._buffer, this.byteOffset + this.position, length); this.position += length * 1; return arr; }; /** Reads an Int32Array of desired length and endianness from the DataStream. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} The read Int32Array. */ DataStream.prototype.readInt32Array = function (length, e) { length = length == null ? this.byteLength - this.position / 4 : length; var arr = new Int32Array(length); DataStream.memcpy(arr.buffer, 0, this.buffer, this.byteOffset + this.position, length * arr.BYTES_PER_ELEMENT); DataStream.arrayToNative(arr, e == null ? 
this.endianness : e); this.position += arr.byteLength; return arr; }; /** Reads an Int16Array of desired length and endianness from the DataStream. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} The read Int16Array. */ DataStream.prototype.readInt16Array = function (length, e) { length = length == null ? this.byteLength - this.position / 2 : length; var arr = new Int16Array(length); DataStream.memcpy(arr.buffer, 0, this.buffer, this.byteOffset + this.position, length * arr.BYTES_PER_ELEMENT); DataStream.arrayToNative(arr, e == null ? this.endianness : e); this.position += arr.byteLength; return arr; }; /** Reads an Int8Array of desired length from the DataStream. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} The read Int8Array. */ DataStream.prototype.readInt8Array = function (length) { length = length == null ? this.byteLength - this.position : length; var arr = new Int8Array(length); DataStream.memcpy(arr.buffer, 0, this.buffer, this.byteOffset + this.position, length * arr.BYTES_PER_ELEMENT); this.position += arr.byteLength; return arr; }; /** Reads a Uint32Array of desired length and endianness from the DataStream. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} The read Uint32Array. */ DataStream.prototype.readUint32Array = function (length, e) { length = length == null ? this.byteLength - this.position / 4 : length; var arr = new Uint32Array(length); DataStream.memcpy(arr.buffer, 0, this.buffer, this.byteOffset + this.position, length * arr.BYTES_PER_ELEMENT); DataStream.arrayToNative(arr, e == null ? this.endianness : e); this.position += arr.byteLength; return arr; }; /** Reads a Uint16Array of desired length and endianness from the DataStream. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} The read Uint16Array. */ DataStream.prototype.readUint16Array = function (length, e) { length = length == null ? this.byteLength - this.position / 2 : length; var arr = new Uint16Array(length); DataStream.memcpy(arr.buffer, 0, this.buffer, this.byteOffset + this.position, length * arr.BYTES_PER_ELEMENT); DataStream.arrayToNative(arr, e == null ? this.endianness : e); this.position += arr.byteLength; return arr; }; /** Reads a Uint8Array of desired length from the DataStream. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} The read Uint8Array. */ DataStream.prototype.readUint8Array = function (length) { length = length == null ? this.byteLength - this.position : length; var arr = new Uint8Array(length); DataStream.memcpy(arr.buffer, 0, this.buffer, this.byteOffset + this.position, length * arr.BYTES_PER_ELEMENT); this.position += arr.byteLength; return arr; }; /** Reads a Float64Array of desired length and endianness from the DataStream. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} The read Float64Array. */ DataStream.prototype.readFloat64Array = function (length, e) { length = length == null ? this.byteLength - this.position / 8 : length; var arr = new Float64Array(length); DataStream.memcpy(arr.buffer, 0, this.buffer, this.byteOffset + this.position, length * arr.BYTES_PER_ELEMENT); DataStream.arrayToNative(arr, e == null ? 
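// A minimal usage sketch of DataStream (illustrative; arrayBuffer stands for
// any ArrayBuffer): it is a cursor-based reader/writer with selectable
// endianness, using only methods defined in this bundle:
//
//   const ds = new DataStream(arrayBuffer, 0, DataStream.BIG_ENDIAN);
//   const version = ds.readUint8();
//   const flags   = ds.readUint24();
//   const count   = ds.readUint32();    // honours the stream endianness
//   const name    = ds.readCString();   // null-terminated ASCII string
//
// Writes go through the same cursor (writeUint32, writeString, ...) and the
// backing buffer grows automatically while dynamicSize is true.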
this.endianness : e); this.position += arr.byteLength; return arr; }; /** Reads a Float32Array of desired length and endianness from the DataStream. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} The read Float32Array. */ DataStream.prototype.readFloat32Array = function (length, e) { length = length == null ? this.byteLength - this.position / 4 : length; var arr = new Float32Array(length); DataStream.memcpy(arr.buffer, 0, this.buffer, this.byteOffset + this.position, length * arr.BYTES_PER_ELEMENT); DataStream.arrayToNative(arr, e == null ? this.endianness : e); this.position += arr.byteLength; return arr; }; /** Reads a 32-bit int from the DataStream with the desired endianness. @param {?boolean} e Endianness of the number. @return {number} The read number. */ DataStream.prototype.readInt32 = function (e) { var v = this._dataView.getInt32(this.position, e == null ? this.endianness : e); this.position += 4; return v; }; /** Reads a 16-bit int from the DataStream with the desired endianness. @param {?boolean} e Endianness of the number. @return {number} The read number. */ DataStream.prototype.readInt16 = function (e) { var v = this._dataView.getInt16(this.position, e == null ? this.endianness : e); this.position += 2; return v; }; /** Reads an 8-bit int from the DataStream. @return {number} The read number. */ DataStream.prototype.readInt8 = function () { var v = this._dataView.getInt8(this.position); this.position += 1; return v; }; /** Reads a 32-bit unsigned int from the DataStream with the desired endianness. @param {?boolean} e Endianness of the number. @return {number} The read number. */ DataStream.prototype.readUint32 = function (e) { var v = this._dataView.getUint32(this.position, e == null ? this.endianness : e); this.position += 4; return v; }; /** Reads a 16-bit unsigned int from the DataStream with the desired endianness. @param {?boolean} e Endianness of the number. @return {number} The read number. */ DataStream.prototype.readUint16 = function (e) { var v = this._dataView.getUint16(this.position, e == null ? this.endianness : e); this.position += 2; return v; }; /** Reads an 8-bit unsigned int from the DataStream. @return {number} The read number. */ DataStream.prototype.readUint8 = function () { var v = this._dataView.getUint8(this.position); this.position += 1; return v; }; /** Reads a 32-bit float from the DataStream with the desired endianness. @param {?boolean} e Endianness of the number. @return {number} The read number. */ DataStream.prototype.readFloat32 = function (e) { var v = this._dataView.getFloat32(this.position, e == null ? this.endianness : e); this.position += 4; return v; }; /** Reads a 64-bit float from the DataStream with the desired endianness. @param {?boolean} e Endianness of the number. @return {number} The read number. */ DataStream.prototype.readFloat64 = function (e) { var v = this._dataView.getFloat64(this.position, e == null ? this.endianness : e); this.position += 8; return v; }; /** Native endianness. Either DataStream.BIG_ENDIAN or DataStream.LITTLE_ENDIAN depending on the platform endianness. @type {boolean} */ DataStream.endianness = new Int8Array(new Int16Array([1]).buffer)[0] > 0; /** Copies byteLength bytes from the src buffer at srcOffset to the dst buffer at dstOffset. @param {Object} dst Destination ArrayBuffer to write to. @param {number} dstOffset Offset to the destination ArrayBuffer. @param {Object} src Source ArrayBuffer to read from. 
@param {number} srcOffset Offset to the source ArrayBuffer. @param {number} byteLength Number of bytes to copy. */ DataStream.memcpy = function (dst, dstOffset, src, srcOffset, byteLength) { var dstU8 = new Uint8Array(dst, dstOffset, byteLength); var srcU8 = new Uint8Array(src, srcOffset, byteLength); dstU8.set(srcU8); }; /** Converts array to native endianness in-place. @param {Object} array Typed array to convert. @param {boolean} arrayIsLittleEndian True if the data in the array is little-endian. Set false for big-endian. @return {Object} The converted typed array. */ DataStream.arrayToNative = function (array, arrayIsLittleEndian) { if (arrayIsLittleEndian == this.endianness) { return array; } else { return this.flipArrayEndianness(array); } }; /** Converts native endianness array to desired endianness in-place. @param {Object} array Typed array to convert. @param {boolean} littleEndian True if the converted array should be little-endian. Set false for big-endian. @return {Object} The converted typed array. */ DataStream.nativeToEndian = function (array, littleEndian) { if (this.endianness == littleEndian) { return array; } else { return this.flipArrayEndianness(array); } }; /** Flips typed array endianness in-place. @param {Object} array Typed array to flip. @return {Object} The converted typed array. */ DataStream.flipArrayEndianness = function (array) { var u8 = new Uint8Array(array.buffer, array.byteOffset, array.byteLength); for (var i = 0; i < array.byteLength; i += array.BYTES_PER_ELEMENT) { for (var j = i + array.BYTES_PER_ELEMENT - 1, k = i; j > k; j--, k++) { var tmp = u8[k]; u8[k] = u8[j]; u8[j] = tmp; } } return array; }; /** Seek position where DataStream#readStruct ran into a problem. Useful for debugging struct parsing. @type {number} */ DataStream.prototype.failurePosition = 0; String.fromCharCodeUint8 = function (uint8arr) { var arr = []; for (var i = 0; i < uint8arr.length; i++) { arr[i] = uint8arr[i]; } return String.fromCharCode.apply(null, arr); }; /** Read a string of desired length and encoding from the DataStream. @param {number} length The length of the string to read in bytes. @param {?string} encoding The encoding of the string data in the DataStream. Defaults to ASCII. @return {string} The read string. */ DataStream.prototype.readString = function (length, encoding) { if (encoding == null || encoding == "ASCII") { return String.fromCharCodeUint8.apply(null, [this.mapUint8Array(length == null ? this.byteLength - this.position : length)]); } else { return new TextDecoder(encoding).decode(this.mapUint8Array(length)); } }; /** Read null-terminated string of desired length from the DataStream. Truncates the returned string so that the null byte is not a part of it. @param {?number} length The length of the string to read. @return {string} The read string. 
*/ DataStream.prototype.readCString = function (length) { var blen = this.byteLength - this.position; var u8 = new Uint8Array(this._buffer, this._byteOffset + this.position); var len = blen; if (length != null) { len = Math.min(length, blen); } for (var i = 0; i < len && u8[i] !== 0; i++); // find first zero byte var s = String.fromCharCodeUint8.apply(null, [this.mapUint8Array(i)]); if (length != null) { this.position += len - i; } else if (i != blen) { this.position += 1; // trailing zero if not at end of buffer } return s; }; /* TODO: fix endianness for 24/64-bit fields TODO: check range/support for 64-bits numbers in JavaScript */ var MAX_SIZE = Math.pow(2, 32); DataStream.prototype.readInt64 = function () { return this.readInt32() * MAX_SIZE + this.readUint32(); }; DataStream.prototype.readUint64 = function () { return this.readUint32() * MAX_SIZE + this.readUint32(); }; DataStream.prototype.readInt64 = function () { return this.readUint32() * MAX_SIZE + this.readUint32(); }; DataStream.prototype.readUint24 = function () { return (this.readUint8() << 16) + (this.readUint8() << 8) + this.readUint8(); }; { exports.DataStream = DataStream; } // file:src/DataStream-write.js /** Saves the DataStream contents to the given filename. Uses Chrome's anchor download property to initiate download. @param {string} filename Filename to save as. @return {null} */ DataStream.prototype.save = function (filename) { var blob = new Blob([this.buffer]); if (window.URL && URL.createObjectURL) { var url = window.URL.createObjectURL(blob); var a = document.createElement('a'); // Required in Firefox: document.body.appendChild(a); a.setAttribute('href', url); a.setAttribute('download', filename); // Required in Firefox: a.setAttribute('target', '_self'); a.click(); window.URL.revokeObjectURL(url); } else { throw "DataStream.save: Can't create object URL."; } }; /** Whether to extend DataStream buffer when trying to write beyond its size. If set, the buffer is reallocated to twice its current size until the requested write fits the buffer. @type {boolean} */ DataStream.prototype._dynamicSize = true; Object.defineProperty(DataStream.prototype, 'dynamicSize', { get: function () { return this._dynamicSize; }, set: function (v) { if (!v) { this._trimAlloc(); } this._dynamicSize = v; } }); /** Internal function to trim the DataStream buffer when required. Used for stripping out the first bytes when not needed anymore. @return {null} */ DataStream.prototype.shift = function (offset) { var buf = new ArrayBuffer(this._byteLength - offset); var dst = new Uint8Array(buf); var src = new Uint8Array(this._buffer, offset, dst.length); dst.set(src); this.buffer = buf; this.position -= offset; }; /** Writes an Int32Array of specified endianness to the DataStream. @param {Object} arr The array to write. @param {?boolean} e Endianness of the data to write. */ DataStream.prototype.writeInt32Array = function (arr, e) { this._realloc(arr.length * 4); if (arr instanceof Int32Array && this.byteOffset + this.position % arr.BYTES_PER_ELEMENT === 0) { DataStream.memcpy(this._buffer, this.byteOffset + this.position, arr.buffer, 0, arr.byteLength); this.mapInt32Array(arr.length, e); } else { for (var i = 0; i < arr.length; i++) { this.writeInt32(arr[i], e); } } }; /** Writes an Int16Array of specified endianness to the DataStream. @param {Object} arr The array to write. @param {?boolean} e Endianness of the data to write. 
*/ DataStream.prototype.writeInt16Array = function (arr, e) { this._realloc(arr.length * 2); if (arr instanceof Int16Array && this.byteOffset + this.position % arr.BYTES_PER_ELEMENT === 0) { DataStream.memcpy(this._buffer, this.byteOffset + this.position, arr.buffer, 0, arr.byteLength); this.mapInt16Array(arr.length, e); } else { for (var i = 0; i < arr.length; i++) { this.writeInt16(arr[i], e); } } }; /** Writes an Int8Array to the DataStream. @param {Object} arr The array to write. */ DataStream.prototype.writeInt8Array = function (arr) { this._realloc(arr.length * 1); if (arr instanceof Int8Array && this.byteOffset + this.position % arr.BYTES_PER_ELEMENT === 0) { DataStream.memcpy(this._buffer, this.byteOffset + this.position, arr.buffer, 0, arr.byteLength); this.mapInt8Array(arr.length); } else { for (var i = 0; i < arr.length; i++) { this.writeInt8(arr[i]); } } }; /** Writes a Uint32Array of specified endianness to the DataStream. @param {Object} arr The array to write. @param {?boolean} e Endianness of the data to write. */ DataStream.prototype.writeUint32Array = function (arr, e) { this._realloc(arr.length * 4); if (arr instanceof Uint32Array && this.byteOffset + this.position % arr.BYTES_PER_ELEMENT === 0) { DataStream.memcpy(this._buffer, this.byteOffset + this.position, arr.buffer, 0, arr.byteLength); this.mapUint32Array(arr.length, e); } else { for (var i = 0; i < arr.length; i++) { this.writeUint32(arr[i], e); } } }; /** Writes a Uint16Array of specified endianness to the DataStream. @param {Object} arr The array to write. @param {?boolean} e Endianness of the data to write. */ DataStream.prototype.writeUint16Array = function (arr, e) { this._realloc(arr.length * 2); if (arr instanceof Uint16Array && this.byteOffset + this.position % arr.BYTES_PER_ELEMENT === 0) { DataStream.memcpy(this._buffer, this.byteOffset + this.position, arr.buffer, 0, arr.byteLength); this.mapUint16Array(arr.length, e); } else { for (var i = 0; i < arr.length; i++) { this.writeUint16(arr[i], e); } } }; /** Writes a Uint8Array to the DataStream. @param {Object} arr The array to write. */ DataStream.prototype.writeUint8Array = function (arr) { this._realloc(arr.length * 1); if (arr instanceof Uint8Array && this.byteOffset + this.position % arr.BYTES_PER_ELEMENT === 0) { DataStream.memcpy(this._buffer, this.byteOffset + this.position, arr.buffer, 0, arr.byteLength); this.mapUint8Array(arr.length); } else { for (var i = 0; i < arr.length; i++) { this.writeUint8(arr[i]); } } }; /** Writes a Float64Array of specified endianness to the DataStream. @param {Object} arr The array to write. @param {?boolean} e Endianness of the data to write. */ DataStream.prototype.writeFloat64Array = function (arr, e) { this._realloc(arr.length * 8); if (arr instanceof Float64Array && this.byteOffset + this.position % arr.BYTES_PER_ELEMENT === 0) { DataStream.memcpy(this._buffer, this.byteOffset + this.position, arr.buffer, 0, arr.byteLength); this.mapFloat64Array(arr.length, e); } else { for (var i = 0; i < arr.length; i++) { this.writeFloat64(arr[i], e); } } }; /** Writes a Float32Array of specified endianness to the DataStream. @param {Object} arr The array to write. @param {?boolean} e Endianness of the data to write. 
*/ DataStream.prototype.writeFloat32Array = function (arr, e) { this._realloc(arr.length * 4); if (arr instanceof Float32Array && this.byteOffset + this.position % arr.BYTES_PER_ELEMENT === 0) { DataStream.memcpy(this._buffer, this.byteOffset + this.position, arr.buffer, 0, arr.byteLength); this.mapFloat32Array(arr.length, e); } else { for (var i = 0; i < arr.length; i++) { this.writeFloat32(arr[i], e); } } }; /** Writes a 32-bit int to the DataStream with the desired endianness. @param {number} v Number to write. @param {?boolean} e Endianness of the number. */ DataStream.prototype.writeInt32 = function (v, e) { this._realloc(4); this._dataView.setInt32(this.position, v, e == null ? this.endianness : e); this.position += 4; }; /** Writes a 16-bit int to the DataStream with the desired endianness. @param {number} v Number to write. @param {?boolean} e Endianness of the number. */ DataStream.prototype.writeInt16 = function (v, e) { this._realloc(2); this._dataView.setInt16(this.position, v, e == null ? this.endianness : e); this.position += 2; }; /** Writes an 8-bit int to the DataStream. @param {number} v Number to write. */ DataStream.prototype.writeInt8 = function (v) { this._realloc(1); this._dataView.setInt8(this.position, v); this.position += 1; }; /** Writes a 32-bit unsigned int to the DataStream with the desired endianness. @param {number} v Number to write. @param {?boolean} e Endianness of the number. */ DataStream.prototype.writeUint32 = function (v, e) { this._realloc(4); this._dataView.setUint32(this.position, v, e == null ? this.endianness : e); this.position += 4; }; /** Writes a 16-bit unsigned int to the DataStream with the desired endianness. @param {number} v Number to write. @param {?boolean} e Endianness of the number. */ DataStream.prototype.writeUint16 = function (v, e) { this._realloc(2); this._dataView.setUint16(this.position, v, e == null ? this.endianness : e); this.position += 2; }; /** Writes an 8-bit unsigned int to the DataStream. @param {number} v Number to write. */ DataStream.prototype.writeUint8 = function (v) { this._realloc(1); this._dataView.setUint8(this.position, v); this.position += 1; }; /** Writes a 32-bit float to the DataStream with the desired endianness. @param {number} v Number to write. @param {?boolean} e Endianness of the number. */ DataStream.prototype.writeFloat32 = function (v, e) { this._realloc(4); this._dataView.setFloat32(this.position, v, e == null ? this.endianness : e); this.position += 4; }; /** Writes a 64-bit float to the DataStream with the desired endianness. @param {number} v Number to write. @param {?boolean} e Endianness of the number. */ DataStream.prototype.writeFloat64 = function (v, e) { this._realloc(8); this._dataView.setFloat64(this.position, v, e == null ? this.endianness : e); this.position += 8; }; /** Write a UCS-2 string of desired endianness to the DataStream. The lengthOverride argument lets you define the number of characters to write. If the string is shorter than lengthOverride, the extra space is padded with zeroes. @param {string} str The string to write. @param {?boolean} endianness The endianness to use for the written string data. @param {?number} lengthOverride The number of characters to write. 
*/ DataStream.prototype.writeUCS2String = function (str, endianness, lengthOverride) { if (lengthOverride == null) { lengthOverride = str.length; } for (var i = 0; i < str.length && i < lengthOverride; i++) { this.writeUint16(str.charCodeAt(i), endianness); } for (; i < lengthOverride; i++) { this.writeUint16(0); } }; /** Writes a string of desired length and encoding to the DataStream. @param {string} s The string to write. @param {?string} encoding The encoding for the written string data. Defaults to ASCII. @param {?number} length The number of characters to write. */ DataStream.prototype.writeString = function (s, encoding, length) { var i = 0; if (encoding == null || encoding == "ASCII") { if (length != null) { var len = Math.min(s.length, length); for (i = 0; i < len; i++) { this.writeUint8(s.charCodeAt(i)); } for (; i < length; i++) { this.writeUint8(0); } } else { for (i = 0; i < s.length; i++) { this.writeUint8(s.charCodeAt(i)); } } } else { this.writeUint8Array(new TextEncoder(encoding).encode(s.substring(0, length))); } }; /** Writes a null-terminated string to DataStream and zero-pads it to length bytes. If length is not given, writes the string followed by a zero. If string is longer than length, the written part of the string does not have a trailing zero. @param {string} s The string to write. @param {?number} length The number of characters to write. */ DataStream.prototype.writeCString = function (s, length) { var i = 0; if (length != null) { var len = Math.min(s.length, length); for (i = 0; i < len; i++) { this.writeUint8(s.charCodeAt(i)); } for (; i < length; i++) { this.writeUint8(0); } } else { for (i = 0; i < s.length; i++) { this.writeUint8(s.charCodeAt(i)); } this.writeUint8(0); } }; /** Writes a struct to the DataStream. Takes a structDefinition that gives the types and a struct object that gives the values. Refer to readStruct for the structure of structDefinition. @param {Object} structDefinition Type definition of the struct. @param {Object} struct The struct data object. */ DataStream.prototype.writeStruct = function (structDefinition, struct) { for (var i = 0; i < structDefinition.length; i += 2) { var t = structDefinition[i + 1]; this.writeType(t, struct[structDefinition[i]], struct); } }; /** Writes object v of type t to the DataStream. @param {Object} t Type of data to write. @param {Object} v Value of data to write. @param {Object} struct Struct to pass to write callback functions. 
*/ DataStream.prototype.writeType = function (t, v, struct) { var tp; if (typeof t == "function") { return t(this, v); } else if (typeof t == "object" && !(t instanceof Array)) { return t.set(this, v, struct); } var lengthOverride = null; var charset = "ASCII"; var pos = this.position; if (typeof t == 'string' && /:/.test(t)) { tp = t.split(":"); t = tp[0]; lengthOverride = parseInt(tp[1]); } if (typeof t == 'string' && /,/.test(t)) { tp = t.split(","); t = tp[0]; charset = parseInt(tp[1]); } switch (t) { case 'uint8': this.writeUint8(v); break; case 'int8': this.writeInt8(v); break; case 'uint16': this.writeUint16(v, this.endianness); break; case 'int16': this.writeInt16(v, this.endianness); break; case 'uint32': this.writeUint32(v, this.endianness); break; case 'int32': this.writeInt32(v, this.endianness); break; case 'float32': this.writeFloat32(v, this.endianness); break; case 'float64': this.writeFloat64(v, this.endianness); break; case 'uint16be': this.writeUint16(v, DataStream.BIG_ENDIAN); break; case 'int16be': this.writeInt16(v, DataStream.BIG_ENDIAN); break; case 'uint32be': this.writeUint32(v, DataStream.BIG_ENDIAN); break; case 'int32be': this.writeInt32(v, DataStream.BIG_ENDIAN); break; case 'float32be': this.writeFloat32(v, DataStream.BIG_ENDIAN); break; case 'float64be': this.writeFloat64(v, DataStream.BIG_ENDIAN); break; case 'uint16le': this.writeUint16(v, DataStream.LITTLE_ENDIAN); break; case 'int16le': this.writeInt16(v, DataStream.LITTLE_ENDIAN); break; case 'uint32le': this.writeUint32(v, DataStream.LITTLE_ENDIAN); break; case 'int32le': this.writeInt32(v, DataStream.LITTLE_ENDIAN); break; case 'float32le': this.writeFloat32(v, DataStream.LITTLE_ENDIAN); break; case 'float64le': this.writeFloat64(v, DataStream.LITTLE_ENDIAN); break; case 'cstring': this.writeCString(v, lengthOverride); break; case 'string': this.writeString(v, charset, lengthOverride); break; case 'u16string': this.writeUCS2String(v, this.endianness, lengthOverride); break; case 'u16stringle': this.writeUCS2String(v, DataStream.LITTLE_ENDIAN, lengthOverride); break; case 'u16stringbe': this.writeUCS2String(v, DataStream.BIG_ENDIAN, lengthOverride); break; default: if (t.length == 3) { var ta = t[1]; for (var i = 0; i < v.length; i++) { this.writeType(ta, v[i]); } break; } else { this.writeStruct(t, v); break; } } if (lengthOverride != null) { this.position = pos; this._realloc(lengthOverride); this.position = pos + lengthOverride; } }; DataStream.prototype.writeUint64 = function (v) { var h = Math.floor(v / MAX_SIZE); this.writeUint32(h); this.writeUint32(v & 0xFFFFFFFF); }; DataStream.prototype.writeUint24 = function (v) { this.writeUint8((v & 0x00FF0000) >> 16); this.writeUint8((v & 0x0000FF00) >> 8); this.writeUint8(v & 0x000000FF); }; DataStream.prototype.adjustUint32 = function (position, value) { var pos = this.position; this.seek(position); this.writeUint32(value); this.seek(pos); }; // file:src/DataStream-map.js /** Maps an Int32Array into the DataStream buffer, swizzling it to native endianness in-place. The current offset from the start of the buffer needs to be a multiple of element size, just like with typed array views. Nice for quickly reading in data. Warning: potentially modifies the buffer contents. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} Int32Array to the DataStream backing buffer. 
*/ DataStream.prototype.mapInt32Array = function (length, e) { this._realloc(length * 4); var arr = new Int32Array(this._buffer, this.byteOffset + this.position, length); DataStream.arrayToNative(arr, e == null ? this.endianness : e); this.position += length * 4; return arr; }; /** Maps an Int16Array into the DataStream buffer, swizzling it to native endianness in-place. The current offset from the start of the buffer needs to be a multiple of element size, just like with typed array views. Nice for quickly reading in data. Warning: potentially modifies the buffer contents. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} Int16Array to the DataStream backing buffer. */ DataStream.prototype.mapInt16Array = function (length, e) { this._realloc(length * 2); var arr = new Int16Array(this._buffer, this.byteOffset + this.position, length); DataStream.arrayToNative(arr, e == null ? this.endianness : e); this.position += length * 2; return arr; }; /** Maps an Int8Array into the DataStream buffer. Nice for quickly reading in data. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} Int8Array to the DataStream backing buffer. */ DataStream.prototype.mapInt8Array = function (length) { this._realloc(length * 1); var arr = new Int8Array(this._buffer, this.byteOffset + this.position, length); this.position += length * 1; return arr; }; /** Maps a Uint32Array into the DataStream buffer, swizzling it to native endianness in-place. The current offset from the start of the buffer needs to be a multiple of element size, just like with typed array views. Nice for quickly reading in data. Warning: potentially modifies the buffer contents. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} Uint32Array to the DataStream backing buffer. */ DataStream.prototype.mapUint32Array = function (length, e) { this._realloc(length * 4); var arr = new Uint32Array(this._buffer, this.byteOffset + this.position, length); DataStream.arrayToNative(arr, e == null ? this.endianness : e); this.position += length * 4; return arr; }; /** Maps a Uint16Array into the DataStream buffer, swizzling it to native endianness in-place. The current offset from the start of the buffer needs to be a multiple of element size, just like with typed array views. Nice for quickly reading in data. Warning: potentially modifies the buffer contents. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} Uint16Array to the DataStream backing buffer. */ DataStream.prototype.mapUint16Array = function (length, e) { this._realloc(length * 2); var arr = new Uint16Array(this._buffer, this.byteOffset + this.position, length); DataStream.arrayToNative(arr, e == null ? this.endianness : e); this.position += length * 2; return arr; }; /** Maps a Float64Array into the DataStream buffer, swizzling it to native endianness in-place. The current offset from the start of the buffer needs to be a multiple of element size, just like with typed array views. Nice for quickly reading in data. Warning: potentially modifies the buffer contents. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} Float64Array to the DataStream backing buffer. 
*/ DataStream.prototype.mapFloat64Array = function (length, e) { this._realloc(length * 8); var arr = new Float64Array(this._buffer, this.byteOffset + this.position, length); DataStream.arrayToNative(arr, e == null ? this.endianness : e); this.position += length * 8; return arr; }; /** Maps a Float32Array into the DataStream buffer, swizzling it to native endianness in-place. The current offset from the start of the buffer needs to be a multiple of element size, just like with typed array views. Nice for quickly reading in data. Warning: potentially modifies the buffer contents. @param {number} length Number of elements to map. @param {?boolean} e Endianness of the data to read. @return {Object} Float32Array to the DataStream backing buffer. */ DataStream.prototype.mapFloat32Array = function (length, e) { this._realloc(length * 4); var arr = new Float32Array(this._buffer, this.byteOffset + this.position, length); DataStream.arrayToNative(arr, e == null ? this.endianness : e); this.position += length * 4; return arr; }; // file:src/buffer.js /** * MultiBufferStream is a class that acts as a SimpleStream for parsing * It holds several, possibly non-contiguous ArrayBuffer objects, each with a fileStart property * containing the offset for the buffer data in an original/virtual file * * It inherits also from DataStream for all read/write/alloc operations */ /** * Constructor */ var MultiBufferStream = function (buffer) { /* List of ArrayBuffers, with a fileStart property, sorted in fileStart order and non overlapping */ this.buffers = []; this.bufferIndex = -1; if (buffer) { this.insertBuffer(buffer); this.bufferIndex = 0; } }; MultiBufferStream.prototype = new DataStream(new ArrayBuffer(), 0, DataStream.BIG_ENDIAN); /************************************************************************************ Methods for the managnement of the buffers (insertion, removal, concatenation, ...) 
***********************************************************************************/ MultiBufferStream.prototype.initialized = function () { var firstBuffer; if (this.bufferIndex > -1) { return true; } else if (this.buffers.length > 0) { firstBuffer = this.buffers[0]; if (firstBuffer.fileStart === 0) { this.buffer = firstBuffer; this.bufferIndex = 0; Log.debug("MultiBufferStream", "Stream ready for parsing"); return true; } else { Log.warn("MultiBufferStream", "The first buffer should have a fileStart of 0"); this.logBufferLevel(); return false; } } else { Log.warn("MultiBufferStream", "No buffer to start parsing from"); this.logBufferLevel(); return false; } }; /** * helper function to concatenate two ArrayBuffer objects * @param {ArrayBuffer} buffer1 * @param {ArrayBuffer} buffer2 * @return {ArrayBuffer} the concatenation of buffer1 and buffer2 in that order */ ArrayBuffer.concat = function (buffer1, buffer2) { Log.debug("ArrayBuffer", "Trying to create a new buffer of size: " + (buffer1.byteLength + buffer2.byteLength)); var tmp = new Uint8Array(buffer1.byteLength + buffer2.byteLength); tmp.set(new Uint8Array(buffer1), 0); tmp.set(new Uint8Array(buffer2), buffer1.byteLength); return tmp.buffer; }; /** * Reduces the size of a given buffer, taking only the part between offset and offset+newLength * @param {ArrayBuffer} buffer * @param {Number} offset the start of the new buffer * @param {Number} newLength the length of the new buffer * @return {ArrayBuffer} the new buffer */ MultiBufferStream.prototype.reduceBuffer = function (buffer, offset, newLength) { var smallB; smallB = new Uint8Array(newLength); smallB.set(new Uint8Array(buffer, offset, newLength)); smallB.buffer.fileStart = buffer.fileStart + offset; smallB.buffer.usedBytes = 0; return smallB.buffer; }; /** * Inserts the new buffer in the sorted list of buffers, * making sure it is not overlapping with existing ones (possibly reducing its size). 
* if the new buffer overrides/replaces the 0-th buffer (for instance because it is bigger), * updates the DataStream buffer for parsing */ MultiBufferStream.prototype.insertBuffer = function (ab) { var to_add = true; /* TODO: improve insertion if many buffers */ for (var i = 0; i < this.buffers.length; i++) { var b = this.buffers[i]; if (ab.fileStart <= b.fileStart) { /* the insertion position is found */ if (ab.fileStart === b.fileStart) { /* The new buffer overlaps with an existing buffer */ if (ab.byteLength > b.byteLength) { /* the new buffer is bigger than the existing one remove the existing buffer and try again to insert the new buffer to check overlap with the next ones */ this.buffers.splice(i, 1); i--; continue; } else { /* the new buffer is smaller than the existing one, just drop it */ Log.warn("MultiBufferStream", "Buffer (fileStart: " + ab.fileStart + " - Length: " + ab.byteLength + ") already appended, ignoring"); } } else { /* The beginning of the new buffer is not overlapping with an existing buffer let's check the end of it */ if (ab.fileStart + ab.byteLength <= b.fileStart) ; else { /* There is some overlap, cut the new buffer short, and add it*/ ab = this.reduceBuffer(ab, 0, b.fileStart - ab.fileStart); } Log.debug("MultiBufferStream", "Appending new buffer (fileStart: " + ab.fileStart + " - Length: " + ab.byteLength + ")"); this.buffers.splice(i, 0, ab); /* if this new buffer is inserted in the first place in the list of the buffer, and the DataStream is initialized, make it the buffer used for parsing */ if (i === 0) { this.buffer = ab; } } to_add = false; break; } else if (ab.fileStart < b.fileStart + b.byteLength) { /* the new buffer overlaps its beginning with the end of the current buffer */ var offset = b.fileStart + b.byteLength - ab.fileStart; var newLength = ab.byteLength - offset; if (newLength > 0) { /* the new buffer is bigger than the current overlap, drop the overlapping part and try again inserting the remaining buffer */ ab = this.reduceBuffer(ab, offset, newLength); } else { /* the content of the new buffer is entirely contained in the existing buffer, drop it entirely */ to_add = false; break; } } } /* if the buffer has not been added, we can add it at the end */ if (to_add) { Log.debug("MultiBufferStream", "Appending new buffer (fileStart: " + ab.fileStart + " - Length: " + ab.byteLength + ")"); this.buffers.push(ab); /* if this new buffer is inserted in the first place in the list of the buffer, and the DataStream is initialized, make it the buffer used for parsing */ if (i === 0) { this.buffer = ab; } } }; /** * Displays the status of the buffers (number and used bytes) * @param {Object} info callback method for display */ MultiBufferStream.prototype.logBufferLevel = function (info) { var i; var buffer; var used, total; var ranges = []; var range; var bufferedString = ""; used = 0; total = 0; for (i = 0; i < this.buffers.length; i++) { buffer = this.buffers[i]; if (i === 0) { range = {}; ranges.push(range); range.start = buffer.fileStart; range.end = buffer.fileStart + buffer.byteLength; bufferedString += "[" + range.start + "-"; } else if (range.end === buffer.fileStart) { range.end = buffer.fileStart + buffer.byteLength; } else { range = {}; range.start = buffer.fileStart; bufferedString += ranges[ranges.length - 1].end - 1 + "], [" + range.start + "-"; range.end = buffer.fileStart + buffer.byteLength; ranges.push(range); } used += buffer.usedBytes; total += buffer.byteLength; } if (ranges.length > 0) { bufferedString += range.end - 1 + "]"; } 
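/* at this point "bufferedString" describes the contiguous byte ranges covered by the buffers, e.g. two disjoint buffers spanning bytes 0-999 and 2000-2999 yield "[0-999], [2000-2999]"; it is only used in the log call below */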
var log = info ? Log.info : Log.debug; if (this.buffers.length === 0) { log("MultiBufferStream", "No more buffer in memory"); } else { log("MultiBufferStream", "" + this.buffers.length + " stored buffer(s) (" + used + "/" + total + " bytes), continuous ranges: " + bufferedString); } }; MultiBufferStream.prototype.cleanBuffers = function () { var i; var buffer; for (i = 0; i < this.buffers.length; i++) { buffer = this.buffers[i]; if (buffer.usedBytes === buffer.byteLength) { Log.debug("MultiBufferStream", "Removing buffer #" + i); this.buffers.splice(i, 1); i--; } } }; MultiBufferStream.prototype.mergeNextBuffer = function () { var next_buffer; if (this.bufferIndex + 1 < this.buffers.length) { next_buffer = this.buffers[this.bufferIndex + 1]; if (next_buffer.fileStart === this.buffer.fileStart + this.buffer.byteLength) { var oldLength = this.buffer.byteLength; var oldUsedBytes = this.buffer.usedBytes; var oldFileStart = this.buffer.fileStart; this.buffers[this.bufferIndex] = ArrayBuffer.concat(this.buffer, next_buffer); this.buffer = this.buffers[this.bufferIndex]; this.buffers.splice(this.bufferIndex + 1, 1); this.buffer.usedBytes = oldUsedBytes; /* TODO: should it be += ? */ this.buffer.fileStart = oldFileStart; Log.debug("ISOFile", "Concatenating buffer for box parsing (length: " + oldLength + "->" + this.buffer.byteLength + ")"); return true; } else { return false; } } else { return false; } }; /************************************************************************* Seek-related functions *************************************************************************/ /** * Finds the buffer that holds the given file position * @param {Boolean} fromStart indicates if the search should start from the current buffer (false) * or from the first buffer (true) * @param {Number} filePosition position in the file to seek to * @param {Boolean} markAsUsed indicates if the bytes in between the current position and the seek position * should be marked as used for garbage collection * @return {Number} the index of the buffer holding the seeked file position, -1 if not found. 
*/ MultiBufferStream.prototype.findPosition = function (fromStart, filePosition, markAsUsed) { var i; var abuffer = null; var index = -1; /* find the buffer with the largest position smaller than the given position */ if (fromStart === true) { /* the reposition can be in the past, we need to check from the beginning of the list of buffers */ i = 0; } else { i = this.bufferIndex; } while (i < this.buffers.length) { abuffer = this.buffers[i]; if (abuffer.fileStart <= filePosition) { index = i; if (markAsUsed) { if (abuffer.fileStart + abuffer.byteLength <= filePosition) { abuffer.usedBytes = abuffer.byteLength; } else { abuffer.usedBytes = filePosition - abuffer.fileStart; } this.logBufferLevel(); } } else { break; } i++; } if (index !== -1) { abuffer = this.buffers[index]; if (abuffer.fileStart + abuffer.byteLength >= filePosition) { Log.debug("MultiBufferStream", "Found position in existing buffer #" + index); return index; } else { return -1; } } else { return -1; } }; /** * Finds the largest file position contained in a buffer or in the next buffers if they are contiguous (no gap) * starting from the given buffer index or from the current buffer if the index is not given * * @param {Number} inputindex Index of the buffer to start from * @return {Number} The largest file position found in the buffers */ MultiBufferStream.prototype.findEndContiguousBuf = function (inputindex) { var i; var currentBuf; var nextBuf; var index = inputindex !== undefined ? inputindex : this.bufferIndex; currentBuf = this.buffers[index]; /* find the end of the contiguous range of data */ if (this.buffers.length > index + 1) { for (i = index + 1; i < this.buffers.length; i++) { nextBuf = this.buffers[i]; if (nextBuf.fileStart === currentBuf.fileStart + currentBuf.byteLength) { currentBuf = nextBuf; } else { break; } } } /* return the position of last byte in the file that we have */ return currentBuf.fileStart + currentBuf.byteLength; }; /** * Returns the largest file position contained in the buffers, larger than the given position * @param {Number} pos the file position to start from * @return {Number} the largest position in the current buffer or in the buffer and the next contiguous * buffer that holds the given position */ MultiBufferStream.prototype.getEndFilePositionAfter = function (pos) { var index = this.findPosition(true, pos, false); if (index !== -1) { return this.findEndContiguousBuf(index); } else { return pos; } }; /************************************************************************* Garbage collection related functions *************************************************************************/ /** * Marks a given number of bytes as used in the current buffer for garbage collection * @param {Number} nbBytes */ MultiBufferStream.prototype.addUsedBytes = function (nbBytes) { this.buffer.usedBytes += nbBytes; this.logBufferLevel(); }; /** * Marks the entire current buffer as used, ready for garbage collection */ MultiBufferStream.prototype.setAllUsedBytes = function () { this.buffer.usedBytes = this.buffer.byteLength; this.logBufferLevel(); }; /************************************************************************* Common API between MultiBufferStream and SimpleStream *************************************************************************/ /** * Tries to seek to a given file position * if possible, repositions the parsing from there and returns true * if not possible, does not change anything and returns false * @param {Number} filePosition position in the file to seek to * @param {Boolean} 
fromStart indicates if the search should start from the current buffer (false) * or from the first buffer (true) * @param {Boolean} markAsUsed indicates if the bytes in between the current position and the seek position * should be marked as used for garbage collection * @return {Boolean} true if the seek succeeded, false otherwise */ MultiBufferStream.prototype.seek = function (filePosition, fromStart, markAsUsed) { var index; index = this.findPosition(fromStart, filePosition, markAsUsed); if (index !== -1) { this.buffer = this.buffers[index]; this.bufferIndex = index; this.position = filePosition - this.buffer.fileStart; Log.debug("MultiBufferStream", "Repositioning parser at buffer position: " + this.position); return true; } else { Log.debug("MultiBufferStream", "Position " + filePosition + " not found in buffered data"); return false; } }; /** * Returns the current position in the file * @return {Number} the position in the file */ MultiBufferStream.prototype.getPosition = function () { if (this.bufferIndex === -1 || this.buffers[this.bufferIndex] === null) { throw "Error accessing position in the MultiBufferStream"; } return this.buffers[this.bufferIndex].fileStart + this.position; }; /** * Returns the length of the current buffer * @return {Number} the length of the current buffer */ MultiBufferStream.prototype.getLength = function () { return this.byteLength; }; MultiBufferStream.prototype.getEndPosition = function () { if (this.bufferIndex === -1 || this.buffers[this.bufferIndex] === null) { throw "Error accessing position in the MultiBufferStream"; } return this.buffers[this.bufferIndex].fileStart + this.byteLength; }; { exports.MultiBufferStream = MultiBufferStream; } // file:src/descriptor.js /* * Copyright (c) 2012-2013. Telecom ParisTech/TSI/MM/GPAC Cyril Concolato * License: BSD-3-Clause (see LICENSE file) */ var MPEG4DescriptorParser = function () { var ES_DescrTag = 0x03; var DecoderConfigDescrTag = 0x04; var DecSpecificInfoTag = 0x05; var SLConfigDescrTag = 0x06; var descTagToName = []; descTagToName[ES_DescrTag] = "ES_Descriptor"; descTagToName[DecoderConfigDescrTag] = "DecoderConfigDescriptor"; descTagToName[DecSpecificInfoTag] = "DecoderSpecificInfo"; descTagToName[SLConfigDescrTag] = "SLConfigDescriptor"; this.getDescriptorName = function (tag) { return descTagToName[tag]; }; var that = this; var classes = {}; this.parseOneDescriptor = function (stream) { var size = 0; var tag; var desc; var byteRead; tag = stream.readUint8(); byteRead = stream.readUint8(); while (byteRead & 0x80) { size = (byteRead & 0x7F) << 7; byteRead = stream.readUint8(); } size += byteRead & 0x7F; Log.debug("MPEG4DescriptorParser", "Found " + (descTagToName[tag] || "Descriptor " + tag) + ", size " + size + " at position " + stream.getPosition()); if (descTagToName[tag]) { desc = new classes[descTagToName[tag]](size); } else { desc = new classes.Descriptor(size); } desc.parse(stream); return desc; }; classes.Descriptor = function (_tag, _size) { this.tag = _tag; this.size = _size; this.descs = []; }; classes.Descriptor.prototype.parse = function (stream) { this.data = stream.readUint8Array(this.size); }; classes.Descriptor.prototype.findDescriptor = function (tag) { for (var i = 0; i < this.descs.length; i++) { if (this.descs[i].tag == tag) { return this.descs[i]; } } return null; }; classes.Descriptor.prototype.parseRemainingDescriptors = function (stream) { var start = stream.position; while (stream.position < start + this.size) { var desc = that.parseOneDescriptor(stream); 
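/* nested descriptors (e.g. a DecoderConfigDescriptor inside an ES_Descriptor) accumulate on this.descs and are later retrieved by tag via findDescriptor() */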
this.descs.push(desc); } }; classes.ES_Descriptor = function (size) { classes.Descriptor.call(this, ES_DescrTag, size); }; classes.ES_Descriptor.prototype = new classes.Descriptor(); classes.ES_Descriptor.prototype.parse = function (stream) { this.ES_ID = stream.readUint16(); this.flags = stream.readUint8(); this.size -= 3; if (this.flags & 0x80) { this.dependsOn_ES_ID = stream.readUint16(); this.size -= 2; } else { this.dependsOn_ES_ID = 0; } if (this.flags & 0x40) { var l = stream.readUint8(); this.URL = stream.readString(l); this.size -= l + 1; } else { this.URL = ""; } if (this.flags & 0x20) { this.OCR_ES_ID = stream.readUint16(); this.size -= 2; } else { this.OCR_ES_ID = 0; } this.parseRemainingDescriptors(stream); }; classes.ES_Descriptor.prototype.getOTI = function (stream) { var dcd = this.findDescriptor(DecoderConfigDescrTag); if (dcd) { return dcd.oti; } else { return 0; } }; classes.ES_Descriptor.prototype.getAudioConfig = function (stream) { var dcd = this.findDescriptor(DecoderConfigDescrTag); if (!dcd) return null; var dsi = dcd.findDescriptor(DecSpecificInfoTag); if (dsi && dsi.data) { var audioObjectType = (dsi.data[0] & 0xF8) >> 3; if (audioObjectType === 31 && dsi.data.length >= 2) { audioObjectType = 32 + ((dsi.data[0] & 0x7) << 3) + ((dsi.data[1] & 0xE0) >> 5); } return audioObjectType; } else { return null; } }; classes.DecoderConfigDescriptor = function (size) { classes.Descriptor.call(this, DecoderConfigDescrTag, size); }; classes.DecoderConfigDescriptor.prototype = new classes.Descriptor(); classes.DecoderConfigDescriptor.prototype.parse = function (stream) { this.oti = stream.readUint8(); this.streamType = stream.readUint8(); this.upStream = (this.streamType >> 1 & 1) !== 0; this.streamType = this.streamType >>> 2; this.bufferSize = stream.readUint24(); this.maxBitrate = stream.readUint32(); this.avgBitrate = stream.readUint32(); this.size -= 13; this.parseRemainingDescriptors(stream); }; classes.DecoderSpecificInfo = function (size) { classes.Descriptor.call(this, DecSpecificInfoTag, size); }; classes.DecoderSpecificInfo.prototype = new classes.Descriptor(); classes.SLConfigDescriptor = function (size) { classes.Descriptor.call(this, SLConfigDescrTag, size); }; classes.SLConfigDescriptor.prototype = new classes.Descriptor(); return this; }; { exports.MPEG4DescriptorParser = MPEG4DescriptorParser; } // file:src/box.js /* * Copyright (c) 2012-2013. 
Telecom ParisTech/TSI/MM/GPAC Cyril Concolato * License: BSD-3-Clause (see LICENSE file) */ var BoxParser = { ERR_INVALID_DATA: -1, ERR_NOT_ENOUGH_DATA: 0, OK: 1, // Boxes to be created with default parsing BASIC_BOXES: ["mdat", "idat", "free", "skip", "meco", "strk"], FULL_BOXES: ["hmhd", "nmhd", "iods", "xml ", "bxml", "ipro", "mere"], CONTAINER_BOXES: [["moov", ["trak", "pssh"]], ["trak"], ["edts"], ["mdia"], ["minf"], ["dinf"], ["stbl", ["sgpd", "sbgp"]], ["mvex", ["trex"]], ["moof", ["traf"]], ["traf", ["trun", "sgpd", "sbgp"]], ["vttc"], ["tref"], ["iref"], ["mfra", ["tfra"]], ["meco"], ["hnti"], ["hinf"], ["strk"], ["strd"], ["sinf"], ["rinf"], ["schi"], ["trgr"], ["udta", ["kind"]], ["iprp", ["ipma"]], ["ipco"], ["grpl"], ["j2kH"], ["etyp", ["tyco"]]], // Boxes effectively created boxCodes: [], fullBoxCodes: [], containerBoxCodes: [], sampleEntryCodes: {}, sampleGroupEntryCodes: [], trackGroupTypes: [], UUIDBoxes: {}, UUIDs: [], initialize: function () { BoxParser.FullBox.prototype = new BoxParser.Box(); BoxParser.ContainerBox.prototype = new BoxParser.Box(); BoxParser.SampleEntry.prototype = new BoxParser.Box(); BoxParser.TrackGroupTypeBox.prototype = new BoxParser.FullBox(); /* creating constructors for simple boxes */ BoxParser.BASIC_BOXES.forEach(function (type) { BoxParser.createBoxCtor(type); }); BoxParser.FULL_BOXES.forEach(function (type) { BoxParser.createFullBoxCtor(type); }); BoxParser.CONTAINER_BOXES.forEach(function (types) { BoxParser.createContainerBoxCtor(types[0], null, types[1]); }); }, Box: function (_type, _size, _uuid) { this.type = _type; this.size = _size; this.uuid = _uuid; }, FullBox: function (type, size, uuid) { BoxParser.Box.call(this, type, size, uuid); this.flags = 0; this.version = 0; }, ContainerBox: function (type, size, uuid) { BoxParser.Box.call(this, type, size, uuid); this.boxes = []; }, SampleEntry: function (type, size, hdr_size, start) { BoxParser.ContainerBox.call(this, type, size); this.hdr_size = hdr_size; this.start = start; }, SampleGroupEntry: function (type) { this.grouping_type = type; }, TrackGroupTypeBox: function (type, size) { BoxParser.FullBox.call(this, type, size); }, createBoxCtor: function (type, parseMethod) { BoxParser.boxCodes.push(type); BoxParser[type + "Box"] = function (size) { BoxParser.Box.call(this, type, size); }; BoxParser[type + "Box"].prototype = new BoxParser.Box(); if (parseMethod) BoxParser[type + "Box"].prototype.parse = parseMethod; }, createFullBoxCtor: function (type, parseMethod) { //BoxParser.fullBoxCodes.push(type); BoxParser[type + "Box"] = function (size) { BoxParser.FullBox.call(this, type, size); }; BoxParser[type + "Box"].prototype = new BoxParser.FullBox(); BoxParser[type + "Box"].prototype.parse = function (stream) { this.parseFullHeader(stream); if (parseMethod) { parseMethod.call(this, stream); } }; }, addSubBoxArrays: function (subBoxNames) { if (subBoxNames) { this.subBoxNames = subBoxNames; var nbSubBoxes = subBoxNames.length; for (var k = 0; k < nbSubBoxes; k++) { this[subBoxNames[k] + "s"] = []; } } }, createContainerBoxCtor: function (type, parseMethod, subBoxNames) { //BoxParser.containerBoxCodes.push(type); BoxParser[type + "Box"] = function (size) { BoxParser.ContainerBox.call(this, type, size); BoxParser.addSubBoxArrays.call(this, subBoxNames); }; BoxParser[type + "Box"].prototype = new BoxParser.ContainerBox(); if (parseMethod) BoxParser[type + "Box"].prototype.parse = parseMethod; }, createMediaSampleEntryCtor: function (mediaType, parseMethod, subBoxNames) { 
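/* registers the base SampleEntry constructor for a media type ("Visual", "Audio", ...) and initializes the per-type list of fourCCs that createSampleEntryCtor() below fills in */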
BoxParser.sampleEntryCodes[mediaType] = []; BoxParser[mediaType + "SampleEntry"] = function (type, size) { BoxParser.SampleEntry.call(this, type, size); BoxParser.addSubBoxArrays.call(this, subBoxNames); }; BoxParser[mediaType + "SampleEntry"].prototype = new BoxParser.SampleEntry(); if (parseMethod) BoxParser[mediaType + "SampleEntry"].prototype.parse = parseMethod; }, createSampleEntryCtor: function (mediaType, type, parseMethod, subBoxNames) { BoxParser.sampleEntryCodes[mediaType].push(type); BoxParser[type + "SampleEntry"] = function (size) { BoxParser[mediaType + "SampleEntry"].call(this, type, size); BoxParser.addSubBoxArrays.call(this, subBoxNames); }; BoxParser[type + "SampleEntry"].prototype = new BoxParser[mediaType + "SampleEntry"](); if (parseMethod) BoxParser[type + "SampleEntry"].prototype.parse = parseMethod; }, createEncryptedSampleEntryCtor: function (mediaType, type, parseMethod) { BoxParser.createSampleEntryCtor.call(this, mediaType, type, parseMethod, ["sinf"]); }, createSampleGroupCtor: function (type, parseMethod) { //BoxParser.sampleGroupEntryCodes.push(type); BoxParser[type + "SampleGroupEntry"] = function (size) { BoxParser.SampleGroupEntry.call(this, type, size); }; BoxParser[type + "SampleGroupEntry"].prototype = new BoxParser.SampleGroupEntry(); if (parseMethod) BoxParser[type + "SampleGroupEntry"].prototype.parse = parseMethod; }, createTrackGroupCtor: function (type, parseMethod) { //BoxParser.trackGroupTypes.push(type); BoxParser[type + "TrackGroupTypeBox"] = function (size) { BoxParser.TrackGroupTypeBox.call(this, type, size); }; BoxParser[type + "TrackGroupTypeBox"].prototype = new BoxParser.TrackGroupTypeBox(); if (parseMethod) BoxParser[type + "TrackGroupTypeBox"].prototype.parse = parseMethod; }, createUUIDBox: function (uuid, isFullBox, isContainerBox, parseMethod) { BoxParser.UUIDs.push(uuid); BoxParser.UUIDBoxes[uuid] = function (size) { if (isFullBox) { BoxParser.FullBox.call(this, "uuid", size, uuid); } else { if (isContainerBox) { BoxParser.ContainerBox.call(this, "uuid", size, uuid); } else { BoxParser.Box.call(this, "uuid", size, uuid); } } }; BoxParser.UUIDBoxes[uuid].prototype = isFullBox ? new BoxParser.FullBox() : isContainerBox ? 
new BoxParser.ContainerBox() : new BoxParser.Box(); if (parseMethod) { if (isFullBox) { BoxParser.UUIDBoxes[uuid].prototype.parse = function (stream) { this.parseFullHeader(stream); if (parseMethod) { parseMethod.call(this, stream); } }; } else { BoxParser.UUIDBoxes[uuid].prototype.parse = parseMethod; } } } }; BoxParser.initialize(); BoxParser.TKHD_FLAG_ENABLED = 0x000001; BoxParser.TKHD_FLAG_IN_MOVIE = 0x000002; BoxParser.TKHD_FLAG_IN_PREVIEW = 0x000004; BoxParser.TFHD_FLAG_BASE_DATA_OFFSET = 0x01; BoxParser.TFHD_FLAG_SAMPLE_DESC = 0x02; BoxParser.TFHD_FLAG_SAMPLE_DUR = 0x08; BoxParser.TFHD_FLAG_SAMPLE_SIZE = 0x10; BoxParser.TFHD_FLAG_SAMPLE_FLAGS = 0x20; BoxParser.TFHD_FLAG_DUR_EMPTY = 0x10000; BoxParser.TFHD_FLAG_DEFAULT_BASE_IS_MOOF = 0x20000; BoxParser.TRUN_FLAGS_DATA_OFFSET = 0x01; BoxParser.TRUN_FLAGS_FIRST_FLAG = 0x04; BoxParser.TRUN_FLAGS_DURATION = 0x100; BoxParser.TRUN_FLAGS_SIZE = 0x200; BoxParser.TRUN_FLAGS_FLAGS = 0x400; BoxParser.TRUN_FLAGS_CTS_OFFSET = 0x800; BoxParser.Box.prototype.add = function (name) { return this.addBox(new BoxParser[name + "Box"]()); }; BoxParser.Box.prototype.addBox = function (box) { this.boxes.push(box); if (this[box.type + "s"]) { this[box.type + "s"].push(box); } else { this[box.type] = box; } return box; }; BoxParser.Box.prototype.set = function (prop, value) { this[prop] = value; return this; }; BoxParser.Box.prototype.addEntry = function (value, _prop) { var prop = _prop || "entries"; if (!this[prop]) { this[prop] = []; } this[prop].push(value); return this; }; { exports.BoxParser = BoxParser; } // file:src/box-parse.js /* * Copyright (c) Telecom ParisTech/TSI/MM/GPAC Cyril Concolato * License: BSD-3-Clause (see LICENSE file) */ BoxParser.parseUUID = function (stream) { return BoxParser.parseHex16(stream); }; BoxParser.parseHex16 = function (stream) { var hex16 = ""; for (var i = 0; i < 16; i++) { var hex = stream.readUint8().toString(16); hex16 += hex.length === 1 ? 
"0" + hex : hex; } return hex16; }; BoxParser.parseOneBox = function (stream, headerOnly, parentSize) { var box; var start = stream.getPosition(); var hdr_size = 0; var diff; var uuid; if (stream.getEndPosition() - start < 8) { Log.debug("BoxParser", "Not enough data in stream to parse the type and size of the box"); return { code: BoxParser.ERR_NOT_ENOUGH_DATA }; } if (parentSize && parentSize < 8) { Log.debug("BoxParser", "Not enough bytes left in the parent box to parse a new box"); return { code: BoxParser.ERR_NOT_ENOUGH_DATA }; } var size = stream.readUint32(); var type = stream.readString(4); var box_type = type; Log.debug("BoxParser", "Found box of type '" + type + "' and size " + size + " at position " + start); hdr_size = 8; if (type == "uuid") { if (stream.getEndPosition() - stream.getPosition() < 16 || parentSize - hdr_size < 16) { stream.seek(start); Log.debug("BoxParser", "Not enough bytes left in the parent box to parse a UUID box"); return { code: BoxParser.ERR_NOT_ENOUGH_DATA }; } uuid = BoxParser.parseUUID(stream); hdr_size += 16; box_type = uuid; } if (size == 1) { if (stream.getEndPosition() - stream.getPosition() < 8 || parentSize && parentSize - hdr_size < 8) { stream.seek(start); Log.warn("BoxParser", "Not enough data in stream to parse the extended size of the \"" + type + "\" box"); return { code: BoxParser.ERR_NOT_ENOUGH_DATA }; } size = stream.readUint64(); hdr_size += 8; } else if (size === 0) { /* box extends till the end of file or invalid file */ if (parentSize) { size = parentSize; } else { /* box extends till the end of file */ if (type !== "mdat") { Log.error("BoxParser", "Unlimited box size not supported for type: '" + type + "'"); box = new BoxParser.Box(type, size); return { code: BoxParser.OK, box: box, size: box.size }; } } } if (size !== 0 && size < hdr_size) { Log.error("BoxParser", "Box of type " + type + " has an invalid size " + size + " (too small to be a box)"); return { code: BoxParser.ERR_NOT_ENOUGH_DATA, type: type, size: size, hdr_size: hdr_size, start: start }; } if (size !== 0 && parentSize && size > parentSize) { Log.error("BoxParser", "Box of type '" + type + "' has a size " + size + " greater than its container size " + parentSize); return { code: BoxParser.ERR_NOT_ENOUGH_DATA, type: type, size: size, hdr_size: hdr_size, start: start }; } if (size !== 0 && start + size > stream.getEndPosition()) { stream.seek(start); Log.info("BoxParser", "Not enough data in stream to parse the entire '" + type + "' box"); return { code: BoxParser.ERR_NOT_ENOUGH_DATA, type: type, size: size, hdr_size: hdr_size, start: start }; } if (headerOnly) { return { code: BoxParser.OK, type: type, size: size, hdr_size: hdr_size, start: start }; } else { if (BoxParser[type + "Box"]) { box = new BoxParser[type + "Box"](size); } else { if (type !== "uuid") { Log.warn("BoxParser", "Unknown box type: '" + type + "'"); box = new BoxParser.Box(type, size); box.has_unparsed_data = true; } else { if (BoxParser.UUIDBoxes[uuid]) { box = new BoxParser.UUIDBoxes[uuid](size); } else { Log.warn("BoxParser", "Unknown uuid type: '" + uuid + "'"); box = new BoxParser.Box(type, size); box.uuid = uuid; box.has_unparsed_data = true; } } } } box.hdr_size = hdr_size; /* recording the position of the box in the input stream */ box.start = start; if (box.write === BoxParser.Box.prototype.write && box.type !== "mdat") { Log.info("BoxParser", "'" + box_type + "' box writing not yet implemented, keeping unparsed data in memory for later write"); box.parseDataAndRewind(stream); } 
box.parse(stream); diff = stream.getPosition() - (box.start + box.size); if (diff < 0) { Log.warn("BoxParser", "Parsing of box '" + box_type + "' did not read the entire indicated box data size (missing " + -diff + " bytes), seeking forward"); stream.seek(box.start + box.size); } else if (diff > 0) { Log.error("BoxParser", "Parsing of box '" + box_type + "' read " + diff + " more bytes than the indicated box data size, seeking backwards"); if (box.size !== 0) stream.seek(box.start + box.size); } return { code: BoxParser.OK, box: box, size: box.size }; }; BoxParser.Box.prototype.parse = function (stream) { if (this.type != "mdat") { this.data = stream.readUint8Array(this.size - this.hdr_size); } else { if (this.size === 0) { stream.seek(stream.getEndPosition()); } else { stream.seek(this.start + this.size); } } }; /* Used to parse a box without consuming its data, to allow detailled parsing Useful for boxes for which a write method is not yet implemented */ BoxParser.Box.prototype.parseDataAndRewind = function (stream) { this.data = stream.readUint8Array(this.size - this.hdr_size); // rewinding stream.position -= this.size - this.hdr_size; }; BoxParser.FullBox.prototype.parseDataAndRewind = function (stream) { this.parseFullHeader(stream); this.data = stream.readUint8Array(this.size - this.hdr_size); // restore the header size as if the full header had not been parsed this.hdr_size -= 4; // rewinding stream.position -= this.size - this.hdr_size; }; BoxParser.FullBox.prototype.parseFullHeader = function (stream) { this.version = stream.readUint8(); this.flags = stream.readUint24(); this.hdr_size += 4; }; BoxParser.FullBox.prototype.parse = function (stream) { this.parseFullHeader(stream); this.data = stream.readUint8Array(this.size - this.hdr_size); }; BoxParser.ContainerBox.prototype.parse = function (stream) { var ret; var box; while (stream.getPosition() < this.start + this.size) { ret = BoxParser.parseOneBox(stream, false, this.size - (stream.getPosition() - this.start)); if (ret.code === BoxParser.OK) { box = ret.box; /* store the box in the 'boxes' array to preserve box order (for offset) but also store box in a property for more direct access */ this.boxes.push(box); if (this.subBoxNames && this.subBoxNames.indexOf(box.type) != -1) { this[this.subBoxNames[this.subBoxNames.indexOf(box.type)] + "s"].push(box); } else { var box_type = box.type !== "uuid" ? 
box.type : box.uuid; if (this[box_type]) { Log.warn("Box of type " + box_type + " already stored in field of this type"); } else { this[box_type] = box; } } } else { return; } } }; BoxParser.Box.prototype.parseLanguage = function (stream) { this.language = stream.readUint16(); var chars = []; chars[0] = this.language >> 10 & 0x1F; chars[1] = this.language >> 5 & 0x1F; chars[2] = this.language & 0x1F; this.languageString = String.fromCharCode(chars[0] + 0x60, chars[1] + 0x60, chars[2] + 0x60); }; // file:src/parsing/sampleentries/sampleentry.js BoxParser.SAMPLE_ENTRY_TYPE_VISUAL = "Visual"; BoxParser.SAMPLE_ENTRY_TYPE_AUDIO = "Audio"; BoxParser.SAMPLE_ENTRY_TYPE_HINT = "Hint"; BoxParser.SAMPLE_ENTRY_TYPE_METADATA = "Metadata"; BoxParser.SAMPLE_ENTRY_TYPE_SUBTITLE = "Subtitle"; BoxParser.SAMPLE_ENTRY_TYPE_SYSTEM = "System"; BoxParser.SAMPLE_ENTRY_TYPE_TEXT = "Text"; BoxParser.SampleEntry.prototype.parseHeader = function (stream) { stream.readUint8Array(6); this.data_reference_index = stream.readUint16(); this.hdr_size += 8; }; BoxParser.SampleEntry.prototype.parse = function (stream) { this.parseHeader(stream); this.data = stream.readUint8Array(this.size - this.hdr_size); }; BoxParser.SampleEntry.prototype.parseDataAndRewind = function (stream) { this.parseHeader(stream); this.data = stream.readUint8Array(this.size - this.hdr_size); // restore the header size as if the sample entry header had not been parsed this.hdr_size -= 8; // rewinding stream.position -= this.size - this.hdr_size; }; BoxParser.SampleEntry.prototype.parseFooter = function (stream) { BoxParser.ContainerBox.prototype.parse.call(this, stream); }; // Base SampleEntry types with default parsing BoxParser.createMediaSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_HINT); BoxParser.createMediaSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_METADATA); BoxParser.createMediaSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_SUBTITLE); BoxParser.createMediaSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_SYSTEM); BoxParser.createMediaSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_TEXT); //Base SampleEntry types for Audio and Video with specific parsing BoxParser.createMediaSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, function (stream) { var compressorname_length; this.parseHeader(stream); stream.readUint16(); stream.readUint16(); stream.readUint32Array(3); this.width = stream.readUint16(); this.height = stream.readUint16(); this.horizresolution = stream.readUint32(); this.vertresolution = stream.readUint32(); stream.readUint32(); this.frame_count = stream.readUint16(); compressorname_length = Math.min(31, stream.readUint8()); this.compressorname = stream.readString(compressorname_length); if (compressorname_length < 31) { stream.readString(31 - compressorname_length); } this.depth = stream.readUint16(); stream.readUint16(); this.parseFooter(stream); }); BoxParser.createMediaSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, function (stream) { this.parseHeader(stream); stream.readUint32Array(2); this.channel_count = stream.readUint16(); this.samplesize = stream.readUint16(); stream.readUint16(); stream.readUint16(); this.samplerate = stream.readUint32() / (1 << 16); this.parseFooter(stream); }); // Sample entries inheriting from Audio and Video BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "avc1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "avc2"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "avc3"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "avc4"); 
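// The fourCCs registered here bind codec-specific sample entries to the generic Visual/Audio parsers above: "avc1".."avc4" are H.264/AVC, "hvc1"/"hev1" are HEVC, "av01" is AV1, "vp08"/"vp09" are VP8/VP9, "mp4a" is MPEG-4 audio (typically AAC), "ac-3"/"ec-3" are (E-)AC-3 and "Opus" is Opus; for example, new BoxParser.avc1SampleEntry(size) parses with the Visual sample entry logic.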
BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "av01"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "dav1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "hvc1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "hev1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "hvt1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "lhe1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "dvh1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "dvhe"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "vvc1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "vvi1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "vvs1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "vvcN"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "vp08"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "vp09"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "avs3"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "j2ki"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "mjp2"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "mjpg"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "uncv"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, "mp4a"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, "ac-3"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, "ac-4"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, "ec-3"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, "Opus"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, "mha1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, "mha2"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, "mhm1"); BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, "mhm2"); // Encrypted sample entries BoxParser.createEncryptedSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_VISUAL, "encv"); BoxParser.createEncryptedSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_AUDIO, "enca"); BoxParser.createEncryptedSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_SUBTITLE, "encu"); BoxParser.createEncryptedSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_SYSTEM, "encs"); BoxParser.createEncryptedSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_TEXT, "enct"); BoxParser.createEncryptedSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_METADATA, "encm"); // file:src/parsing/a1lx.js BoxParser.createBoxCtor("a1lx", function (stream) { var large_size = stream.readUint8() & 1; var FieldLength = ((large_size & 1) + 1) * 16; this.layer_size = []; for (var i = 0; i < 3; i++) { if (FieldLength == 16) { this.layer_size[i] = stream.readUint16(); } else { this.layer_size[i] = stream.readUint32(); } } }); // file:src/parsing/a1op.js BoxParser.createBoxCtor("a1op", function (stream) { this.op_index = stream.readUint8(); }); // file:src/parsing/auxC.js BoxParser.createFullBoxCtor("auxC", function (stream) { this.aux_type = stream.readCString(); var aux_subtype_length = this.size - this.hdr_size - (this.aux_type.length + 1); this.aux_subtype = stream.readUint8Array(aux_subtype_length); }); // file:src/parsing/av1C.js BoxParser.createBoxCtor("av1C", function (stream) { var tmp = stream.readUint8(); if (tmp >> 7 & 
0x1 !== 1) { Log.error("av1C marker problem"); return; } this.version = tmp & 0x7F; if (this.version !== 1) { Log.error("av1C version " + this.version + " not supported"); return; } tmp = stream.readUint8(); this.seq_profile = tmp >> 5 & 0x7; this.seq_level_idx_0 = tmp & 0x1F; tmp = stream.readUint8(); this.seq_tier_0 = tmp >> 7 & 0x1; this.high_bitdepth = tmp >> 6 & 0x1; this.twelve_bit = tmp >> 5 & 0x1; this.monochrome = tmp >> 4 & 0x1; this.chroma_subsampling_x = tmp >> 3 & 0x1; this.chroma_subsampling_y = tmp >> 2 & 0x1; this.chroma_sample_position = tmp & 0x3; tmp = stream.readUint8(); this.reserved_1 = tmp >> 5 & 0x7; if (this.reserved_1 !== 0) { Log.error("av1C reserved_1 parsing problem"); return; } this.initial_presentation_delay_present = tmp >> 4 & 0x1; if (this.initial_presentation_delay_present === 1) { this.initial_presentation_delay_minus_one = tmp & 0xF; } else { this.reserved_2 = tmp & 0xF; if (this.reserved_2 !== 0) { Log.error("av1C reserved_2 parsing problem"); return; } } var configOBUs_length = this.size - this.hdr_size - 4; this.configOBUs = stream.readUint8Array(configOBUs_length); }); // file:src/parsing/avcC.js BoxParser.createBoxCtor("avcC", function (stream) { var i; var toparse; this.configurationVersion = stream.readUint8(); this.AVCProfileIndication = stream.readUint8(); this.profile_compatibility = stream.readUint8(); this.AVCLevelIndication = stream.readUint8(); this.lengthSizeMinusOne = stream.readUint8() & 0x3; this.nb_SPS_nalus = stream.readUint8() & 0x1F; toparse = this.size - this.hdr_size - 6; this.SPS = []; for (i = 0; i < this.nb_SPS_nalus; i++) { this.SPS[i] = {}; this.SPS[i].length = stream.readUint16(); this.SPS[i].nalu = stream.readUint8Array(this.SPS[i].length); toparse -= 2 + this.SPS[i].length; } this.nb_PPS_nalus = stream.readUint8(); toparse--; this.PPS = []; for (i = 0; i < this.nb_PPS_nalus; i++) { this.PPS[i] = {}; this.PPS[i].length = stream.readUint16(); this.PPS[i].nalu = stream.readUint8Array(this.PPS[i].length); toparse -= 2 + this.PPS[i].length; } if (toparse > 0) { this.ext = stream.readUint8Array(toparse); } }); // file:src/parsing/btrt.js BoxParser.createBoxCtor("btrt", function (stream) { this.bufferSizeDB = stream.readUint32(); this.maxBitrate = stream.readUint32(); this.avgBitrate = stream.readUint32(); }); // file:src/parsing/ccst.js BoxParser.createFullBoxCtor("ccst", function (stream) { var flags = stream.readUint8(); this.all_ref_pics_intra = (flags & 0x80) == 0x80; this.intra_pred_used = (flags & 0x40) == 0x40; this.max_ref_per_pic = (flags & 0x3f) >> 2; stream.readUint24(); }); // file:src/parsing/cdef.js BoxParser.createBoxCtor("cdef", function (stream) { var i; this.channel_count = stream.readUint16(); this.channel_indexes = []; this.channel_types = []; this.channel_associations = []; for (i = 0; i < this.channel_count; i++) { this.channel_indexes.push(stream.readUint16()); this.channel_types.push(stream.readUint16()); this.channel_associations.push(stream.readUint16()); } }); // file:src/parsing/clap.js BoxParser.createBoxCtor("clap", function (stream) { this.cleanApertureWidthN = stream.readUint32(); this.cleanApertureWidthD = stream.readUint32(); this.cleanApertureHeightN = stream.readUint32(); this.cleanApertureHeightD = stream.readUint32(); this.horizOffN = stream.readUint32(); this.horizOffD = stream.readUint32(); this.vertOffN = stream.readUint32(); this.vertOffD = stream.readUint32(); }); // file:src/parsing/clli.js BoxParser.createBoxCtor("clli", function (stream) { this.max_content_light_level = 
stream.readUint16(); this.max_pic_average_light_level = stream.readUint16(); }); // file:src/parsing/cmex.js BoxParser.createFullBoxCtor("cmex", function (stream) { if (this.flags & 0x1) { this.pos_x = stream.readInt32(); } if (this.flags & 0x2) { this.pos_y = stream.readInt32(); } if (this.flags & 0x4) { this.pos_z = stream.readInt32(); } if (this.flags & 0x8) { if (this.version == 0) { if (this.flags & 0x10) { this.quat_x = stream.readInt32(); this.quat_y = stream.readInt32(); this.quat_z = stream.readInt32(); } else { this.quat_x = stream.readInt16(); this.quat_y = stream.readInt16(); this.quat_z = stream.readInt16(); } } else if (this.version == 1) ; } if (this.flags & 0x20) { this.id = stream.readUint32(); } }); // file:src/parsing/cmin.js BoxParser.createFullBoxCtor("cmin", function (stream) { this.focal_length_x = stream.readInt32(); this.principal_point_x = stream.readInt32(); this.principal_point_y = stream.readInt32(); if (this.flags & 0x1) { this.focal_length_y = stream.readInt32(); this.skew_factor = stream.readInt32(); } }); // file:src/parsing/cmpd.js BoxParser.createBoxCtor("cmpd", function (stream) { this.component_count = stream.readUint32(); this.component_types = []; this.component_type_urls = []; for (i = 0; i < this.component_count; i++) { var component_type = stream.readUint16(); this.component_types.push(component_type); if (component_type >= 0x8000) { this.component_type_urls.push(stream.readCString()); } } }); // file:src/parsing/co64.js BoxParser.createFullBoxCtor("co64", function (stream) { var entry_count; var i; entry_count = stream.readUint32(); this.chunk_offsets = []; if (this.version === 0) { for (i = 0; i < entry_count; i++) { this.chunk_offsets.push(stream.readUint64()); } } }); // file:src/parsing/CoLL.js BoxParser.createFullBoxCtor("CoLL", function (stream) { this.maxCLL = stream.readUint16(); this.maxFALL = stream.readUint16(); }); // file:src/parsing/colr.js BoxParser.createBoxCtor("colr", function (stream) { this.colour_type = stream.readString(4); if (this.colour_type === 'nclx') { this.colour_primaries = stream.readUint16(); this.transfer_characteristics = stream.readUint16(); this.matrix_coefficients = stream.readUint16(); var tmp = stream.readUint8(); this.full_range_flag = tmp >> 7; } else if (this.colour_type === 'rICC') { this.ICC_profile = stream.readUint8Array(this.size - 4); } else if (this.colour_type === 'prof') { this.ICC_profile = stream.readUint8Array(this.size - 4); } }); // file:src/parsing/cprt.js BoxParser.createFullBoxCtor("cprt", function (stream) { this.parseLanguage(stream); this.notice = stream.readCString(); }); // file:src/parsing/cslg.js BoxParser.createFullBoxCtor("cslg", function (stream) { if (this.version === 0) { this.compositionToDTSShift = stream.readInt32(); /* signed */ this.leastDecodeToDisplayDelta = stream.readInt32(); /* signed */ this.greatestDecodeToDisplayDelta = stream.readInt32(); /* signed */ this.compositionStartTime = stream.readInt32(); /* signed */ this.compositionEndTime = stream.readInt32(); /* signed */ } }); // file:src/parsing/ctts.js BoxParser.createFullBoxCtor("ctts", function (stream) { var entry_count; var i; entry_count = stream.readUint32(); this.sample_counts = []; this.sample_offsets = []; if (this.version === 0) { for (i = 0; i < entry_count; i++) { this.sample_counts.push(stream.readUint32()); /* some files are buggy and declare version=0 while using signed offsets. 
The likelyhood of using the most significant bit in a 32-bits time offset is very low, so using signed value here as well */ var value = stream.readInt32(); if (value < 0) { Log.warn("BoxParser", "ctts box uses negative values without using version 1"); } this.sample_offsets.push(value); } } else if (this.version == 1) { for (i = 0; i < entry_count; i++) { this.sample_counts.push(stream.readUint32()); this.sample_offsets.push(stream.readInt32()); /* signed */ } } }); // file:src/parsing/dac3.js BoxParser.createBoxCtor("dac3", function (stream) { var tmp_byte1 = stream.readUint8(); var tmp_byte2 = stream.readUint8(); var tmp_byte3 = stream.readUint8(); this.fscod = tmp_byte1 >> 6; this.bsid = tmp_byte1 >> 1 & 0x1F; this.bsmod = (tmp_byte1 & 0x1) << 2 | tmp_byte2 >> 6 & 0x3; this.acmod = tmp_byte2 >> 3 & 0x7; this.lfeon = tmp_byte2 >> 2 & 0x1; this.bit_rate_code = tmp_byte2 & 0x3 | tmp_byte3 >> 5 & 0x7; }); // file:src/parsing/dec3.js BoxParser.createBoxCtor("dec3", function (stream) { var tmp_16 = stream.readUint16(); this.data_rate = tmp_16 >> 3; this.num_ind_sub = tmp_16 & 0x7; this.ind_subs = []; for (var i = 0; i < this.num_ind_sub + 1; i++) { var ind_sub = {}; this.ind_subs.push(ind_sub); var tmp_byte1 = stream.readUint8(); var tmp_byte2 = stream.readUint8(); var tmp_byte3 = stream.readUint8(); ind_sub.fscod = tmp_byte1 >> 6; ind_sub.bsid = tmp_byte1 >> 1 & 0x1F; ind_sub.bsmod = (tmp_byte1 & 0x1) << 4 | tmp_byte2 >> 4 & 0xF; ind_sub.acmod = tmp_byte2 >> 1 & 0x7; ind_sub.lfeon = tmp_byte2 & 0x1; ind_sub.num_dep_sub = tmp_byte3 >> 1 & 0xF; if (ind_sub.num_dep_sub > 0) { ind_sub.chan_loc = (tmp_byte3 & 0x1) << 8 | stream.readUint8(); } } }); // file:src/parsing/dfLa.js BoxParser.createFullBoxCtor("dfLa", function (stream) { var BLOCKTYPE_MASK = 0x7F; var LASTMETADATABLOCKFLAG_MASK = 0x80; var boxesFound = []; var knownBlockTypes = ["STREAMINFO", "PADDING", "APPLICATION", "SEEKTABLE", "VORBIS_COMMENT", "CUESHEET", "PICTURE", "RESERVED"]; // dfLa is a FullBox this.parseFullHeader(stream); // for (i=0; ; i++) { // to end of box do { var flagAndType = stream.readUint8(); var type = Math.min(flagAndType & BLOCKTYPE_MASK, knownBlockTypes.length - 1); // if this is a STREAMINFO block, read the true samplerate since this // can be different to the AudioSampleEntry samplerate. 
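// (the 13 bytes skipped below are the 3-byte metadata block length plus the min/max block size and min/max frame size fields; the next 32 bits carry the 20-bit sample rate in their upper bits, hence the ">> 12", and the remaining 20 bytes of STREAMINFO are then discarded)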
if (!type) { // read past all the other stuff stream.readUint8Array(13); // extract samplerate this.samplerate = stream.readUint32() >> 12; // read to end of STREAMINFO stream.readUint8Array(20); } else { // not interested in other block types so just discard length bytes stream.readUint8Array(stream.readUint24()); } boxesFound.push(knownBlockTypes[type]); if (!!(flagAndType & LASTMETADATABLOCKFLAG_MASK)) { break; } } while (true); this.numMetadataBlocks = boxesFound.length + " (" + boxesFound.join(", ") + ")"; }); // file:src/parsing/dimm.js BoxParser.createBoxCtor("dimm", function (stream) { this.bytessent = stream.readUint64(); }); // file:src/parsing/dmax.js BoxParser.createBoxCtor("dmax", function (stream) { this.time = stream.readUint32(); }); // file:src/parsing/dmed.js BoxParser.createBoxCtor("dmed", function (stream) { this.bytessent = stream.readUint64(); }); // file:src/parsing/dOps.js BoxParser.createBoxCtor("dOps", function (stream) { this.Version = stream.readUint8(); this.OutputChannelCount = stream.readUint8(); this.PreSkip = stream.readUint16(); this.InputSampleRate = stream.readUint32(); this.OutputGain = stream.readInt16(); this.ChannelMappingFamily = stream.readUint8(); if (this.ChannelMappingFamily !== 0) { this.StreamCount = stream.readUint8(); this.CoupledCount = stream.readUint8(); this.ChannelMapping = []; for (var i = 0; i < this.OutputChannelCount; i++) { this.ChannelMapping[i] = stream.readUint8(); } } }); // file:src/parsing/dref.js BoxParser.createFullBoxCtor("dref", function (stream) { var ret; var box; this.entries = []; var entry_count = stream.readUint32(); for (var i = 0; i < entry_count; i++) { ret = BoxParser.parseOneBox(stream, false, this.size - (stream.getPosition() - this.start)); if (ret.code === BoxParser.OK) { box = ret.box; this.entries.push(box); } else { return; } } }); // file:src/parsing/drep.js BoxParser.createBoxCtor("drep", function (stream) { this.bytessent = stream.readUint64(); }); // file:src/parsing/elng.js BoxParser.createFullBoxCtor("elng", function (stream) { this.extended_language = stream.readString(this.size - this.hdr_size); }); // file:src/parsing/elst.js BoxParser.createFullBoxCtor("elst", function (stream) { this.entries = []; var entry_count = stream.readUint32(); for (var i = 0; i < entry_count; i++) { var entry = {}; this.entries.push(entry); if (this.version === 1) { entry.segment_duration = stream.readUint64(); entry.media_time = stream.readInt64(); } else { entry.segment_duration = stream.readUint32(); entry.media_time = stream.readInt32(); } entry.media_rate_integer = stream.readInt16(); entry.media_rate_fraction = stream.readInt16(); } }); // file:src/parsing/emsg.js BoxParser.createFullBoxCtor("emsg", function (stream) { if (this.version == 1) { this.timescale = stream.readUint32(); this.presentation_time = stream.readUint64(); this.event_duration = stream.readUint32(); this.id = stream.readUint32(); this.scheme_id_uri = stream.readCString(); this.value = stream.readCString(); } else { this.scheme_id_uri = stream.readCString(); this.value = stream.readCString(); this.timescale = stream.readUint32(); this.presentation_time_delta = stream.readUint32(); this.event_duration = stream.readUint32(); this.id = stream.readUint32(); } var message_size = this.size - this.hdr_size - (4 * 4 + (this.scheme_id_uri.length + 1) + (this.value.length + 1)); if (this.version == 1) { message_size -= 4; } this.message_data = stream.readUint8Array(message_size); }); // file:src/parsing/EntityToGroup.js // ISO/IEC 14496-12:2022 Section 
8.18.3 Entity to group box BoxParser.createEntityToGroupCtor = function (type, parseMethod) { BoxParser[type + "Box"] = function (size) { BoxParser.FullBox.call(this, type, size); }; BoxParser[type + "Box"].prototype = new BoxParser.FullBox(); BoxParser[type + "Box"].prototype.parse = function (stream) { this.parseFullHeader(stream); if (parseMethod) { parseMethod.call(this, stream); } else { this.group_id = stream.readUint32(); this.num_entities_in_group = stream.readUint32(); this.entity_ids = []; for (i = 0; i < this.num_entities_in_group; i++) { var entity_id = stream.readUint32(); this.entity_ids.push(entity_id); } } }; }; // Auto exposure bracketing (ISO/IEC 23008-12:2022 Section 6.8.6.2.1) BoxParser.createEntityToGroupCtor("aebr"); // Flash exposure bracketing (ISO/IEC 23008-12:2022 Section 6.8.6.5.1) BoxParser.createEntityToGroupCtor("afbr"); // Album collection (ISO/IEC 23008-12:2022 Section 6.8.7.1) BoxParser.createEntityToGroupCtor("albc"); // Alternative entity (ISO/IEC 14496-12:2022 Section 8.18.3.1) BoxParser.createEntityToGroupCtor("altr"); // Burst image entity group (ISO/IEC 23008-12:2022 Section 6.8.2.2) BoxParser.createEntityToGroupCtor("brst"); // Depth of field bracketing (ISO/IEC 23008-12:2022 Section 6.8.6.6.1) BoxParser.createEntityToGroupCtor("dobr"); // Equivalent entity (ISO/IEC 23008-12:2022 Section 6.8.1.1) BoxParser.createEntityToGroupCtor("eqiv"); // Favourites collection (ISO/IEC 23008-12:2022 Section 6.8.7.2) BoxParser.createEntityToGroupCtor("favc"); // Focus bracketing (ISO/IEC 23008-12:2022 Section 6.8.6.4.1) BoxParser.createEntityToGroupCtor("fobr"); // Audio to image entity group (ISO/IEC 23008-12:2022 Section 6.8.4) BoxParser.createEntityToGroupCtor("iaug"); // Panorama (ISO/IEC 23008-12:2022 Section 6.8.8.1) BoxParser.createEntityToGroupCtor("pano"); // Slideshow (ISO/IEC 23008-12:2022 Section 6.8.9.1) BoxParser.createEntityToGroupCtor("slid"); // Stereo pair (ISO/IEC 23008-12:2022 Section 6.8.5) BoxParser.createEntityToGroupCtor("ster"); // Time-synchronised capture entity group (ISO/IEC 23008-12:2022 Section 6.8.3) BoxParser.createEntityToGroupCtor("tsyn"); // White balance bracketing (ISO/IEC 23008-12:2022 Section 6.8.6.3.1) BoxParser.createEntityToGroupCtor("wbbr"); // Alternative entity (ISO/IEC 23008-12:2022 AMD1 Section 6.8.10) BoxParser.createEntityToGroupCtor("prgr"); // file:src/parsing/esds.js BoxParser.createFullBoxCtor("esds", function (stream) { var esd_data = stream.readUint8Array(this.size - this.hdr_size); if (typeof MPEG4DescriptorParser !== "undefined") { var esd_parser = new MPEG4DescriptorParser(); this.esd = esd_parser.parseOneDescriptor(new DataStream(esd_data.buffer, 0, DataStream.BIG_ENDIAN)); } }); // file:src/parsing/fiel.js BoxParser.createBoxCtor("fiel", function (stream) { this.fieldCount = stream.readUint8(); this.fieldOrdering = stream.readUint8(); }); // file:src/parsing/frma.js BoxParser.createBoxCtor("frma", function (stream) { this.data_format = stream.readString(4); }); // file:src/parsing/ftyp.js BoxParser.createBoxCtor("ftyp", function (stream) { var toparse = this.size - this.hdr_size; this.major_brand = stream.readString(4); this.minor_version = stream.readUint32(); toparse -= 8; this.compatible_brands = []; var i = 0; while (toparse >= 4) { this.compatible_brands[i] = stream.readString(4); toparse -= 4; i++; } }); // file:src/parsing/hdlr.js BoxParser.createFullBoxCtor("hdlr", function (stream) { if (this.version === 0) { stream.readUint32(); this.handler = stream.readString(4); stream.readUint32Array(3); 
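/* pre_defined (4 bytes), handler_type (4 bytes) and 3 reserved uint32s have been consumed, i.e. 20 bytes past the full-box header, hence the "- 20" in the name length below */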
this.name = stream.readString(this.size - this.hdr_size - 20); if (this.name[this.name.length - 1] === '\0') { this.name = this.name.slice(0, -1); } } }); // file:src/parsing/hvcC.js BoxParser.createBoxCtor("hvcC", function (stream) { var i, j; var length; var tmp_byte; this.configurationVersion = stream.readUint8(); tmp_byte = stream.readUint8(); this.general_profile_space = tmp_byte >> 6; this.general_tier_flag = (tmp_byte & 0x20) >> 5; this.general_profile_idc = tmp_byte & 0x1F; this.general_profile_compatibility = stream.readUint32(); this.general_constraint_indicator = stream.readUint8Array(6); this.general_level_idc = stream.readUint8(); this.min_spatial_segmentation_idc = stream.readUint16() & 0xFFF; this.parallelismType = stream.readUint8() & 0x3; this.chroma_format_idc = stream.readUint8() & 0x3; this.bit_depth_luma_minus8 = stream.readUint8() & 0x7; this.bit_depth_chroma_minus8 = stream.readUint8() & 0x7; this.avgFrameRate = stream.readUint16(); tmp_byte = stream.readUint8(); this.constantFrameRate = tmp_byte >> 6; this.numTemporalLayers = (tmp_byte & 0XD) >> 3; this.temporalIdNested = (tmp_byte & 0X4) >> 2; this.lengthSizeMinusOne = tmp_byte & 0X3; this.nalu_arrays = []; var numOfArrays = stream.readUint8(); for (i = 0; i < numOfArrays; i++) { var nalu_array = []; this.nalu_arrays.push(nalu_array); tmp_byte = stream.readUint8(); nalu_array.completeness = (tmp_byte & 0x80) >> 7; nalu_array.nalu_type = tmp_byte & 0x3F; var numNalus = stream.readUint16(); for (j = 0; j < numNalus; j++) { var nalu = {}; nalu_array.push(nalu); length = stream.readUint16(); nalu.data = stream.readUint8Array(length); } } }); // file:src/parsing/iinf.js BoxParser.createFullBoxCtor("iinf", function (stream) { var ret; if (this.version === 0) { this.entry_count = stream.readUint16(); } else { this.entry_count = stream.readUint32(); } this.item_infos = []; for (var i = 0; i < this.entry_count; i++) { ret = BoxParser.parseOneBox(stream, false, this.size - (stream.getPosition() - this.start)); if (ret.code === BoxParser.OK) { if (ret.box.type !== "infe") { Log.error("BoxParser", "Expected 'infe' box, got " + ret.box.type); } this.item_infos[i] = ret.box; } else { return; } } }); // file:src/parsing/iloc.js BoxParser.createFullBoxCtor("iloc", function (stream) { var byte; byte = stream.readUint8(); this.offset_size = byte >> 4 & 0xF; this.length_size = byte & 0xF; byte = stream.readUint8(); this.base_offset_size = byte >> 4 & 0xF; if (this.version === 1 || this.version === 2) { this.index_size = byte & 0xF; } else { this.index_size = 0; // reserved = byte & 0xF; } this.items = []; var item_count = 0; if (this.version < 2) { item_count = stream.readUint16(); } else if (this.version === 2) { item_count = stream.readUint32(); } else { throw "version of iloc box not supported"; } for (var i = 0; i < item_count; i++) { var item = {}; this.items.push(item); if (this.version < 2) { item.item_ID = stream.readUint16(); } else if (this.version === 2) { item.item_ID = stream.readUint32(); } else { throw "version of iloc box not supported"; } if (this.version === 1 || this.version === 2) { item.construction_method = stream.readUint16() & 0xF; } else { item.construction_method = 0; } item.data_reference_index = stream.readUint16(); switch (this.base_offset_size) { case 0: item.base_offset = 0; break; case 4: item.base_offset = stream.readUint32(); break; case 8: item.base_offset = stream.readUint64(); break; default: throw "Error reading base offset size"; } var extent_count = stream.readUint16(); item.extents = []; for 
(var j = 0; j < extent_count; j++) { var extent = {}; item.extents.push(extent); if (this.version === 1 || this.version === 2) { switch (this.index_size) { case 0: extent.extent_index = 0; break; case 4: extent.extent_index = stream.readUint32(); break; case 8: extent.extent_index = stream.readUint64(); break; default: throw "Error reading extent index"; } } switch (this.offset_size) { case 0: extent.extent_offset = 0; break; case 4: extent.extent_offset = stream.readUint32(); break; case 8: extent.extent_offset = stream.readUint64(); break; default: throw "Error reading extent index"; } switch (this.length_size) { case 0: extent.extent_length = 0; break; case 4: extent.extent_length = stream.readUint32(); break; case 8: extent.extent_length = stream.readUint64(); break; default: throw "Error reading extent index"; } } } }); // file:src/parsing/imir.js BoxParser.createBoxCtor("imir", function (stream) { var tmp = stream.readUint8(); this.reserved = tmp >> 7; this.axis = tmp & 1; }); // file:src/parsing/infe.js BoxParser.createFullBoxCtor("infe", function (stream) { if (this.version === 0 || this.version === 1) { this.item_ID = stream.readUint16(); this.item_protection_index = stream.readUint16(); this.item_name = stream.readCString(); this.content_type = stream.readCString(); this.content_encoding = stream.readCString(); } if (this.version === 1) { this.extension_type = stream.readString(4); Log.warn("BoxParser", "Cannot parse extension type"); stream.seek(this.start + this.size); return; } if (this.version >= 2) { if (this.version === 2) { this.item_ID = stream.readUint16(); } else if (this.version === 3) { this.item_ID = stream.readUint32(); } this.item_protection_index = stream.readUint16(); this.item_type = stream.readString(4); this.item_name = stream.readCString(); if (this.item_type === "mime") { this.content_type = stream.readCString(); this.content_encoding = stream.readCString(); } else if (this.item_type === "uri ") { this.item_uri_type = stream.readCString(); } } }); // file:src/parsing/ipma.js BoxParser.createFullBoxCtor("ipma", function (stream) { var i, j; var entry_count = stream.readUint32(); this.associations = []; for (i = 0; i < entry_count; i++) { var item_assoc = {}; this.associations.push(item_assoc); if (this.version < 1) { item_assoc.id = stream.readUint16(); } else { item_assoc.id = stream.readUint32(); } var association_count = stream.readUint8(); item_assoc.props = []; for (j = 0; j < association_count; j++) { var tmp = stream.readUint8(); var p = {}; item_assoc.props.push(p); p.essential = (tmp & 0x80) >> 7 === 1; if (this.flags & 0x1) { p.property_index = (tmp & 0x7F) << 8 | stream.readUint8(); } else { p.property_index = tmp & 0x7F; } } } }); // file:src/parsing/iref.js BoxParser.createFullBoxCtor("iref", function (stream) { var ret; var box; this.references = []; while (stream.getPosition() < this.start + this.size) { ret = BoxParser.parseOneBox(stream, true, this.size - (stream.getPosition() - this.start)); if (ret.code === BoxParser.OK) { if (this.version === 0) { box = new BoxParser.SingleItemTypeReferenceBox(ret.type, ret.size, ret.hdr_size, ret.start); } else { box = new BoxParser.SingleItemTypeReferenceBoxLarge(ret.type, ret.size, ret.hdr_size, ret.start); } if (box.write === BoxParser.Box.prototype.write && box.type !== "mdat") { Log.warn("BoxParser", box.type + " box writing not yet implemented, keeping unparsed data in memory for later write"); box.parseDataAndRewind(stream); } box.parse(stream); this.references.push(box); } else { return; } } }); //
file:src/parsing/irot.js BoxParser.createBoxCtor("irot", function (stream) { this.angle = stream.readUint8() & 0x3; }); // file:src/parsing/ispe.js BoxParser.createFullBoxCtor("ispe", function (stream) { this.image_width = stream.readUint32(); this.image_height = stream.readUint32(); }); // file:src/parsing/kind.js BoxParser.createFullBoxCtor("kind", function (stream) { this.schemeURI = stream.readCString(); this.value = stream.readCString(); }); // file:src/parsing/leva.js BoxParser.createFullBoxCtor("leva", function (stream) { var count = stream.readUint8(); this.levels = []; for (var i = 0; i < count; i++) { var level = {}; this.levels[i] = level; level.track_ID = stream.readUint32(); var tmp_byte = stream.readUint8(); level.padding_flag = tmp_byte >> 7; level.assignment_type = tmp_byte & 0x7F; switch (level.assignment_type) { case 0: level.grouping_type = stream.readString(4); break; case 1: level.grouping_type = stream.readString(4); level.grouping_type_parameter = stream.readUint32(); break; case 2: break; case 3: break; case 4: level.sub_track_id = stream.readUint32(); break; default: Log.warn("BoxParser", "Unknown leva assignement type"); } } }); // file:src/parsing/lsel.js BoxParser.createBoxCtor("lsel", function (stream) { this.layer_id = stream.readUint16(); }); // file:src/parsing/maxr.js BoxParser.createBoxCtor("maxr", function (stream) { this.period = stream.readUint32(); this.bytes = stream.readUint32(); }); // file:src/parsing/mdcv.js function ColorPoint(x, y) { this.x = x; this.y = y; } ColorPoint.prototype.toString = function () { return "(" + this.x + "," + this.y + ")"; }; BoxParser.createBoxCtor("mdcv", function (stream) { this.display_primaries = []; this.display_primaries[0] = new ColorPoint(stream.readUint16(), stream.readUint16()); this.display_primaries[1] = new ColorPoint(stream.readUint16(), stream.readUint16()); this.display_primaries[2] = new ColorPoint(stream.readUint16(), stream.readUint16()); this.white_point = new ColorPoint(stream.readUint16(), stream.readUint16()); this.max_display_mastering_luminance = stream.readUint32(); this.min_display_mastering_luminance = stream.readUint32(); }); // file:src/parsing/mdhd.js BoxParser.createFullBoxCtor("mdhd", function (stream) { if (this.version == 1) { this.creation_time = stream.readUint64(); this.modification_time = stream.readUint64(); this.timescale = stream.readUint32(); this.duration = stream.readUint64(); } else { this.creation_time = stream.readUint32(); this.modification_time = stream.readUint32(); this.timescale = stream.readUint32(); this.duration = stream.readUint32(); } this.parseLanguage(stream); stream.readUint16(); }); // file:src/parsing/mehd.js BoxParser.createFullBoxCtor("mehd", function (stream) { if (this.flags & 0x1) { Log.warn("BoxParser", "mehd box incorrectly uses flags set to 1, converting version to 1"); this.version = 1; } if (this.version == 1) { this.fragment_duration = stream.readUint64(); } else { this.fragment_duration = stream.readUint32(); } }); // file:src/parsing/meta.js BoxParser.createFullBoxCtor("meta", function (stream) { this.boxes = []; BoxParser.ContainerBox.prototype.parse.call(this, stream); }); // file:src/parsing/mfhd.js BoxParser.createFullBoxCtor("mfhd", function (stream) { this.sequence_number = stream.readUint32(); }); // file:src/parsing/mfro.js BoxParser.createFullBoxCtor("mfro", function (stream) { this._size = stream.readUint32(); }); // file:src/parsing/mskC.js BoxParser.createFullBoxCtor("mskC", function (stream) { this.bits_per_pixel = stream.readUint8(); 
}); // file:src/parsing/mvhd.js BoxParser.createFullBoxCtor("mvhd", function (stream) { if (this.version == 1) { this.creation_time = stream.readUint64(); this.modification_time = stream.readUint64(); this.timescale = stream.readUint32(); this.duration = stream.readUint64(); } else { this.creation_time = stream.readUint32(); this.modification_time = stream.readUint32(); this.timescale = stream.readUint32(); this.duration = stream.readUint32(); } this.rate = stream.readUint32(); this.volume = stream.readUint16() >> 8; stream.readUint16(); stream.readUint32Array(2); this.matrix = stream.readUint32Array(9); stream.readUint32Array(6); this.next_track_id = stream.readUint32(); }); // file:src/parsing/npck.js BoxParser.createBoxCtor("npck", function (stream) { this.packetssent = stream.readUint32(); }); // file:src/parsing/nump.js BoxParser.createBoxCtor("nump", function (stream) { this.packetssent = stream.readUint64(); }); // file:src/parsing/padb.js BoxParser.createFullBoxCtor("padb", function (stream) { var sample_count = stream.readUint32(); this.padbits = []; for (var i = 0; i < Math.floor((sample_count + 1) / 2); i++) { this.padbits[i] = stream.readUint8(); } }); // file:src/parsing/pasp.js BoxParser.createBoxCtor("pasp", function (stream) { this.hSpacing = stream.readUint32(); this.vSpacing = stream.readUint32(); }); // file:src/parsing/payl.js BoxParser.createBoxCtor("payl", function (stream) { this.text = stream.readString(this.size - this.hdr_size); }); // file:src/parsing/payt.js BoxParser.createBoxCtor("payt", function (stream) { this.payloadID = stream.readUint32(); var count = stream.readUint8(); this.rtpmap_string = stream.readString(count); }); // file:src/parsing/pdin.js BoxParser.createFullBoxCtor("pdin", function (stream) { var count = (this.size - this.hdr_size) / 8; this.rate = []; this.initial_delay = []; for (var i = 0; i < count; i++) { this.rate[i] = stream.readUint32(); this.initial_delay[i] = stream.readUint32(); } }); // file:src/parsing/pitm.js BoxParser.createFullBoxCtor("pitm", function (stream) { if (this.version === 0) { this.item_id = stream.readUint16(); } else { this.item_id = stream.readUint32(); } }); // file:src/parsing/pixi.js BoxParser.createFullBoxCtor("pixi", function (stream) { var i; this.num_channels = stream.readUint8(); this.bits_per_channels = []; for (i = 0; i < this.num_channels; i++) { this.bits_per_channels[i] = stream.readUint8(); } }); // file:src/parsing/pmax.js BoxParser.createBoxCtor("pmax", function (stream) { this.bytes = stream.readUint32(); }); // file:src/parsing/prdi.js BoxParser.createFullBoxCtor("prdi", function (stream) { this.step_count = stream.readUint16(); this.item_count = []; if (this.flags & 0x2) { for (var i = 0; i < this.step_count; i++) { this.item_count[i] = stream.readUint16(); } } }); // file:src/parsing/prft.js BoxParser.createFullBoxCtor("prft", function (stream) { this.ref_track_id = stream.readUint32(); this.ntp_timestamp = stream.readUint64(); if (this.version === 0) { this.media_time = stream.readUint32(); } else { this.media_time = stream.readUint64(); } }); // file:src/parsing/pssh.js BoxParser.createFullBoxCtor("pssh", function (stream) { this.system_id = BoxParser.parseHex16(stream); if (this.version > 0) { var count = stream.readUint32(); this.kid = []; for (var i = 0; i < count; i++) { this.kid[i] = BoxParser.parseHex16(stream); } } var datasize = stream.readUint32(); if (datasize > 0) { this.data = stream.readUint8Array(datasize); } }); // file:src/parsing/qt/clef.js BoxParser.createFullBoxCtor("clef",
function (stream) { this.width = stream.readUint32(); this.height = stream.readUint32(); }); // file:src/parsing/qt/enof.js BoxParser.createFullBoxCtor("enof", function (stream) { this.width = stream.readUint32(); this.height = stream.readUint32(); }); // file:src/parsing/qt/prof.js BoxParser.createFullBoxCtor("prof", function (stream) { this.width = stream.readUint32(); this.height = stream.readUint32(); }); // file:src/parsing/qt/tapt.js BoxParser.createContainerBoxCtor("tapt", null, ["clef", "prof", "enof"]); // file:src/parsing/rtp.js BoxParser.createBoxCtor("rtp ", function (stream) { this.descriptionformat = stream.readString(4); this.sdptext = stream.readString(this.size - this.hdr_size - 4); }); // file:src/parsing/saio.js BoxParser.createFullBoxCtor("saio", function (stream) { if (this.flags & 0x1) { this.aux_info_type = stream.readUint32(); this.aux_info_type_parameter = stream.readUint32(); } var count = stream.readUint32(); this.offset = []; for (var i = 0; i < count; i++) { if (this.version === 0) { this.offset[i] = stream.readUint32(); } else { this.offset[i] = stream.readUint64(); } } }); // file:src/parsing/saiz.js BoxParser.createFullBoxCtor("saiz", function (stream) { if (this.flags & 0x1) { this.aux_info_type = stream.readUint32(); this.aux_info_type_parameter = stream.readUint32(); } this.default_sample_info_size = stream.readUint8(); var count = stream.readUint32(); this.sample_info_size = []; if (this.default_sample_info_size === 0) { for (var i = 0; i < count; i++) { this.sample_info_size[i] = stream.readUint8(); } } }); // file:src/parsing/sampleentries/mett.js BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_METADATA, "mett", function (stream) { this.parseHeader(stream); this.content_encoding = stream.readCString(); this.mime_format = stream.readCString(); this.parseFooter(stream); }); // file:src/parsing/sampleentries/metx.js BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_METADATA, "metx", function (stream) { this.parseHeader(stream); this.content_encoding = stream.readCString(); this.namespace = stream.readCString(); this.schema_location = stream.readCString(); this.parseFooter(stream); }); // file:src/parsing/sampleentries/sbtt.js BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_SUBTITLE, "sbtt", function (stream) { this.parseHeader(stream); this.content_encoding = stream.readCString(); this.mime_format = stream.readCString(); this.parseFooter(stream); }); // file:src/parsing/sampleentries/stpp.js BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_SUBTITLE, "stpp", function (stream) { this.parseHeader(stream); this.namespace = stream.readCString(); this.schema_location = stream.readCString(); this.auxiliary_mime_types = stream.readCString(); this.parseFooter(stream); }); // file:src/parsing/sampleentries/stxt.js BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_SUBTITLE, "stxt", function (stream) { this.parseHeader(stream); this.content_encoding = stream.readCString(); this.mime_format = stream.readCString(); this.parseFooter(stream); }); // file:src/parsing/sampleentries/tx3g.js BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_SUBTITLE, "tx3g", function (stream) { this.parseHeader(stream); this.displayFlags = stream.readUint32(); this.horizontal_justification = stream.readInt8(); this.vertical_justification = stream.readInt8(); this.bg_color_rgba = stream.readUint8Array(4); this.box_record = stream.readInt16Array(4); this.style_record = stream.readUint8Array(12); this.parseFooter(stream); }); // 
file:src/parsing/sampleentries/wvtt.js BoxParser.createSampleEntryCtor(BoxParser.SAMPLE_ENTRY_TYPE_METADATA, "wvtt", function (stream) { this.parseHeader(stream); this.parseFooter(stream); }); // file:src/parsing/samplegroups/alst.js BoxParser.createSampleGroupCtor("alst", function (stream) { var i; var roll_count = stream.readUint16(); this.first_output_sample = stream.readUint16(); this.sample_offset = []; for (i = 0; i < roll_count; i++) { this.sample_offset[i] = stream.readUint32(); } var remaining = this.description_length - 4 - 4 * roll_count; this.num_output_samples = []; this.num_total_samples = []; for (i = 0; i < remaining / 4; i++) { this.num_output_samples[i] = stream.readUint16(); this.num_total_samples[i] = stream.readUint16(); } }); // file:src/parsing/samplegroups/avll.js BoxParser.createSampleGroupCtor("avll", function (stream) { this.layerNumber = stream.readUint8(); this.accurateStatisticsFlag = stream.readUint8(); this.avgBitRate = stream.readUint16(); this.avgFrameRate = stream.readUint16(); }); // file:src/parsing/samplegroups/avss.js BoxParser.createSampleGroupCtor("avss", function (stream) { this.subSequenceIdentifier = stream.readUint16(); this.layerNumber = stream.readUint8(); var tmp_byte = stream.readUint8(); this.durationFlag = tmp_byte >> 7; this.avgRateFlag = tmp_byte >> 6 & 0x1; if (this.durationFlag) { this.duration = stream.readUint32(); } if (this.avgRateFlag) { this.accurateStatisticsFlag = stream.readUint8(); this.avgBitRate = stream.readUint16(); this.avgFrameRate = stream.readUint16(); } this.dependency = []; var numReferences = stream.readUint8(); for (var i = 0; i < numReferences; i++) { var dependencyInfo = {}; this.dependency.push(dependencyInfo); dependencyInfo.subSeqDirectionFlag = stream.readUint8(); dependencyInfo.layerNumber = stream.readUint8(); dependencyInfo.subSequenceIdentifier = stream.readUint16(); } }); // file:src/parsing/samplegroups/dtrt.js BoxParser.createSampleGroupCtor("dtrt", function (stream) { Log.warn("BoxParser", "Sample Group type: " + this.grouping_type + " not fully parsed"); }); // file:src/parsing/samplegroups/mvif.js BoxParser.createSampleGroupCtor("mvif", function (stream) { Log.warn("BoxParser", "Sample Group type: " + this.grouping_type + " not fully parsed"); }); // file:src/parsing/samplegroups/prol.js BoxParser.createSampleGroupCtor("prol", function (stream) { this.roll_distance = stream.readInt16(); }); // file:src/parsing/samplegroups/rap.js BoxParser.createSampleGroupCtor("rap ", function (stream) { var tmp_byte = stream.readUint8(); this.num_leading_samples_known = tmp_byte >> 7; this.num_leading_samples = tmp_byte & 0x7F; }); // file:src/parsing/samplegroups/rash.js BoxParser.createSampleGroupCtor("rash", function (stream) { this.operation_point_count = stream.readUint16(); if (this.description_length !== 2 + (this.operation_point_count === 1 ? 
2 : this.operation_point_count * 6) + 9) { Log.warn("BoxParser", "Mismatch in " + this.grouping_type + " sample group length"); this.data = stream.readUint8Array(this.description_length - 2); } else { if (this.operation_point_count === 1) { this.target_rate_share = stream.readUint16(); } else { this.target_rate_share = []; this.available_bitrate = []; for (var i = 0; i < this.operation_point_count; i++) { this.available_bitrate[i] = stream.readUint32(); this.target_rate_share[i] = stream.readUint16(); } } this.maximum_bitrate = stream.readUint32(); this.minimum_bitrate = stream.readUint32(); this.discard_priority = stream.readUint8(); } }); // file:src/parsing/samplegroups/roll.js BoxParser.createSampleGroupCtor("roll", function (stream) { this.roll_distance = stream.readInt16(); }); // file:src/parsing/samplegroups/samplegroup.js BoxParser.SampleGroupEntry.prototype.parse = function (stream) { Log.warn("BoxParser", "Unknown Sample Group type: " + this.grouping_type); this.data = stream.readUint8Array(this.description_length); }; // file:src/parsing/samplegroups/scif.js BoxParser.createSampleGroupCtor("scif", function (stream) { Log.warn("BoxParser", "Sample Group type: " + this.grouping_type + " not fully parsed"); }); // file:src/parsing/samplegroups/scnm.js BoxParser.createSampleGroupCtor("scnm", function (stream) { Log.warn("BoxParser", "Sample Group type: " + this.grouping_type + " not fully parsed"); }); // file:src/parsing/samplegroups/seig.js BoxParser.createSampleGroupCtor("seig", function (stream) { this.reserved = stream.readUint8(); var tmp = stream.readUint8(); this.crypt_byte_block = tmp >> 4; this.skip_byte_block = tmp & 0xF; this.isProtected = stream.readUint8(); this.Per_Sample_IV_Size = stream.readUint8(); this.KID = BoxParser.parseHex16(stream); this.constant_IV_size = 0; this.constant_IV = 0; if (this.isProtected === 1 && this.Per_Sample_IV_Size === 0) { this.constant_IV_size = stream.readUint8(); this.constant_IV = stream.readUint8Array(this.constant_IV_size); } }); // file:src/parsing/samplegroups/stsa.js BoxParser.createSampleGroupCtor("stsa", function (stream) { Log.warn("BoxParser", "Sample Group type: " + this.grouping_type + " not fully parsed"); }); // file:src/parsing/samplegroups/sync.js BoxParser.createSampleGroupCtor("sync", function (stream) { var tmp_byte = stream.readUint8(); this.NAL_unit_type = tmp_byte & 0x3F; }); // file:src/parsing/samplegroups/tele.js BoxParser.createSampleGroupCtor("tele", function (stream) { var tmp_byte = stream.readUint8(); this.level_independently_decodable = tmp_byte >> 7; }); // file:src/parsing/samplegroups/tsas.js BoxParser.createSampleGroupCtor("tsas", function (stream) { Log.warn("BoxParser", "Sample Group type: " + this.grouping_type + " not fully parsed"); }); // file:src/parsing/samplegroups/tscl.js BoxParser.createSampleGroupCtor("tscl", function (stream) { Log.warn("BoxParser", "Sample Group type: " + this.grouping_type + " not fully parsed"); }); // file:src/parsing/samplegroups/vipr.js BoxParser.createSampleGroupCtor("vipr", function (stream) { Log.warn("BoxParser", "Sample Group type: " + this.grouping_type + " not fully parsed"); }); // file:src/parsing/sbgp.js BoxParser.createFullBoxCtor("sbgp", function (stream) { this.grouping_type = stream.readString(4); if (this.version === 1) { this.grouping_type_parameter = stream.readUint32(); } else { this.grouping_type_parameter = 0; } this.entries = []; var entry_count = stream.readUint32(); for (var i = 0; i < entry_count; i++) { var entry = {}; 
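/* Each sbgp entry maps sample_count consecutive samples to a 1-based group_description_index into the sgpd box carrying the same grouping_type (e.g. "roll", "seig"); an index of 0 means the samples belong to no group of that type. */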
this.entries.push(entry); entry.sample_count = stream.readInt32(); entry.group_description_index = stream.readInt32(); } }); // file:src/parsing/sbpm.js function Pixel(row, col) { this.bad_pixel_row = row; this.bad_pixel_column = col; } Pixel.prototype.toString = function pixelToString() { return "[row: " + this.bad_pixel_row + ", column: " + this.bad_pixel_column + "]"; }; BoxParser.createFullBoxCtor("sbpm", function (stream) { var i; this.component_count = stream.readUint16(); this.component_index = []; for (i = 0; i < this.component_count; i++) { this.component_index.push(stream.readUint16()); } var flags = stream.readUint8(); this.correction_applied = 0x80 == (flags & 0x80); this.num_bad_rows = stream.readUint32(); this.num_bad_cols = stream.readUint32(); this.num_bad_pixels = stream.readUint32(); this.bad_rows = []; this.bad_columns = []; this.bad_pixels = []; for (i = 0; i < this.num_bad_rows; i++) { this.bad_rows.push(stream.readUint32()); } for (i = 0; i < this.num_bad_cols; i++) { this.bad_columns.push(stream.readUint32()); } for (i = 0; i < this.num_bad_pixels; i++) { var row = stream.readUint32(); var col = stream.readUint32(); this.bad_pixels.push(new Pixel(row, col)); } }); // file:src/parsing/schm.js BoxParser.createFullBoxCtor("schm", function (stream) { this.scheme_type = stream.readString(4); this.scheme_version = stream.readUint32(); if (this.flags & 0x000001) { this.scheme_uri = stream.readString(this.size - this.hdr_size - 8); } }); // file:src/parsing/sdp.js BoxParser.createBoxCtor("sdp ", function (stream) { this.sdptext = stream.readString(this.size - this.hdr_size); }); // file:src/parsing/sdtp.js BoxParser.createFullBoxCtor("sdtp", function (stream) { var tmp_byte; var count = this.size - this.hdr_size; this.is_leading = []; this.sample_depends_on = []; this.sample_is_depended_on = []; this.sample_has_redundancy = []; for (var i = 0; i < count; i++) { tmp_byte = stream.readUint8(); this.is_leading[i] = tmp_byte >> 6; this.sample_depends_on[i] = tmp_byte >> 4 & 0x3; this.sample_is_depended_on[i] = tmp_byte >> 2 & 0x3; this.sample_has_redundancy[i] = tmp_byte & 0x3; } }); // file:src/parsing/senc.js // Cannot be fully parsed because Per_Sample_IV_Size needs to be known BoxParser.createFullBoxCtor("senc" /*, function(stream) { this.parseFullHeader(stream); var sample_count = stream.readUint32(); this.samples = []; for (var i = 0; i < sample_count; i++) { var sample = {}; // tenc.default_Per_Sample_IV_Size or seig.Per_Sample_IV_Size sample.InitializationVector = this.readUint8Array(Per_Sample_IV_Size*8); if (this.flags & 0x2) { sample.subsamples = []; subsample_count = stream.readUint16(); for (var j = 0; j < subsample_count; j++) { var subsample = {}; subsample.BytesOfClearData = stream.readUint16(); subsample.BytesOfProtectedData = stream.readUint32(); sample.subsamples.push(subsample); } } // TODO this.samples.push(sample); } }*/); // file:src/parsing/sgpd.js BoxParser.createFullBoxCtor("sgpd", function (stream) { this.grouping_type = stream.readString(4); Log.debug("BoxParser", "Found Sample Groups of type " + this.grouping_type); if (this.version === 1) { this.default_length = stream.readUint32(); } else { this.default_length = 0; } if (this.version >= 2) { this.default_group_description_index = stream.readUint32(); } this.entries = []; var entry_count = stream.readUint32(); for (var i = 0; i < entry_count; i++) { var entry; if (BoxParser[this.grouping_type + "SampleGroupEntry"]) { entry = new BoxParser[this.grouping_type + "SampleGroupEntry"](this.grouping_type); 
} else { entry = new BoxParser.SampleGroupEntry(this.grouping_type); } this.entries.push(entry); if (this.version === 1) { if (this.default_length === 0) { entry.description_length = stream.readUint32(); } else { entry.description_length = this.default_length; } } else { entry.description_length = this.default_length; } if (entry.write === BoxParser.SampleGroupEntry.prototype.write) { Log.info("BoxParser", "SampleGroup for type " + this.grouping_type + " writing not yet implemented, keeping unparsed data in memory for later write"); // storing data entry.data = stream.readUint8Array(entry.description_length); // rewinding stream.position -= entry.description_length; } entry.parse(stream); } }); // file:src/parsing/sidx.js BoxParser.createFullBoxCtor("sidx", function (stream) { this.reference_ID = stream.readUint32(); this.timescale = stream.readUint32(); if (this.version === 0) { this.earliest_presentation_time = stream.readUint32(); this.first_offset = stream.readUint32(); } else { this.earliest_presentation_time = stream.readUint64(); this.first_offset = stream.readUint64(); } stream.readUint16(); this.references = []; var count = stream.readUint16(); for (var i = 0; i < count; i++) { var ref = {}; this.references.push(ref); var tmp_32 = stream.readUint32(); ref.reference_type = tmp_32 >> 31 & 0x1; ref.referenced_size = tmp_32 & 0x7FFFFFFF; ref.subsegment_duration = stream.readUint32(); tmp_32 = stream.readUint32(); ref.starts_with_SAP = tmp_32 >> 31 & 0x1; ref.SAP_type = tmp_32 >> 28 & 0x7; ref.SAP_delta_time = tmp_32 & 0xFFFFFFF; } }); // file:src/parsing/singleitemtypereference.js BoxParser.SingleItemTypeReferenceBox = function (type, size, hdr_size, start) { BoxParser.Box.call(this, type, size); this.hdr_size = hdr_size; this.start = start; }; BoxParser.SingleItemTypeReferenceBox.prototype = new BoxParser.Box(); BoxParser.SingleItemTypeReferenceBox.prototype.parse = function (stream) { this.from_item_ID = stream.readUint16(); var count = stream.readUint16(); this.references = []; for (var i = 0; i < count; i++) { this.references[i] = {}; this.references[i].to_item_ID = stream.readUint16(); } }; // file:src/parsing/singleitemtypereferencelarge.js BoxParser.SingleItemTypeReferenceBoxLarge = function (type, size, hdr_size, start) { BoxParser.Box.call(this, type, size); this.hdr_size = hdr_size; this.start = start; }; BoxParser.SingleItemTypeReferenceBoxLarge.prototype = new BoxParser.Box(); BoxParser.SingleItemTypeReferenceBoxLarge.prototype.parse = function (stream) { this.from_item_ID = stream.readUint32(); var count = stream.readUint16(); this.references = []; for (var i = 0; i < count; i++) { this.references[i] = {}; this.references[i].to_item_ID = stream.readUint32(); } }; // file:src/parsing/SmDm.js BoxParser.createFullBoxCtor("SmDm", function (stream) { this.primaryRChromaticity_x = stream.readUint16(); this.primaryRChromaticity_y = stream.readUint16(); this.primaryGChromaticity_x = stream.readUint16(); this.primaryGChromaticity_y = stream.readUint16(); this.primaryBChromaticity_x = stream.readUint16(); this.primaryBChromaticity_y = stream.readUint16(); this.whitePointChromaticity_x = stream.readUint16(); this.whitePointChromaticity_y = stream.readUint16(); this.luminanceMax = stream.readUint32(); this.luminanceMin = stream.readUint32(); }); // file:src/parsing/smhd.js BoxParser.createFullBoxCtor("smhd", function (stream) { this.balance = stream.readUint16(); stream.readUint16(); }); // file:src/parsing/ssix.js BoxParser.createFullBoxCtor("ssix", function (stream) { 
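/* ssix (SubsegmentIndexBox): for every subsegment, a list of (level, range_size) pairs describing how its bytes are partitioned across levels. */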
this.subsegments = []; var subsegment_count = stream.readUint32(); for (var i = 0; i < subsegment_count; i++) { var subsegment = {}; this.subsegments.push(subsegment); subsegment.ranges = []; var range_count = stream.readUint32(); for (var j = 0; j < range_count; j++) { var range = {}; subsegment.ranges.push(range); range.level = stream.readUint8(); range.range_size = stream.readUint24(); } } }); // file:src/parsing/stco.js BoxParser.createFullBoxCtor("stco", function (stream) { var entry_count; entry_count = stream.readUint32(); this.chunk_offsets = []; if (this.version === 0) { for (var i = 0; i < entry_count; i++) { this.chunk_offsets.push(stream.readUint32()); } } }); // file:src/parsing/stdp.js BoxParser.createFullBoxCtor("stdp", function (stream) { var count = (this.size - this.hdr_size) / 2; this.priority = []; for (var i = 0; i < count; i++) { this.priority[i] = stream.readUint16(); } }); // file:src/parsing/sthd.js BoxParser.createFullBoxCtor("sthd"); // file:src/parsing/stri.js BoxParser.createFullBoxCtor("stri", function (stream) { this.switch_group = stream.readUint16(); this.alternate_group = stream.readUint16(); this.sub_track_id = stream.readUint32(); var count = (this.size - this.hdr_size - 8) / 4; this.attribute_list = []; for (var i = 0; i < count; i++) { this.attribute_list[i] = stream.readUint32(); } }); // file:src/parsing/stsc.js BoxParser.createFullBoxCtor("stsc", function (stream) { var entry_count; var i; entry_count = stream.readUint32(); this.first_chunk = []; this.samples_per_chunk = []; this.sample_description_index = []; if (this.version === 0) { for (i = 0; i < entry_count; i++) { this.first_chunk.push(stream.readUint32()); this.samples_per_chunk.push(stream.readUint32()); this.sample_description_index.push(stream.readUint32()); } } }); // file:src/parsing/stsd.js BoxParser.createFullBoxCtor("stsd", function (stream) { var i; var ret; var entryCount; var box; this.entries = []; entryCount = stream.readUint32(); for (i = 1; i <= entryCount; i++) { ret = BoxParser.parseOneBox(stream, true, this.size - (stream.getPosition() - this.start)); if (ret.code === BoxParser.OK) { if (BoxParser[ret.type + "SampleEntry"]) { box = new BoxParser[ret.type + "SampleEntry"](ret.size); box.hdr_size = ret.hdr_size; box.start = ret.start; } else { Log.warn("BoxParser", "Unknown sample entry type: " + ret.type); box = new BoxParser.SampleEntry(ret.type, ret.size, ret.hdr_size, ret.start); } if (box.write === BoxParser.SampleEntry.prototype.write) { Log.info("BoxParser", "SampleEntry " + box.type + " box writing not yet implemented, keeping unparsed data in memory for later write"); box.parseDataAndRewind(stream); } box.parse(stream); this.entries.push(box); } else { return; } } }); // file:src/parsing/stsg.js BoxParser.createFullBoxCtor("stsg", function (stream) { this.grouping_type = stream.readUint32(); var count = stream.readUint16(); this.group_description_index = []; for (var i = 0; i < count; i++) { this.group_description_index[i] = stream.readUint32(); } }); // file:src/parsing/stsh.js BoxParser.createFullBoxCtor("stsh", function (stream) { var entry_count; var i; entry_count = stream.readUint32(); this.shadowed_sample_numbers = []; this.sync_sample_numbers = []; if (this.version === 0) { for (i = 0; i < entry_count; i++) { this.shadowed_sample_numbers.push(stream.readUint32()); this.sync_sample_numbers.push(stream.readUint32()); } } }); // file:src/parsing/stss.js BoxParser.createFullBoxCtor("stss", function (stream) { var i; var entry_count; entry_count = 
stream.readUint32(); if (this.version === 0) { this.sample_numbers = []; for (i = 0; i < entry_count; i++) { this.sample_numbers.push(stream.readUint32()); } } }); // file:src/parsing/stsz.js BoxParser.createFullBoxCtor("stsz", function (stream) { var i; this.sample_sizes = []; if (this.version === 0) { this.sample_size = stream.readUint32(); this.sample_count = stream.readUint32(); for (i = 0; i < this.sample_count; i++) { if (this.sample_size === 0) { this.sample_sizes.push(stream.readUint32()); } else { this.sample_sizes[i] = this.sample_size; } } } }); // file:src/parsing/stts.js BoxParser.createFullBoxCtor("stts", function (stream) { var entry_count; var i; var delta; entry_count = stream.readUint32(); this.sample_counts = []; this.sample_deltas = []; if (this.version === 0) { for (i = 0; i < entry_count; i++) { this.sample_counts.push(stream.readUint32()); delta = stream.readInt32(); if (delta < 0) { Log.warn("BoxParser", "File uses negative stts sample delta, using value 1 instead, sync may be lost!"); delta = 1; } this.sample_deltas.push(delta); } } }); // file:src/parsing/stvi.js BoxParser.createFullBoxCtor("stvi", function (stream) { var tmp32 = stream.readUint32(); this.single_view_allowed = tmp32 & 0x3; this.stereo_scheme = stream.readUint32(); var length = stream.readUint32(); this.stereo_indication_type = stream.readString(length); var ret; var box; this.boxes = []; while (stream.getPosition() < this.start + this.size) { ret = BoxParser.parseOneBox(stream, false, this.size - (stream.getPosition() - this.start)); if (ret.code === BoxParser.OK) { box = ret.box; this.boxes.push(box); this[box.type] = box; } else { return; } } }); // file:src/parsing/styp.js BoxParser.createBoxCtor("styp", function (stream) { BoxParser.ftypBox.prototype.parse.call(this, stream); }); // file:src/parsing/stz2.js BoxParser.createFullBoxCtor("stz2", function (stream) { var i; var sample_count; this.sample_sizes = []; if (this.version === 0) { this.reserved = stream.readUint24(); this.field_size = stream.readUint8(); sample_count = stream.readUint32(); if (this.field_size === 4) { for (i = 0; i < sample_count; i += 2) { var tmp = stream.readUint8(); this.sample_sizes[i] = tmp >> 4 & 0xF; this.sample_sizes[i + 1] = tmp & 0xF; } } else if (this.field_size === 8) { for (i = 0; i < sample_count; i++) { this.sample_sizes[i] = stream.readUint8(); } } else if (this.field_size === 16) { for (i = 0; i < sample_count; i++) { this.sample_sizes[i] = stream.readUint16(); } } else { Log.error("BoxParser", "Error in length field in stz2 box"); } } }); // file:src/parsing/subs.js BoxParser.createFullBoxCtor("subs", function (stream) { var i, j; var entry_count; var subsample_count; entry_count = stream.readUint32(); this.entries = []; for (i = 0; i < entry_count; i++) { var sampleInfo = {}; this.entries[i] = sampleInfo; sampleInfo.sample_delta = stream.readUint32(); sampleInfo.subsamples = []; subsample_count = stream.readUint16(); if (subsample_count > 0) { for (j = 0; j < subsample_count; j++) { var subsample = {}; sampleInfo.subsamples.push(subsample); if (this.version == 1) { subsample.size = stream.readUint32(); } else { subsample.size = stream.readUint16(); } subsample.priority = stream.readUint8(); subsample.discardable = stream.readUint8(); subsample.codec_specific_parameters = stream.readUint32(); } } } }); // file:src/parsing/tenc.js BoxParser.createFullBoxCtor("tenc", function (stream) { stream.readUint8(); // reserved if (this.version === 0) { stream.readUint8(); } else { var tmp = stream.readUint8(); 
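/* tenc version 1+ (ISO/IEC 23001-7): the upper 4 bits of this byte carry default_crypt_byte_block and the lower 4 bits default_skip_byte_block, used for pattern encryption. */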
this.default_crypt_byte_block = tmp >> 4 & 0xF; this.default_skip_byte_block = tmp & 0xF; } this.default_isProtected = stream.readUint8(); this.default_Per_Sample_IV_Size = stream.readUint8(); this.default_KID = BoxParser.parseHex16(stream); if (this.default_isProtected === 1 && this.default_Per_Sample_IV_Size === 0) { this.default_constant_IV_size = stream.readUint8(); this.default_constant_IV = stream.readUint8Array(this.default_constant_IV_size); } }); // file:src/parsing/tfdt.js BoxParser.createFullBoxCtor("tfdt", function (stream) { if (this.version == 1) { this.baseMediaDecodeTime = stream.readUint64(); } else { this.baseMediaDecodeTime = stream.readUint32(); } }); // file:src/parsing/tfhd.js BoxParser.createFullBoxCtor("tfhd", function (stream) { var readBytes = 0; this.track_id = stream.readUint32(); if (this.size - this.hdr_size > readBytes && this.flags & BoxParser.TFHD_FLAG_BASE_DATA_OFFSET) { this.base_data_offset = stream.readUint64(); readBytes += 8; } else { this.base_data_offset = 0; } if (this.size - this.hdr_size > readBytes && this.flags & BoxParser.TFHD_FLAG_SAMPLE_DESC) { this.default_sample_description_index = stream.readUint32(); readBytes += 4; } else { this.default_sample_description_index = 0; } if (this.size - this.hdr_size > readBytes && this.flags & BoxParser.TFHD_FLAG_SAMPLE_DUR) { this.default_sample_duration = stream.readUint32(); readBytes += 4; } else { this.default_sample_duration = 0; } if (this.size - this.hdr_size > readBytes && this.flags & BoxParser.TFHD_FLAG_SAMPLE_SIZE) { this.default_sample_size = stream.readUint32(); readBytes += 4; } else { this.default_sample_size = 0; } if (this.size - this.hdr_size > readBytes && this.flags & BoxParser.TFHD_FLAG_SAMPLE_FLAGS) { this.default_sample_flags = stream.readUint32(); readBytes += 4; } else { this.default_sample_flags = 0; } }); // file:src/parsing/tfra.js BoxParser.createFullBoxCtor("tfra", function (stream) { this.track_ID = stream.readUint32(); stream.readUint24(); var tmp_byte = stream.readUint8(); this.length_size_of_traf_num = tmp_byte >> 4 & 0x3; this.length_size_of_trun_num = tmp_byte >> 2 & 0x3; this.length_size_of_sample_num = tmp_byte & 0x3; this.entries = []; var number_of_entries = stream.readUint32(); for (var i = 0; i < number_of_entries; i++) { if (this.version === 1) { this.time = stream.readUint64(); this.moof_offset = stream.readUint64(); } else { this.time = stream.readUint32(); this.moof_offset = stream.readUint32(); } this.traf_number = stream["readUint" + 8 * (this.length_size_of_traf_num + 1)](); this.trun_number = stream["readUint" + 8 * (this.length_size_of_trun_num + 1)](); this.sample_number = stream["readUint" + 8 * (this.length_size_of_sample_num + 1)](); } }); // file:src/parsing/tkhd.js BoxParser.createFullBoxCtor("tkhd", function (stream) { if (this.version == 1) { this.creation_time = stream.readUint64(); this.modification_time = stream.readUint64(); this.track_id = stream.readUint32(); stream.readUint32(); this.duration = stream.readUint64(); } else { this.creation_time = stream.readUint32(); this.modification_time = stream.readUint32(); this.track_id = stream.readUint32(); stream.readUint32(); this.duration = stream.readUint32(); } stream.readUint32Array(2); this.layer = stream.readInt16(); this.alternate_group = stream.readInt16(); this.volume = stream.readInt16() >> 8; stream.readUint16(); this.matrix = stream.readInt32Array(9); this.width = stream.readUint32(); this.height = stream.readUint32(); }); // file:src/parsing/tmax.js BoxParser.createBoxCtor("tmax", 
function (stream) { this.time = stream.readUint32(); }); // file:src/parsing/tmin.js BoxParser.createBoxCtor("tmin", function (stream) { this.time = stream.readUint32(); }); // file:src/parsing/totl.js BoxParser.createBoxCtor("totl", function (stream) { this.bytessent = stream.readUint32(); }); // file:src/parsing/tpay.js BoxParser.createBoxCtor("tpay", function (stream) { this.bytessent = stream.readUint32(); }); // file:src/parsing/tpyl.js BoxParser.createBoxCtor("tpyl", function (stream) { this.bytessent = stream.readUint64(); }); // file:src/parsing/TrackGroup.js BoxParser.TrackGroupTypeBox.prototype.parse = function (stream) { this.parseFullHeader(stream); this.track_group_id = stream.readUint32(); }; // file:src/parsing/trackgroups/msrc.js BoxParser.createTrackGroupCtor("msrc"); // file:src/parsing/TrakReference.js BoxParser.TrackReferenceTypeBox = function (type, size, hdr_size, start) { BoxParser.Box.call(this, type, size); this.hdr_size = hdr_size; this.start = start; }; BoxParser.TrackReferenceTypeBox.prototype = new BoxParser.Box(); BoxParser.TrackReferenceTypeBox.prototype.parse = function (stream) { this.track_ids = stream.readUint32Array((this.size - this.hdr_size) / 4); }; // file:src/parsing/tref.js BoxParser.trefBox.prototype.parse = function (stream) { var ret; var box; while (stream.getPosition() < this.start + this.size) { ret = BoxParser.parseOneBox(stream, true, this.size - (stream.getPosition() - this.start)); if (ret.code === BoxParser.OK) { box = new BoxParser.TrackReferenceTypeBox(ret.type, ret.size, ret.hdr_size, ret.start); if (box.write === BoxParser.Box.prototype.write && box.type !== "mdat") { Log.info("BoxParser", "TrackReference " + box.type + " box writing not yet implemented, keeping unparsed data in memory for later write"); box.parseDataAndRewind(stream); } box.parse(stream); this.boxes.push(box); } else { return; } } }; // file:src/parsing/trep.js BoxParser.createFullBoxCtor("trep", function (stream) { this.track_ID = stream.readUint32(); this.boxes = []; var ret; var box; while (stream.getPosition() < this.start + this.size) { ret = BoxParser.parseOneBox(stream, false, this.size - (stream.getPosition() - this.start)); if (ret.code === BoxParser.OK) { box = ret.box; this.boxes.push(box); } else { return; } } }); // file:src/parsing/trex.js BoxParser.createFullBoxCtor("trex", function (stream) { this.track_id = stream.readUint32(); this.default_sample_description_index = stream.readUint32(); this.default_sample_duration = stream.readUint32(); this.default_sample_size = stream.readUint32(); this.default_sample_flags = stream.readUint32(); }); // file:src/parsing/trpy.js BoxParser.createBoxCtor("trpy", function (stream) { this.bytessent = stream.readUint64(); }); // file:src/parsing/trun.js BoxParser.createFullBoxCtor("trun", function (stream) { var readBytes = 0; this.sample_count = stream.readUint32(); readBytes += 4; if (this.size - this.hdr_size > readBytes && this.flags & BoxParser.TRUN_FLAGS_DATA_OFFSET) { this.data_offset = stream.readInt32(); //signed readBytes += 4; } else { this.data_offset = 0; } if (this.size - this.hdr_size > readBytes && this.flags & BoxParser.TRUN_FLAGS_FIRST_FLAG) { this.first_sample_flags = stream.readUint32(); readBytes += 4; } else { this.first_sample_flags = 0; } this.sample_duration = []; this.sample_size = []; this.sample_flags = []; this.sample_composition_time_offset = []; if (this.size - this.hdr_size > readBytes) { for (var i = 0; i < this.sample_count; i++) { if (this.flags & BoxParser.TRUN_FLAGS_DURATION) {
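/* sample_duration is only present when the sample-duration-present bit (0x000100) of tr_flags is set; the size, flags and composition-time-offset fields below follow the same pattern, otherwise the tfhd/trex defaults apply. */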
this.sample_duration[i] = stream.readUint32(); } if (this.flags & BoxParser.TRUN_FLAGS_SIZE) { this.sample_size[i] = stream.readUint32(); } if (this.flags & BoxParser.TRUN_FLAGS_FLAGS) { this.sample_flags[i] = stream.readUint32(); } if (this.flags & BoxParser.TRUN_FLAGS_CTS_OFFSET) { if (this.version === 0) { this.sample_composition_time_offset[i] = stream.readUint32(); } else { this.sample_composition_time_offset[i] = stream.readInt32(); //signed } } } } }); // file:src/parsing/tsel.js BoxParser.createFullBoxCtor("tsel", function (stream) { this.switch_group = stream.readUint32(); var count = (this.size - this.hdr_size - 4) / 4; this.attribute_list = []; for (var i = 0; i < count; i++) { this.attribute_list[i] = stream.readUint32(); } }); // file:src/parsing/txtC.js BoxParser.createFullBoxCtor("txtC", function (stream) { this.config = stream.readCString(); }); // file:src/parsing/tyco.js BoxParser.createBoxCtor("tyco", function (stream) { var count = (this.size - this.hdr_size) / 4; this.compatible_brands = []; for (var i = 0; i < count; i++) { this.compatible_brands[i] = stream.readString(4); } }); // file:src/parsing/udes.js BoxParser.createFullBoxCtor("udes", function (stream) { this.lang = stream.readCString(); this.name = stream.readCString(); this.description = stream.readCString(); this.tags = stream.readCString(); }); // file:src/parsing/uncC.js BoxParser.createFullBoxCtor("uncC", function (stream) { var i; this.profile = stream.readUint32(); if (this.version == 1) ; else if (this.version == 0) { this.component_count = stream.readUint32(); this.component_index = []; this.component_bit_depth_minus_one = []; this.component_format = []; this.component_align_size = []; for (i = 0; i < this.component_count; i++) { this.component_index.push(stream.readUint16()); this.component_bit_depth_minus_one.push(stream.readUint8()); this.component_format.push(stream.readUint8()); this.component_align_size.push(stream.readUint8()); } this.sampling_type = stream.readUint8(); this.interleave_type = stream.readUint8(); this.block_size = stream.readUint8(); var flags = stream.readUint8(); this.component_little_endian = flags >> 7 & 0x1; this.block_pad_lsb = flags >> 6 & 0x1; this.block_little_endian = flags >> 5 & 0x1; this.block_reversed = flags >> 4 & 0x1; this.pad_unknown = flags >> 3 & 0x1; this.pixel_size = stream.readUint32(); this.row_align_size = stream.readUint32(); this.tile_align_size = stream.readUint32(); this.num_tile_cols_minus_one = stream.readUint32(); this.num_tile_rows_minus_one = stream.readUint32(); } }); // file:src/parsing/url.js BoxParser.createFullBoxCtor("url ", function (stream) { if (this.flags !== 0x000001) { this.location = stream.readCString(); } }); // file:src/parsing/urn.js BoxParser.createFullBoxCtor("urn ", function (stream) { this.name = stream.readCString(); if (this.size - this.hdr_size - this.name.length - 1 > 0) { this.location = stream.readCString(); } }); // file:src/parsing/uuid/piff/piffLsm.js BoxParser.createUUIDBox("a5d40b30e81411ddba2f0800200c9a66", true, false, function (stream) { this.LiveServerManifest = stream.readString(this.size - this.hdr_size).replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;").replace(/"/g, "&quot;").replace(/'/g, "&#039;"); }); // file:src/parsing/uuid/piff/piffPssh.js BoxParser.createUUIDBox("d08a4f1810f34a82b6c832d8aba183d3", true, false, function (stream) { this.system_id = BoxParser.parseHex16(stream); var datasize = stream.readUint32(); if (datasize > 0) { this.data = stream.readUint8Array(datasize); } }); // file:src/parsing/uuid/piff/piffSenc.js
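/* PIFF SampleEncryption box: same layout as the 'senc' box above, and likewise left unparsed here because the per-sample IV size is only known from the 'tenc'/'seig' boxes. */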
BoxParser.createUUIDBox("a2394f525a9b4f14a2446c427c648df4", true, false /*, function(stream) { if (this.flags & 0x1) { this.AlgorithmID = stream.readUint24(); this.IV_size = stream.readUint8(); this.KID = BoxParser.parseHex16(stream); } var sample_count = stream.readUint32(); this.samples = []; for (var i = 0; i < sample_count; i++) { var sample = {}; sample.InitializationVector = this.readUint8Array(this.IV_size*8); if (this.flags & 0x2) { sample.subsamples = []; sample.NumberOfEntries = stream.readUint16(); for (var j = 0; j < sample.NumberOfEntries; j++) { var subsample = {}; subsample.BytesOfClearData = stream.readUint16(); subsample.BytesOfProtectedData = stream.readUint32(); sample.subsamples.push(subsample); } } this.samples.push(sample); } }*/); // file:src/parsing/uuid/piff/piffTenc.js BoxParser.createUUIDBox("8974dbce7be74c5184f97148f9882554", true, false, function (stream) { this.default_AlgorithmID = stream.readUint24(); this.default_IV_size = stream.readUint8(); this.default_KID = BoxParser.parseHex16(stream); }); // file:src/parsing/uuid/piff/piffTfrf.js BoxParser.createUUIDBox("d4807ef2ca3946958e5426cb9e46a79f", true, false, function (stream) { this.fragment_count = stream.readUint8(); this.entries = []; for (var i = 0; i < this.fragment_count; i++) { var entry = {}; var absolute_time = 0; var absolute_duration = 0; if (this.version === 1) { absolute_time = stream.readUint64(); absolute_duration = stream.readUint64(); } else { absolute_time = stream.readUint32(); absolute_duration = stream.readUint32(); } entry.absolute_time = absolute_time; entry.absolute_duration = absolute_duration; this.entries.push(entry); } }); // file:src/parsing/uuid/piff/piffTfxd.js BoxParser.createUUIDBox("6d1d9b0542d544e680e2141daff757b2", true, false, function (stream) { if (this.version === 1) { this.absolute_time = stream.readUint64(); this.duration = stream.readUint64(); } else { this.absolute_time = stream.readUint32(); this.duration = stream.readUint32(); } }); // file:src/parsing/vmhd.js BoxParser.createFullBoxCtor("vmhd", function (stream) { this.graphicsmode = stream.readUint16(); this.opcolor = stream.readUint16Array(3); }); // file:src/parsing/vpcC.js BoxParser.createFullBoxCtor("vpcC", function (stream) { var tmp; if (this.version === 1) { this.profile = stream.readUint8(); this.level = stream.readUint8(); tmp = stream.readUint8(); this.bitDepth = tmp >> 4; this.chromaSubsampling = tmp >> 1 & 0x7; this.videoFullRangeFlag = tmp & 0x1; this.colourPrimaries = stream.readUint8(); this.transferCharacteristics = stream.readUint8(); this.matrixCoefficients = stream.readUint8(); this.codecIntializationDataSize = stream.readUint16(); this.codecIntializationData = stream.readUint8Array(this.codecIntializationDataSize); } else { this.profile = stream.readUint8(); this.level = stream.readUint8(); tmp = stream.readUint8(); this.bitDepth = tmp >> 4 & 0xF; this.colorSpace = tmp & 0xF; tmp = stream.readUint8(); this.chromaSubsampling = tmp >> 4 & 0xF; this.transferFunction = tmp >> 1 & 0x7; this.videoFullRangeFlag = tmp & 0x1; this.codecIntializationDataSize = stream.readUint16(); this.codecIntializationData = stream.readUint8Array(this.codecIntializationDataSize); } }); // file:src/parsing/vttC.js BoxParser.createBoxCtor("vttC", function (stream) { this.text = stream.readString(this.size - this.hdr_size); }); // file:src/parsing/vvcC.js BoxParser.createFullBoxCtor("vvcC", function (stream) { var i, j; // helper object to simplify extracting individual bits var bitReader = { held_bits: undefined, 
num_held_bits: 0, stream_read_1_bytes: function (strm) { this.held_bits = strm.readUint8(); this.num_held_bits = 1 * 8; }, stream_read_2_bytes: function (strm) { this.held_bits = strm.readUint16(); this.num_held_bits = 2 * 8; }, extract_bits: function (num_bits) { var ret = this.held_bits >> this.num_held_bits - num_bits & (1 << num_bits) - 1; this.num_held_bits -= num_bits; return ret; } }; // VvcDecoderConfigurationRecord bitReader.stream_read_1_bytes(stream); bitReader.extract_bits(5); // reserved this.lengthSizeMinusOne = bitReader.extract_bits(2); this.ptl_present_flag = bitReader.extract_bits(1); if (this.ptl_present_flag) { bitReader.stream_read_2_bytes(stream); this.ols_idx = bitReader.extract_bits(9); this.num_sublayers = bitReader.extract_bits(3); this.constant_frame_rate = bitReader.extract_bits(2); this.chroma_format_idc = bitReader.extract_bits(2); bitReader.stream_read_1_bytes(stream); this.bit_depth_minus8 = bitReader.extract_bits(3); bitReader.extract_bits(5); // reserved // VvcPTLRecord { bitReader.stream_read_2_bytes(stream); bitReader.extract_bits(2); // reserved this.num_bytes_constraint_info = bitReader.extract_bits(6); this.general_profile_idc = bitReader.extract_bits(7); this.general_tier_flag = bitReader.extract_bits(1); this.general_level_idc = stream.readUint8(); bitReader.stream_read_1_bytes(stream); this.ptl_frame_only_constraint_flag = bitReader.extract_bits(1); this.ptl_multilayer_enabled_flag = bitReader.extract_bits(1); this.general_constraint_info = new Uint8Array(this.num_bytes_constraint_info); if (this.num_bytes_constraint_info) { for (i = 0; i < this.num_bytes_constraint_info - 1; i++) { var cnstr1 = bitReader.extract_bits(6); bitReader.stream_read_1_bytes(stream); var cnstr2 = bitReader.extract_bits(2); this.general_constraint_info[i] = cnstr1 << 2 | cnstr2; } this.general_constraint_info[this.num_bytes_constraint_info - 1] = bitReader.extract_bits(6); } else { //forbidden in spec! 
bitReader.extract_bits(6); } if (this.num_sublayers > 1) { bitReader.stream_read_1_bytes(stream); this.ptl_sublayer_present_mask = 0; for (j = this.num_sublayers - 2; j >= 0; --j) { var val = bitReader.extract_bits(1); this.ptl_sublayer_present_mask |= val << j; } for (j = this.num_sublayers; j <= 8 && this.num_sublayers > 1; ++j) { bitReader.extract_bits(1); // ptl_reserved_zero_bit } this.sublayer_level_idc = []; for (j = this.num_sublayers - 2; j >= 0; --j) { if (this.ptl_sublayer_present_mask & 1 << j) { this.sublayer_level_idc[j] = stream.readUint8(); } } } this.ptl_num_sub_profiles = stream.readUint8(); this.general_sub_profile_idc = []; if (this.ptl_num_sub_profiles) { for (i = 0; i < this.ptl_num_sub_profiles; i++) { this.general_sub_profile_idc.push(stream.readUint32()); } } } // end VvcPTLRecord this.max_picture_width = stream.readUint16(); this.max_picture_height = stream.readUint16(); this.avg_frame_rate = stream.readUint16(); } var VVC_NALU_OPI = 12; var VVC_NALU_DEC_PARAM = 13; this.nalu_arrays = []; var num_of_arrays = stream.readUint8(); for (i = 0; i < num_of_arrays; i++) { var nalu_array = []; this.nalu_arrays.push(nalu_array); bitReader.stream_read_1_bytes(stream); nalu_array.completeness = bitReader.extract_bits(1); bitReader.extract_bits(2); // reserved nalu_array.nalu_type = bitReader.extract_bits(5); var numNalus = 1; if (nalu_array.nalu_type != VVC_NALU_DEC_PARAM && nalu_array.nalu_type != VVC_NALU_OPI) { numNalus = stream.readUint16(); } for (j = 0; j < numNalus; j++) { var len = stream.readUint16(); nalu_array.push({ data: stream.readUint8Array(len), length: len }); } } }); // file:src/parsing/vvnC.js BoxParser.createFullBoxCtor("vvnC", function (stream) { // VvcNALUConfigBox var tmp = stream.readUint8(); this.lengthSizeMinusOne = tmp & 0x3; }); // file:src/box-codecs.js BoxParser.SampleEntry.prototype.isVideo = function () { return false; }; BoxParser.SampleEntry.prototype.isAudio = function () { return false; }; BoxParser.SampleEntry.prototype.isSubtitle = function () { return false; }; BoxParser.SampleEntry.prototype.isMetadata = function () { return false; }; BoxParser.SampleEntry.prototype.isHint = function () { return false; }; BoxParser.SampleEntry.prototype.getCodec = function () { return this.type.replace('.', ''); }; BoxParser.SampleEntry.prototype.getWidth = function () { return ""; }; BoxParser.SampleEntry.prototype.getHeight = function () { return ""; }; BoxParser.SampleEntry.prototype.getChannelCount = function () { return ""; }; BoxParser.SampleEntry.prototype.getSampleRate = function () { return ""; }; BoxParser.SampleEntry.prototype.getSampleSize = function () { return ""; }; BoxParser.VisualSampleEntry.prototype.isVideo = function () { return true; }; BoxParser.VisualSampleEntry.prototype.getWidth = function () { return this.width; }; BoxParser.VisualSampleEntry.prototype.getHeight = function () { return this.height; }; BoxParser.AudioSampleEntry.prototype.isAudio = function () { return true; }; BoxParser.AudioSampleEntry.prototype.getChannelCount = function () { return this.channel_count; }; BoxParser.AudioSampleEntry.prototype.getSampleRate = function () { return this.samplerate; }; BoxParser.AudioSampleEntry.prototype.getSampleSize = function () { return this.samplesize; }; BoxParser.SubtitleSampleEntry.prototype.isSubtitle = function () { return true; }; BoxParser.MetadataSampleEntry.prototype.isMetadata = function () { return true; }; BoxParser.decimalToHex = function (d, padding) { var hex = Number(d).toString(16); padding = typeof padding
=== "undefined" || padding === null ? padding = 2 : padding; while (hex.length < padding) { hex = "0" + hex; } return hex; }; BoxParser.avc1SampleEntry.prototype.getCodec = BoxParser.avc2SampleEntry.prototype.getCodec = BoxParser.avc3SampleEntry.prototype.getCodec = BoxParser.avc4SampleEntry.prototype.getCodec = function () { var baseCodec = BoxParser.SampleEntry.prototype.getCodec.call(this); if (this.avcC) { return baseCodec + "." + BoxParser.decimalToHex(this.avcC.AVCProfileIndication) + "" + BoxParser.decimalToHex(this.avcC.profile_compatibility) + "" + BoxParser.decimalToHex(this.avcC.AVCLevelIndication); } else { return baseCodec; } }; BoxParser.hev1SampleEntry.prototype.getCodec = BoxParser.hvc1SampleEntry.prototype.getCodec = function () { var i; var baseCodec = BoxParser.SampleEntry.prototype.getCodec.call(this); if (this.hvcC) { baseCodec += '.'; switch (this.hvcC.general_profile_space) { case 0: baseCodec += ''; break; case 1: baseCodec += 'A'; break; case 2: baseCodec += 'B'; break; case 3: baseCodec += 'C'; break; } baseCodec += this.hvcC.general_profile_idc; baseCodec += '.'; var val = this.hvcC.general_profile_compatibility; var reversed = 0; for (i = 0; i < 32; i++) { reversed |= val & 1; if (i == 31) break; reversed <<= 1; val >>= 1; } baseCodec += BoxParser.decimalToHex(reversed, 0); baseCodec += '.'; if (this.hvcC.general_tier_flag === 0) { baseCodec += 'L'; } else { baseCodec += 'H'; } baseCodec += this.hvcC.general_level_idc; var hasByte = false; var constraint_string = ""; for (i = 5; i >= 0; i--) { if (this.hvcC.general_constraint_indicator[i] || hasByte) { constraint_string = "." + BoxParser.decimalToHex(this.hvcC.general_constraint_indicator[i], 0) + constraint_string; hasByte = true; } } baseCodec += constraint_string; } return baseCodec; }; BoxParser.vvc1SampleEntry.prototype.getCodec = BoxParser.vvi1SampleEntry.prototype.getCodec = function () { var i; var baseCodec = BoxParser.SampleEntry.prototype.getCodec.call(this); if (this.vvcC) { baseCodec += '.' + this.vvcC.general_profile_idc; if (this.vvcC.general_tier_flag) { baseCodec += '.H'; } else { baseCodec += '.L'; } baseCodec += this.vvcC.general_level_idc; var constraint_string = ""; if (this.vvcC.general_constraint_info) { var bytes = []; var byte = 0; byte |= this.vvcC.ptl_frame_only_constraint << 7; byte |= this.vvcC.ptl_multilayer_enabled << 6; var last_nonzero; for (i = 0; i < this.vvcC.general_constraint_info.length; ++i) { byte |= this.vvcC.general_constraint_info[i] >> 2 & 0x3f; bytes.push(byte); if (byte) { last_nonzero = i; } byte = this.vvcC.general_constraint_info[i] >> 2 & 0x03; } if (last_nonzero === undefined) { constraint_string = ".CA"; } else { constraint_string = ".C"; var base32_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"; var held_bits = 0; var num_held_bits = 0; for (i = 0; i <= last_nonzero; ++i) { held_bits = held_bits << 8 | bytes[i]; num_held_bits += 8; while (num_held_bits >= 5) { var val = held_bits >> num_held_bits - 5 & 0x1f; constraint_string += base32_chars[val]; num_held_bits -= 5; held_bits &= (1 << num_held_bits) - 1; } } if (num_held_bits) { held_bits <<= 5 - num_held_bits; // right-pad with zeros to 5 bits (is this correct?) 
constraint_string += base32_chars[held_bits & 0x1f]; } } } baseCodec += constraint_string; } return baseCodec; }; BoxParser.mp4aSampleEntry.prototype.getCodec = function () { var baseCodec = BoxParser.SampleEntry.prototype.getCodec.call(this); if (this.esds && this.esds.esd) { var oti = this.esds.esd.getOTI(); var dsi = this.esds.esd.getAudioConfig(); return baseCodec + "." + BoxParser.decimalToHex(oti) + (dsi ? "." + dsi : ""); } else { return baseCodec; } }; BoxParser.stxtSampleEntry.prototype.getCodec = function () { var baseCodec = BoxParser.SampleEntry.prototype.getCodec.call(this); if (this.mime_format) { return baseCodec + "." + this.mime_format; } else { return baseCodec; } }; BoxParser.vp08SampleEntry.prototype.getCodec = BoxParser.vp09SampleEntry.prototype.getCodec = function () { var baseCodec = BoxParser.SampleEntry.prototype.getCodec.call(this); var level = this.vpcC.level; if (level == 0) { level = "00"; } var bitDepth = this.vpcC.bitDepth; if (bitDepth == 8) { bitDepth = "08"; } return baseCodec + ".0" + this.vpcC.profile + "." + level + "." + bitDepth; }; BoxParser.av01SampleEntry.prototype.getCodec = function () { var baseCodec = BoxParser.SampleEntry.prototype.getCodec.call(this); var level = this.av1C.seq_level_idx_0; if (level < 10) { level = "0" + level; } var bitdepth; if (this.av1C.seq_profile === 2 && this.av1C.high_bitdepth === 1) { bitdepth = this.av1C.twelve_bit === 1 ? "12" : "10"; } else if (this.av1C.seq_profile <= 2) { bitdepth = this.av1C.high_bitdepth === 1 ? "10" : "08"; } // TODO need to parse the SH to find color config return baseCodec + "." + this.av1C.seq_profile + "." + level + (this.av1C.seq_tier_0 ? "H" : "M") + "." + bitdepth; //+"."+this.av1C.monochrome+"."+this.av1C.chroma_subsampling_x+""+this.av1C.chroma_subsampling_y+""+this.av1C.chroma_sample_position; }; // file:src/box-write.js /* * Copyright (c) Telecom ParisTech/TSI/MM/GPAC Cyril Concolato * License: BSD-3-Clause (see LICENSE file) */ BoxParser.Box.prototype.writeHeader = function (stream, msg) { this.size += 8; if (this.size > MAX_SIZE) { this.size += 8; } if (this.type === "uuid") { this.size += 16; } Log.debug("BoxWriter", "Writing box " + this.type + " of size: " + this.size + " at position " + stream.getPosition() + (msg || "")); if (this.size > MAX_SIZE) { stream.writeUint32(1); } else { this.sizePosition = stream.getPosition(); stream.writeUint32(this.size); } stream.writeString(this.type, null, 4); if (this.type === "uuid") { stream.writeUint8Array(this.uuid); } if (this.size > MAX_SIZE) { stream.writeUint64(this.size); } }; BoxParser.FullBox.prototype.writeHeader = function (stream) { this.size += 4; BoxParser.Box.prototype.writeHeader.call(this, stream, " v=" + this.version + " f=" + this.flags); stream.writeUint8(this.version); stream.writeUint24(this.flags); }; BoxParser.Box.prototype.write = function (stream) { if (this.type === "mdat") { /* TODO: fix this */ if (this.data) { this.size = this.data.length; this.writeHeader(stream); stream.writeUint8Array(this.data); } } else { this.size = this.data ? 
this.data.length : 0; this.writeHeader(stream); if (this.data) { stream.writeUint8Array(this.data); } } }; BoxParser.ContainerBox.prototype.write = function (stream) { this.size = 0; this.writeHeader(stream); for (var i = 0; i < this.boxes.length; i++) { if (this.boxes[i]) { this.boxes[i].write(stream); this.size += this.boxes[i].size; } } /* adjusting the size, now that all sub-boxes are known */ Log.debug("BoxWriter", "Adjusting box " + this.type + " with new size " + this.size); stream.adjustUint32(this.sizePosition, this.size); }; BoxParser.TrackReferenceTypeBox.prototype.write = function (stream) { this.size = this.track_ids.length * 4; this.writeHeader(stream); stream.writeUint32Array(this.track_ids); }; // file:src/writing/avcC.js BoxParser.avcCBox.prototype.write = function (stream) { var i; this.size = 7; for (i = 0; i < this.SPS.length; i++) { this.size += 2 + this.SPS[i].length; } for (i = 0; i < this.PPS.length; i++) { this.size += 2 + this.PPS[i].length; } if (this.ext) { this.size += this.ext.length; } this.writeHeader(stream); stream.writeUint8(this.configurationVersion); stream.writeUint8(this.AVCProfileIndication); stream.writeUint8(this.profile_compatibility); stream.writeUint8(this.AVCLevelIndication); stream.writeUint8(this.lengthSizeMinusOne + (63 << 2)); stream.writeUint8(this.SPS.length + (7 << 5)); for (i = 0; i < this.SPS.length; i++) { stream.writeUint16(this.SPS[i].length); stream.writeUint8Array(this.SPS[i].nalu); } stream.writeUint8(this.PPS.length); for (i = 0; i < this.PPS.length; i++) { stream.writeUint16(this.PPS[i].length); stream.writeUint8Array(this.PPS[i].nalu); } if (this.ext) { stream.writeUint8Array(this.ext); } }; // file:src/writing/co64.js BoxParser.co64Box.prototype.write = function (stream) { var i; this.version = 0; this.flags = 0; this.size = 4 + 8 * this.chunk_offsets.length; this.writeHeader(stream); stream.writeUint32(this.chunk_offsets.length); for (i = 0; i < this.chunk_offsets.length; i++) { stream.writeUint64(this.chunk_offsets[i]); } }; // file:src/writing/cslg.js BoxParser.cslgBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 4 * 5; this.writeHeader(stream); stream.writeInt32(this.compositionToDTSShift); stream.writeInt32(this.leastDecodeToDisplayDelta); stream.writeInt32(this.greatestDecodeToDisplayDelta); stream.writeInt32(this.compositionStartTime); stream.writeInt32(this.compositionEndTime); }; // file:src/writing/ctts.js BoxParser.cttsBox.prototype.write = function (stream) { var i; this.version = 0; this.flags = 0; this.size = 4 + 8 * this.sample_counts.length; this.writeHeader(stream); stream.writeUint32(this.sample_counts.length); for (i = 0; i < this.sample_counts.length; i++) { stream.writeUint32(this.sample_counts[i]); if (this.version === 1) { stream.writeInt32(this.sample_offsets[i]); /* signed */ } else { stream.writeUint32(this.sample_offsets[i]); /* unsigned */ } } }; // file:src/writing/dref.js BoxParser.drefBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 4; // this.writeHeader(stream); stream.writeUint32(this.entries.length); for (var i = 0; i < this.entries.length; i++) { this.entries[i].write(stream); this.size += this.entries[i].size; } /* adjusting the size, now that all sub-boxes are known */ Log.debug("BoxWriter", "Adjusting box " + this.type + " with new size " + this.size); stream.adjustUint32(this.sizePosition, this.size); }; // file:src/writing/elng.js BoxParser.elngBox.prototype.write = function (stream) { this.version = 0; 
this.flags = 0; this.size = this.extended_language.length; this.writeHeader(stream); stream.writeString(this.extended_language); }; // file:src/writing/elst.js
BoxParser.elstBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 4 + 12 * this.entries.length; this.writeHeader(stream); stream.writeUint32(this.entries.length); for (var i = 0; i < this.entries.length; i++) { var entry = this.entries[i]; stream.writeUint32(entry.segment_duration); stream.writeInt32(entry.media_time); stream.writeInt16(entry.media_rate_integer); stream.writeInt16(entry.media_rate_fraction); } }; // file:src/writing/emsg.js
BoxParser.emsgBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 4 * 4 + this.message_data.length + (this.scheme_id_uri.length + 1) + (this.value.length + 1); this.writeHeader(stream); stream.writeCString(this.scheme_id_uri); stream.writeCString(this.value); stream.writeUint32(this.timescale); stream.writeUint32(this.presentation_time_delta); stream.writeUint32(this.event_duration); stream.writeUint32(this.id); stream.writeUint8Array(this.message_data); }; // file:src/writing/ftyp.js
BoxParser.ftypBox.prototype.write = function (stream) { this.size = 8 + 4 * this.compatible_brands.length; this.writeHeader(stream); stream.writeString(this.major_brand, null, 4); stream.writeUint32(this.minor_version); for (var i = 0; i < this.compatible_brands.length; i++) { stream.writeString(this.compatible_brands[i], null, 4); } }; // file:src/writing/hdlr.js
BoxParser.hdlrBox.prototype.write = function (stream) { this.size = 5 * 4 + this.name.length + 1; this.version = 0; this.flags = 0; this.writeHeader(stream); stream.writeUint32(0); stream.writeString(this.handler, null, 4); stream.writeUint32(0); stream.writeUint32(0); stream.writeUint32(0); stream.writeCString(this.name); }; // file:src/writing/hvcC.js
BoxParser.hvcCBox.prototype.write = function (stream) { var i, j; this.size = 23; for (i = 0; i < this.nalu_arrays.length; i++) { this.size += 3; for (j = 0; j < this.nalu_arrays[i].length; j++) { this.size += 2 + this.nalu_arrays[i][j].data.length; } } this.writeHeader(stream); stream.writeUint8(this.configurationVersion); stream.writeUint8((this.general_profile_space << 6) + (this.general_tier_flag << 5) + this.general_profile_idc); stream.writeUint32(this.general_profile_compatibility); stream.writeUint8Array(this.general_constraint_indicator); stream.writeUint8(this.general_level_idc); stream.writeUint16(this.min_spatial_segmentation_idc + (15 << 24)); stream.writeUint8(this.parallelismType + (63 << 2)); stream.writeUint8(this.chroma_format_idc + (63 << 2)); stream.writeUint8(this.bit_depth_luma_minus8 + (31 << 3)); stream.writeUint8(this.bit_depth_chroma_minus8 + (31 << 3)); stream.writeUint16(this.avgFrameRate); stream.writeUint8((this.constantFrameRate << 6) + (this.numTemporalLayers << 3) + (this.temporalIdNested << 2) + this.lengthSizeMinusOne); stream.writeUint8(this.nalu_arrays.length); for (i = 0; i < this.nalu_arrays.length; i++) { // bit(1) array_completeness + bit(1) reserved = 0 + bit(6) nal_unit_type
stream.writeUint8((this.nalu_arrays[i].completeness << 7) + this.nalu_arrays[i].nalu_type); stream.writeUint16(this.nalu_arrays[i].length); for (j = 0; j < this.nalu_arrays[i].length; j++) { stream.writeUint16(this.nalu_arrays[i][j].data.length); stream.writeUint8Array(this.nalu_arrays[i][j].data); } } }; // file:src/writing/kind.js
BoxParser.kindBox.prototype.write = function (stream) { this.version = 0; this.flags = 0;
this.size = this.schemeURI.length + 1 + (this.value.length + 1); this.writeHeader(stream); stream.writeCString(this.schemeURI); stream.writeCString(this.value); }; // file:src/writing/mdhd.js BoxParser.mdhdBox.prototype.write = function (stream) { this.size = 4 * 4 + 2 * 2; this.flags = 0; this.version = 0; this.writeHeader(stream); stream.writeUint32(this.creation_time); stream.writeUint32(this.modification_time); stream.writeUint32(this.timescale); stream.writeUint32(this.duration); stream.writeUint16(this.language); stream.writeUint16(0); }; // file:src/writing/mehd.js BoxParser.mehdBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 4; this.writeHeader(stream); stream.writeUint32(this.fragment_duration); }; // file:src/writing/mfhd.js BoxParser.mfhdBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 4; this.writeHeader(stream); stream.writeUint32(this.sequence_number); }; // file:src/writing/mvhd.js BoxParser.mvhdBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 23 * 4 + 2 * 2; this.writeHeader(stream); stream.writeUint32(this.creation_time); stream.writeUint32(this.modification_time); stream.writeUint32(this.timescale); stream.writeUint32(this.duration); stream.writeUint32(this.rate); stream.writeUint16(this.volume << 8); stream.writeUint16(0); stream.writeUint32(0); stream.writeUint32(0); stream.writeUint32Array(this.matrix); stream.writeUint32(0); stream.writeUint32(0); stream.writeUint32(0); stream.writeUint32(0); stream.writeUint32(0); stream.writeUint32(0); stream.writeUint32(this.next_track_id); }; // file:src/writing/sampleentry.js BoxParser.SampleEntry.prototype.writeHeader = function (stream) { this.size = 8; BoxParser.Box.prototype.writeHeader.call(this, stream); stream.writeUint8(0); stream.writeUint8(0); stream.writeUint8(0); stream.writeUint8(0); stream.writeUint8(0); stream.writeUint8(0); stream.writeUint16(this.data_reference_index); }; BoxParser.SampleEntry.prototype.writeFooter = function (stream) { for (var i = 0; i < this.boxes.length; i++) { this.boxes[i].write(stream); this.size += this.boxes[i].size; } Log.debug("BoxWriter", "Adjusting box " + this.type + " with new size " + this.size); stream.adjustUint32(this.sizePosition, this.size); }; BoxParser.SampleEntry.prototype.write = function (stream) { this.writeHeader(stream); stream.writeUint8Array(this.data); this.size += this.data.length; Log.debug("BoxWriter", "Adjusting box " + this.type + " with new size " + this.size); stream.adjustUint32(this.sizePosition, this.size); }; BoxParser.VisualSampleEntry.prototype.write = function (stream) { this.writeHeader(stream); this.size += 2 * 7 + 6 * 4 + 32; stream.writeUint16(0); stream.writeUint16(0); stream.writeUint32(0); stream.writeUint32(0); stream.writeUint32(0); stream.writeUint16(this.width); stream.writeUint16(this.height); stream.writeUint32(this.horizresolution); stream.writeUint32(this.vertresolution); stream.writeUint32(0); stream.writeUint16(this.frame_count); stream.writeUint8(Math.min(31, this.compressorname.length)); stream.writeString(this.compressorname, null, 31); stream.writeUint16(this.depth); stream.writeInt16(-1); this.writeFooter(stream); }; BoxParser.AudioSampleEntry.prototype.write = function (stream) { this.writeHeader(stream); this.size += 2 * 4 + 3 * 4; stream.writeUint32(0); stream.writeUint32(0); stream.writeUint16(this.channel_count); stream.writeUint16(this.samplesize); stream.writeUint16(0); stream.writeUint16(0); 
stream.writeUint32(this.samplerate << 16); this.writeFooter(stream); }; BoxParser.stppSampleEntry.prototype.write = function (stream) { this.writeHeader(stream); this.size += this.namespace.length + 1 + this.schema_location.length + 1 + this.auxiliary_mime_types.length + 1; stream.writeCString(this.namespace); stream.writeCString(this.schema_location); stream.writeCString(this.auxiliary_mime_types); this.writeFooter(stream); }; // file:src/writing/samplegroups/samplegroup.js BoxParser.SampleGroupEntry.prototype.write = function (stream) { stream.writeUint8Array(this.data); }; // file:src/writing/sbgp.js BoxParser.sbgpBox.prototype.write = function (stream) { this.version = 1; this.flags = 0; this.size = 12 + 8 * this.entries.length; this.writeHeader(stream); stream.writeString(this.grouping_type, null, 4); stream.writeUint32(this.grouping_type_parameter); stream.writeUint32(this.entries.length); for (var i = 0; i < this.entries.length; i++) { var entry = this.entries[i]; stream.writeInt32(entry.sample_count); stream.writeInt32(entry.group_description_index); } }; // file:src/writing/sgpd.js BoxParser.sgpdBox.prototype.write = function (stream) { var i; var entry; // leave version as read // this.version; this.flags = 0; this.size = 12; for (i = 0; i < this.entries.length; i++) { entry = this.entries[i]; if (this.version === 1) { if (this.default_length === 0) { this.size += 4; } this.size += entry.data.length; } } this.writeHeader(stream); stream.writeString(this.grouping_type, null, 4); if (this.version === 1) { stream.writeUint32(this.default_length); } if (this.version >= 2) { stream.writeUint32(this.default_sample_description_index); } stream.writeUint32(this.entries.length); for (i = 0; i < this.entries.length; i++) { entry = this.entries[i]; if (this.version === 1) { if (this.default_length === 0) { stream.writeUint32(entry.description_length); } } entry.write(stream); } }; // file:src/writing/sidx.js BoxParser.sidxBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 4 * 4 + 2 + 2 + 12 * this.references.length; this.writeHeader(stream); stream.writeUint32(this.reference_ID); stream.writeUint32(this.timescale); stream.writeUint32(this.earliest_presentation_time); stream.writeUint32(this.first_offset); stream.writeUint16(0); stream.writeUint16(this.references.length); for (var i = 0; i < this.references.length; i++) { var ref = this.references[i]; stream.writeUint32(ref.reference_type << 31 | ref.referenced_size); stream.writeUint32(ref.subsegment_duration); stream.writeUint32(ref.starts_with_SAP << 31 | ref.SAP_type << 28 | ref.SAP_delta_time); } }; // file:src/writing/smhd.js BoxParser.smhdBox.prototype.write = function (stream) { this.version = 0; this.flags = 1; this.size = 4; this.writeHeader(stream); stream.writeUint16(this.balance); stream.writeUint16(0); }; // file:src/writing/stco.js BoxParser.stcoBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 4 + 4 * this.chunk_offsets.length; this.writeHeader(stream); stream.writeUint32(this.chunk_offsets.length); stream.writeUint32Array(this.chunk_offsets); }; // file:src/writing/stsc.js BoxParser.stscBox.prototype.write = function (stream) { var i; this.version = 0; this.flags = 0; this.size = 4 + 12 * this.first_chunk.length; this.writeHeader(stream); stream.writeUint32(this.first_chunk.length); for (i = 0; i < this.first_chunk.length; i++) { stream.writeUint32(this.first_chunk[i]); stream.writeUint32(this.samples_per_chunk[i]); 
stream.writeUint32(this.sample_description_index[i]); } }; // file:src/writing/stsd.js BoxParser.stsdBox.prototype.write = function (stream) { var i; this.version = 0; this.flags = 0; this.size = 0; this.writeHeader(stream); stream.writeUint32(this.entries.length); this.size += 4; for (i = 0; i < this.entries.length; i++) { this.entries[i].write(stream); this.size += this.entries[i].size; } /* adjusting the size, now that all sub-boxes are known */ Log.debug("BoxWriter", "Adjusting box " + this.type + " with new size " + this.size); stream.adjustUint32(this.sizePosition, this.size); }; // file:src/writing/stsh.js BoxParser.stshBox.prototype.write = function (stream) { var i; this.version = 0; this.flags = 0; this.size = 4 + 8 * this.shadowed_sample_numbers.length; this.writeHeader(stream); stream.writeUint32(this.shadowed_sample_numbers.length); for (i = 0; i < this.shadowed_sample_numbers.length; i++) { stream.writeUint32(this.shadowed_sample_numbers[i]); stream.writeUint32(this.sync_sample_numbers[i]); } }; // file:src/writing/stss.js BoxParser.stssBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 4 + 4 * this.sample_numbers.length; this.writeHeader(stream); stream.writeUint32(this.sample_numbers.length); stream.writeUint32Array(this.sample_numbers); }; // file:src/writing/stsz.js BoxParser.stszBox.prototype.write = function (stream) { var i; var constant = true; this.version = 0; this.flags = 0; if (this.sample_sizes.length > 0) { i = 0; while (i + 1 < this.sample_sizes.length) { if (this.sample_sizes[i + 1] !== this.sample_sizes[0]) { constant = false; break; } else { i++; } } } else { constant = false; } this.size = 8; if (!constant) { this.size += 4 * this.sample_sizes.length; } this.writeHeader(stream); if (!constant) { stream.writeUint32(0); } else { stream.writeUint32(this.sample_sizes[0]); } stream.writeUint32(this.sample_sizes.length); if (!constant) { stream.writeUint32Array(this.sample_sizes); } }; // file:src/writing/stts.js BoxParser.sttsBox.prototype.write = function (stream) { var i; this.version = 0; this.flags = 0; this.size = 4 + 8 * this.sample_counts.length; this.writeHeader(stream); stream.writeUint32(this.sample_counts.length); for (i = 0; i < this.sample_counts.length; i++) { stream.writeUint32(this.sample_counts[i]); stream.writeUint32(this.sample_deltas[i]); } }; // file:src/writing/tfdt.js BoxParser.tfdtBox.prototype.write = function (stream) { var UINT32_MAX = Math.pow(2, 32) - 1; // use version 1 if baseMediaDecodeTime does not fit 32 bits this.version = this.baseMediaDecodeTime > UINT32_MAX ? 
1 : 0; this.flags = 0; this.size = 4; if (this.version === 1) { this.size += 4; } this.writeHeader(stream); if (this.version === 1) { stream.writeUint64(this.baseMediaDecodeTime); } else { stream.writeUint32(this.baseMediaDecodeTime); } }; // file:src/writing/tfhd.js BoxParser.tfhdBox.prototype.write = function (stream) { this.version = 0; this.size = 4; if (this.flags & BoxParser.TFHD_FLAG_BASE_DATA_OFFSET) { this.size += 8; } if (this.flags & BoxParser.TFHD_FLAG_SAMPLE_DESC) { this.size += 4; } if (this.flags & BoxParser.TFHD_FLAG_SAMPLE_DUR) { this.size += 4; } if (this.flags & BoxParser.TFHD_FLAG_SAMPLE_SIZE) { this.size += 4; } if (this.flags & BoxParser.TFHD_FLAG_SAMPLE_FLAGS) { this.size += 4; } this.writeHeader(stream); stream.writeUint32(this.track_id); if (this.flags & BoxParser.TFHD_FLAG_BASE_DATA_OFFSET) { stream.writeUint64(this.base_data_offset); } if (this.flags & BoxParser.TFHD_FLAG_SAMPLE_DESC) { stream.writeUint32(this.default_sample_description_index); } if (this.flags & BoxParser.TFHD_FLAG_SAMPLE_DUR) { stream.writeUint32(this.default_sample_duration); } if (this.flags & BoxParser.TFHD_FLAG_SAMPLE_SIZE) { stream.writeUint32(this.default_sample_size); } if (this.flags & BoxParser.TFHD_FLAG_SAMPLE_FLAGS) { stream.writeUint32(this.default_sample_flags); } }; // file:src/writing/tkhd.js BoxParser.tkhdBox.prototype.write = function (stream) { this.version = 0; //this.flags = 0; this.size = 4 * 18 + 2 * 4; this.writeHeader(stream); stream.writeUint32(this.creation_time); stream.writeUint32(this.modification_time); stream.writeUint32(this.track_id); stream.writeUint32(0); stream.writeUint32(this.duration); stream.writeUint32(0); stream.writeUint32(0); stream.writeInt16(this.layer); stream.writeInt16(this.alternate_group); stream.writeInt16(this.volume << 8); stream.writeUint16(0); stream.writeInt32Array(this.matrix); stream.writeUint32(this.width); stream.writeUint32(this.height); }; // file:src/writing/trex.js BoxParser.trexBox.prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = 4 * 5; this.writeHeader(stream); stream.writeUint32(this.track_id); stream.writeUint32(this.default_sample_description_index); stream.writeUint32(this.default_sample_duration); stream.writeUint32(this.default_sample_size); stream.writeUint32(this.default_sample_flags); }; // file:src/writing/trun.js BoxParser.trunBox.prototype.write = function (stream) { this.version = 0; this.size = 4; if (this.flags & BoxParser.TRUN_FLAGS_DATA_OFFSET) { this.size += 4; } if (this.flags & BoxParser.TRUN_FLAGS_FIRST_FLAG) { this.size += 4; } if (this.flags & BoxParser.TRUN_FLAGS_DURATION) { this.size += 4 * this.sample_duration.length; } if (this.flags & BoxParser.TRUN_FLAGS_SIZE) { this.size += 4 * this.sample_size.length; } if (this.flags & BoxParser.TRUN_FLAGS_FLAGS) { this.size += 4 * this.sample_flags.length; } if (this.flags & BoxParser.TRUN_FLAGS_CTS_OFFSET) { this.size += 4 * this.sample_composition_time_offset.length; } this.writeHeader(stream); stream.writeUint32(this.sample_count); if (this.flags & BoxParser.TRUN_FLAGS_DATA_OFFSET) { this.data_offset_position = stream.getPosition(); stream.writeInt32(this.data_offset); //signed } if (this.flags & BoxParser.TRUN_FLAGS_FIRST_FLAG) { stream.writeUint32(this.first_sample_flags); } for (var i = 0; i < this.sample_count; i++) { if (this.flags & BoxParser.TRUN_FLAGS_DURATION) { stream.writeUint32(this.sample_duration[i]); } if (this.flags & BoxParser.TRUN_FLAGS_SIZE) { stream.writeUint32(this.sample_size[i]); } if (this.flags & 
BoxParser.TRUN_FLAGS_FLAGS) { stream.writeUint32(this.sample_flags[i]); } if (this.flags & BoxParser.TRUN_FLAGS_CTS_OFFSET) { if (this.version === 0) { stream.writeUint32(this.sample_composition_time_offset[i]); } else { stream.writeInt32(this.sample_composition_time_offset[i]); //signed } } } }; // file:src/writing/url.js BoxParser["url Box"].prototype.write = function (stream) { this.version = 0; if (this.location) { this.flags = 0; this.size = this.location.length + 1; } else { this.flags = 0x000001; this.size = 0; } this.writeHeader(stream); if (this.location) { stream.writeCString(this.location); } }; // file:src/writing/urn.js BoxParser["urn Box"].prototype.write = function (stream) { this.version = 0; this.flags = 0; this.size = this.name.length + 1 + (this.location ? this.location.length + 1 : 0); this.writeHeader(stream); stream.writeCString(this.name); if (this.location) { stream.writeCString(this.location); } }; // file:src/writing/vmhd.js BoxParser.vmhdBox.prototype.write = function (stream) { this.version = 0; this.flags = 1; this.size = 8; this.writeHeader(stream); stream.writeUint16(this.graphicsmode); stream.writeUint16Array(this.opcolor); }; // file:src/box-unpack.js /* * Copyright (c) Telecom ParisTech/TSI/MM/GPAC Cyril Concolato * License: BSD-3-Clause (see LICENSE file) */ BoxParser.cttsBox.prototype.unpack = function (samples) { var i, j, k; k = 0; for (i = 0; i < this.sample_counts.length; i++) { for (j = 0; j < this.sample_counts[i]; j++) { samples[k].pts = samples[k].dts + this.sample_offsets[i]; k++; } } }; BoxParser.sttsBox.prototype.unpack = function (samples) { var i, j, k; k = 0; for (i = 0; i < this.sample_counts.length; i++) { for (j = 0; j < this.sample_counts[i]; j++) { if (k === 0) { samples[k].dts = 0; } else { samples[k].dts = samples[k - 1].dts + this.sample_deltas[i]; } k++; } } }; BoxParser.stcoBox.prototype.unpack = function (samples) { var i; for (i = 0; i < this.chunk_offsets.length; i++) { samples[i].offset = this.chunk_offsets[i]; } }; BoxParser.stscBox.prototype.unpack = function (samples) { var i, j, k, l, m; l = 0; m = 0; for (i = 0; i < this.first_chunk.length; i++) { for (j = 0; j < (i + 1 < this.first_chunk.length ? 
this.first_chunk[i + 1] : Infinity); j++) { m++; for (k = 0; k < this.samples_per_chunk[i]; k++) { if (samples[l]) { samples[l].description_index = this.sample_description_index[i]; samples[l].chunk_index = m; } else { return; } l++; } } } }; BoxParser.stszBox.prototype.unpack = function (samples) { var i; for (i = 0; i < this.sample_sizes.length; i++) { samples[i].size = this.sample_sizes[i]; } }; // file:src/box-diff.js BoxParser.DIFF_BOXES_PROP_NAMES = ["boxes", "entries", "references", "subsamples", "items", "item_infos", "extents", "associations", "subsegments", "ranges", "seekLists", "seekPoints", "esd", "levels"]; BoxParser.DIFF_PRIMITIVE_ARRAY_PROP_NAMES = ["compatible_brands", "matrix", "opcolor", "sample_counts", "sample_counts", "sample_deltas", "first_chunk", "samples_per_chunk", "sample_sizes", "chunk_offsets", "sample_offsets", "sample_description_index", "sample_duration"]; BoxParser.boxEqualFields = function (box_a, box_b) { if (box_a && !box_b) return false; var prop; for (prop in box_a) { if (BoxParser.DIFF_BOXES_PROP_NAMES.indexOf(prop) > -1) { continue; // } else if (excluded_fields && excluded_fields.indexOf(prop) > -1) { // continue; } else if (box_a[prop] instanceof BoxParser.Box || box_b[prop] instanceof BoxParser.Box) { continue; } else if (typeof box_a[prop] === "undefined" || typeof box_b[prop] === "undefined") { continue; } else if (typeof box_a[prop] === "function" || typeof box_b[prop] === "function") { continue; } else if (box_a.subBoxNames && box_a.subBoxNames.indexOf(prop.slice(0, 4)) > -1 || box_b.subBoxNames && box_b.subBoxNames.indexOf(prop.slice(0, 4)) > -1) { continue; } else { if (prop === "data" || prop === "start" || prop === "size" || prop === "creation_time" || prop === "modification_time") { continue; } else if (BoxParser.DIFF_PRIMITIVE_ARRAY_PROP_NAMES.indexOf(prop) > -1) { continue; } else { if (box_a[prop] !== box_b[prop]) { return false; } } } } return true; }; BoxParser.boxEqual = function (box_a, box_b) { if (!BoxParser.boxEqualFields(box_a, box_b)) { return false; } for (var j = 0; j < BoxParser.DIFF_BOXES_PROP_NAMES.length; j++) { var name = BoxParser.DIFF_BOXES_PROP_NAMES[j]; if (box_a[name] && box_b[name]) { if (!BoxParser.boxEqual(box_a[name], box_b[name])) { return false; } } } return true; }; // file:src/text-mp4.js var XMLSubtitlein4Parser = function () {}; XMLSubtitlein4Parser.prototype.parseSample = function (sample) { var res = {}; var i; res.resources = []; var stream = new MP4BoxStream(sample.data.buffer); if (!sample.subsamples || sample.subsamples.length === 0) { res.documentString = stream.readString(sample.data.length); } else { res.documentString = stream.readString(sample.subsamples[0].size); if (sample.subsamples.length > 1) { for (i = 1; i < sample.subsamples.length; i++) { res.resources[i] = stream.readUint8Array(sample.subsamples[i].size); } } } if (typeof DOMParser !== "undefined") { res.document = new DOMParser().parseFromString(res.documentString, "application/xml"); } return res; }; var Textin4Parser = function () {}; Textin4Parser.prototype.parseSample = function (sample) { var textString; var stream = new MP4BoxStream(sample.data.buffer); textString = stream.readString(sample.data.length); return textString; }; Textin4Parser.prototype.parseConfig = function (data) { var textString; var stream = new MP4BoxStream(data.buffer); stream.readUint32(); // version & flags textString = stream.readCString(); return textString; }; { exports.XMLSubtitlein4Parser = XMLSubtitlein4Parser; exports.Textin4Parser = Textin4Parser; 
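/* Hedged usage sketch (added comment, not part of the library): one way the two sample
   parsers above are typically driven. It assumes an ISOFile instance ("isofile") has been
   created and fed elsewhere, and that samples arrive through setExtractionOptions() and the
   onSamples callback defined later in this file; the track id and variable names are
   illustrative only.

   isofile.onSamples = function (trackId, user, samples) {
     var xmlParser = new XMLSubtitlein4Parser(); // e.g. for XML subtitle ('stpp'/TTML) tracks
     var textParser = new Textin4Parser();       // e.g. for plain-text sample payloads
     for (var i = 0; i < samples.length; i++) {
       var xml = xmlParser.parseSample(samples[i]);  // { documentString, document, resources }
       var txt = textParser.parseSample(samples[i]); // raw string payload
     }
   };
   isofile.setExtractionOptions(trackId, null, { nbSamples: 50 });
   isofile.start();
*/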
} // file:src/isofile.js /* * Copyright (c) 2012-2013. Telecom ParisTech/TSI/MM/GPAC Cyril Concolato * License: BSD-3-Clause (see LICENSE file) */ var ISOFile = function (stream) { /* MutiBufferStream object used to parse boxes */ this.stream = stream || new MultiBufferStream(); /* Array of all boxes (in order) found in the file */ this.boxes = []; /* Array of all mdats */ this.mdats = []; /* Array of all moofs */ this.moofs = []; /* Boolean indicating if the file is compatible with progressive parsing (moov first) */ this.isProgressive = false; /* Boolean used to fire moov start event only once */ this.moovStartFound = false; /* Callback called when the moov parsing starts */ this.onMoovStart = null; /* Boolean keeping track of the call to onMoovStart, to avoid double calls */ this.moovStartSent = false; /* Callback called when the moov is entirely parsed */ this.onReady = null; /* Boolean keeping track of the call to onReady, to avoid double calls */ this.readySent = false; /* Callback to call when segments are ready */ this.onSegment = null; /* Callback to call when samples are ready */ this.onSamples = null; /* Callback to call when there is an error in the parsing or processing of samples */ this.onError = null; /* Boolean indicating if the moov box run-length encoded tables of sample information have been processed */ this.sampleListBuilt = false; /* Array of Track objects for which fragmentation of samples is requested */ this.fragmentedTracks = []; /* Array of Track objects for which extraction of samples is requested */ this.extractedTracks = []; /* Boolean indicating that fragmention is ready */ this.isFragmentationInitialized = false; /* Boolean indicating that fragmented has started */ this.sampleProcessingStarted = false; /* Number of the next 'moof' to generate when fragmenting */ this.nextMoofNumber = 0; /* Boolean indicating if the initial list of items has been produced */ this.itemListBuilt = false; /* Callback called when the sidx box is entirely parsed */ this.onSidx = null; /* Boolean keeping track of the call to onSidx, to avoid double calls */ this.sidxSent = false; }; ISOFile.prototype.setSegmentOptions = function (id, user, options) { var trak = this.getTrackById(id); if (trak) { var fragTrack = {}; this.fragmentedTracks.push(fragTrack); fragTrack.id = id; fragTrack.user = user; fragTrack.trak = trak; trak.nextSample = 0; fragTrack.segmentStream = null; fragTrack.nb_samples = 1000; fragTrack.rapAlignement = true; if (options) { if (options.nbSamples) fragTrack.nb_samples = options.nbSamples; if (options.rapAlignement) fragTrack.rapAlignement = options.rapAlignement; } } }; ISOFile.prototype.unsetSegmentOptions = function (id) { var index = -1; for (var i = 0; i < this.fragmentedTracks.length; i++) { var fragTrack = this.fragmentedTracks[i]; if (fragTrack.id == id) { index = i; } } if (index > -1) { this.fragmentedTracks.splice(index, 1); } }; ISOFile.prototype.setExtractionOptions = function (id, user, options) { var trak = this.getTrackById(id); if (trak) { var extractTrack = {}; this.extractedTracks.push(extractTrack); extractTrack.id = id; extractTrack.user = user; extractTrack.trak = trak; trak.nextSample = 0; extractTrack.nb_samples = 1000; extractTrack.samples = []; if (options) { if (options.nbSamples) extractTrack.nb_samples = options.nbSamples; } } }; ISOFile.prototype.unsetExtractionOptions = function (id) { var index = -1; for (var i = 0; i < this.extractedTracks.length; i++) { var extractTrack = this.extractedTracks[i]; if (extractTrack.id == id) { 
index = i; } } if (index > -1) { this.extractedTracks.splice(index, 1); } }; ISOFile.prototype.parse = function () { var ret; var box; var parseBoxHeadersOnly = false; if (this.restoreParsePosition) { if (!this.restoreParsePosition()) { return; } } while (true) { if (this.hasIncompleteMdat && this.hasIncompleteMdat()) { if (this.processIncompleteMdat()) { continue; } else { return; } } else { if (this.saveParsePosition) { this.saveParsePosition(); } ret = BoxParser.parseOneBox(this.stream, parseBoxHeadersOnly); if (ret.code === BoxParser.ERR_NOT_ENOUGH_DATA) { if (this.processIncompleteBox) { if (this.processIncompleteBox(ret)) { continue; } else { return; } } else { return; } } else { var box_type; /* the box is entirely parsed */ box = ret.box; box_type = box.type !== "uuid" ? box.type : box.uuid; /* store the box in the 'boxes' array to preserve box order (for file rewrite if needed) */ this.boxes.push(box); /* but also store box in a property for more direct access */ switch (box_type) { case "mdat": this.mdats.push(box); break; case "moof": this.moofs.push(box); break; case "moov": this.moovStartFound = true; if (this.mdats.length === 0) { this.isProgressive = true; } /* no break */ /* falls through */ default: if (this[box_type] !== undefined) { Log.warn("ISOFile", "Duplicate Box of type: " + box_type + ", overriding previous occurrence"); } this[box_type] = box; break; } if (this.updateUsedBytes) { this.updateUsedBytes(box, ret); } } } } }; ISOFile.prototype.checkBuffer = function (ab) { if (ab === null || ab === undefined) { throw "Buffer must be defined and non empty"; } if (ab.fileStart === undefined) { throw "Buffer must have a fileStart property"; } if (ab.byteLength === 0) { Log.warn("ISOFile", "Ignoring empty buffer (fileStart: " + ab.fileStart + ")"); this.stream.logBufferLevel(); return false; } Log.info("ISOFile", "Processing buffer (fileStart: " + ab.fileStart + ")"); /* mark the bytes in the buffer as not being used yet */ ab.usedBytes = 0; this.stream.insertBuffer(ab); this.stream.logBufferLevel(); if (!this.stream.initialized()) { Log.warn("ISOFile", "Not ready to start parsing"); return false; } return true; }; /* Processes a new ArrayBuffer (with a fileStart property) Returns the next expected file position, or undefined if not ready to parse */ ISOFile.prototype.appendBuffer = function (ab, last) { var nextFileStart; if (!this.checkBuffer(ab)) { return; } /* Parse whatever is in the existing buffers */ this.parse(); /* Check if the moovStart callback needs to be called */ if (this.moovStartFound && !this.moovStartSent) { this.moovStartSent = true; if (this.onMoovStart) this.onMoovStart(); } if (this.moov) { /* A moov box has been entirely parsed */ /* if this is the first call after the moov is found we initialize the list of samples (may be empty in fragmented files) */ if (!this.sampleListBuilt) { this.buildSampleLists(); this.sampleListBuilt = true; } /* We update the sample information if there are any new moof boxes */ this.updateSampleLists(); /* If the application needs to be informed that the 'moov' has been found, we create the information object and callback the application */ if (this.onReady && !this.readySent) { this.readySent = true; this.onReady(this.getInfo()); } /* See if any sample extraction or segment creation needs to be done with the available samples */ this.processSamples(last); /* Inform about the best range to fetch next */ if (this.nextSeekPosition) { nextFileStart = this.nextSeekPosition; this.nextSeekPosition = undefined; } else { 
nextFileStart = this.nextParsePosition; } if (this.stream.getEndFilePositionAfter) { nextFileStart = this.stream.getEndFilePositionAfter(nextFileStart); } } else { if (this.nextParsePosition) { /* moov has not been parsed but the first buffer was received, the next fetch should probably be the next box start */ nextFileStart = this.nextParsePosition; } else { /* No valid buffer has been parsed yet, we cannot know what to parse next */ nextFileStart = 0; } } if (this.sidx) { if (this.onSidx && !this.sidxSent) { this.onSidx(this.sidx); this.sidxSent = true; } } if (this.meta) { if (this.flattenItemInfo && !this.itemListBuilt) { this.flattenItemInfo(); this.itemListBuilt = true; } if (this.processItems) { this.processItems(this.onItem); } } if (this.stream.cleanBuffers) { Log.info("ISOFile", "Done processing buffer (fileStart: " + ab.fileStart + ") - next buffer to fetch should have a fileStart position of " + nextFileStart); this.stream.logBufferLevel(); this.stream.cleanBuffers(); this.stream.logBufferLevel(true); Log.info("ISOFile", "Sample data size in memory: " + this.getAllocatedSampleDataSize()); } return nextFileStart; }; ISOFile.prototype.getInfo = function () { var i, j; var movie = {}; var trak; var track; var ref; var sample_desc; var _1904 = new Date('1904-01-01T00:00:00Z').getTime(); if (this.moov) { movie.hasMoov = true; movie.duration = this.moov.mvhd.duration; movie.timescale = this.moov.mvhd.timescale; movie.isFragmented = this.moov.mvex != null; if (movie.isFragmented && this.moov.mvex.mehd) { movie.fragment_duration = this.moov.mvex.mehd.fragment_duration; } movie.isProgressive = this.isProgressive; movie.hasIOD = this.moov.iods != null; movie.brands = []; movie.brands.push(this.ftyp.major_brand); movie.brands = movie.brands.concat(this.ftyp.compatible_brands); movie.created = new Date(_1904 + this.moov.mvhd.creation_time * 1000); movie.modified = new Date(_1904 + this.moov.mvhd.modification_time * 1000); movie.tracks = []; movie.audioTracks = []; movie.videoTracks = []; movie.subtitleTracks = []; movie.metadataTracks = []; movie.hintTracks = []; movie.otherTracks = []; for (i = 0; i < this.moov.traks.length; i++) { trak = this.moov.traks[i]; sample_desc = trak.mdia.minf.stbl.stsd.entries[0]; track = {}; movie.tracks.push(track); track.id = trak.tkhd.track_id; track.name = trak.mdia.hdlr.name; track.references = []; if (trak.tref) { for (j = 0; j < trak.tref.boxes.length; j++) { ref = {}; track.references.push(ref); ref.type = trak.tref.boxes[j].type; ref.track_ids = trak.tref.boxes[j].track_ids; } } if (trak.edts) { track.edits = trak.edts.elst.entries; } track.created = new Date(_1904 + trak.tkhd.creation_time * 1000); track.modified = new Date(_1904 + trak.tkhd.modification_time * 1000); track.movie_duration = trak.tkhd.duration; track.movie_timescale = movie.timescale; track.layer = trak.tkhd.layer; track.alternate_group = trak.tkhd.alternate_group; track.volume = trak.tkhd.volume; track.matrix = trak.tkhd.matrix; track.track_width = trak.tkhd.width / (1 << 16); track.track_height = trak.tkhd.height / (1 << 16); track.timescale = trak.mdia.mdhd.timescale; track.cts_shift = trak.mdia.minf.stbl.cslg; track.duration = trak.mdia.mdhd.duration; track.samples_duration = trak.samples_duration; track.codec = sample_desc.getCodec(); track.kind = trak.udta && trak.udta.kinds.length ? trak.udta.kinds[0] : { schemeURI: "", value: "" }; track.language = trak.mdia.elng ? 
trak.mdia.elng.extended_language : trak.mdia.mdhd.languageString; track.nb_samples = trak.samples.length; track.size = trak.samples_size; track.bitrate = track.size * 8 * track.timescale / track.samples_duration; if (sample_desc.isAudio()) { track.type = "audio"; movie.audioTracks.push(track); track.audio = {}; track.audio.sample_rate = sample_desc.getSampleRate(); track.audio.channel_count = sample_desc.getChannelCount(); track.audio.sample_size = sample_desc.getSampleSize(); } else if (sample_desc.isVideo()) { track.type = "video"; movie.videoTracks.push(track); track.video = {}; track.video.width = sample_desc.getWidth(); track.video.height = sample_desc.getHeight(); } else if (sample_desc.isSubtitle()) { track.type = "subtitles"; movie.subtitleTracks.push(track); } else if (sample_desc.isHint()) { track.type = "metadata"; movie.hintTracks.push(track); } else if (sample_desc.isMetadata()) { track.type = "metadata"; movie.metadataTracks.push(track); } else { track.type = "metadata"; movie.otherTracks.push(track); } } } else { movie.hasMoov = false; } movie.mime = ""; if (movie.hasMoov && movie.tracks) { if (movie.videoTracks && movie.videoTracks.length > 0) { movie.mime += 'video/mp4; codecs=\"'; } else if (movie.audioTracks && movie.audioTracks.length > 0) { movie.mime += 'audio/mp4; codecs=\"'; } else { movie.mime += 'application/mp4; codecs=\"'; } for (i = 0; i < movie.tracks.length; i++) { if (i !== 0) movie.mime += ','; movie.mime += movie.tracks[i].codec; } movie.mime += '\"; profiles=\"'; movie.mime += this.ftyp.compatible_brands.join(); movie.mime += '\"'; } return movie; }; ISOFile.prototype.setNextSeekPositionFromSample = function (sample) { if (!sample) { return; } if (this.nextSeekPosition) { this.nextSeekPosition = Math.min(sample.offset + sample.alreadyRead, this.nextSeekPosition); } else { this.nextSeekPosition = sample.offset + sample.alreadyRead; } }; ISOFile.prototype.processSamples = function (last) { var i; var trak; if (!this.sampleProcessingStarted) return; /* For each track marked for fragmentation, check if the next sample is there (i.e. if the sample information is known (i.e. moof has arrived) and if it has been downloaded) and create a fragment with it */ if (this.isFragmentationInitialized && this.onSegment !== null) { for (i = 0; i < this.fragmentedTracks.length; i++) { var fragTrak = this.fragmentedTracks[i]; trak = fragTrak.trak; while (trak.nextSample < trak.samples.length && this.sampleProcessingStarted) { /* The sample information is there (either because the file is not fragmented and this is not the last sample, or because the file is fragmented and the moof for that sample has been received */ Log.debug("ISOFile", "Creating media fragment on track #" + fragTrak.id + " for sample " + trak.nextSample); var result = this.createFragment(fragTrak.id, trak.nextSample, fragTrak.segmentStream); if (result) { fragTrak.segmentStream = result; trak.nextSample++; } else { /* The fragment could not be created because the media data is not there (not downloaded), wait for it */ break; } /* A fragment is created by sample, but the segment is the accumulation in the buffer of these fragments. 
It is flushed only as requested by the application (nb_samples) to avoid too many callbacks */ if (trak.nextSample % fragTrak.nb_samples === 0 || last || trak.nextSample >= trak.samples.length) { Log.info("ISOFile", "Sending fragmented data on track #" + fragTrak.id + " for samples [" + Math.max(0, trak.nextSample - fragTrak.nb_samples) + "," + (trak.nextSample - 1) + "]"); Log.info("ISOFile", "Sample data size in memory: " + this.getAllocatedSampleDataSize()); if (this.onSegment) { this.onSegment(fragTrak.id, fragTrak.user, fragTrak.segmentStream.buffer, trak.nextSample, last || trak.nextSample >= trak.samples.length); } /* force the creation of a new buffer */ fragTrak.segmentStream = null; if (fragTrak !== this.fragmentedTracks[i]) { /* make sure we can stop fragmentation if needed */ break; } } } } } if (this.onSamples !== null) { /* For each track marked for data export, check if the next sample is there (i.e. has been downloaded) and send it */ for (i = 0; i < this.extractedTracks.length; i++) { var extractTrak = this.extractedTracks[i]; trak = extractTrak.trak; while (trak.nextSample < trak.samples.length && this.sampleProcessingStarted) { Log.debug("ISOFile", "Exporting on track #" + extractTrak.id + " sample #" + trak.nextSample); var sample = this.getSample(trak, trak.nextSample); if (sample) { trak.nextSample++; extractTrak.samples.push(sample); } else { this.setNextSeekPositionFromSample(trak.samples[trak.nextSample]); break; } if (trak.nextSample % extractTrak.nb_samples === 0 || trak.nextSample >= trak.samples.length) { Log.debug("ISOFile", "Sending samples on track #" + extractTrak.id + " for sample " + trak.nextSample); if (this.onSamples) { this.onSamples(extractTrak.id, extractTrak.user, extractTrak.samples); } extractTrak.samples = []; if (extractTrak !== this.extractedTracks[i]) { /* check if the extraction needs to be stopped */ break; } } } } } }; /* Find and return specific boxes using recursion and early return */ ISOFile.prototype.getBox = function (type) { var result = this.getBoxes(type, true); return result.length ? 
result[0] : null; }; ISOFile.prototype.getBoxes = function (type, returnEarly) { var result = []; ISOFile._sweep.call(this, type, result, returnEarly); return result; }; ISOFile._sweep = function (type, result, returnEarly) { if (this.type && this.type == type) result.push(this); for (var box in this.boxes) { if (result.length && returnEarly) return; ISOFile._sweep.call(this.boxes[box], type, result, returnEarly); } }; ISOFile.prototype.getTrackSamplesInfo = function (track_id) { var track = this.getTrackById(track_id); if (track) { return track.samples; } else { return; } }; ISOFile.prototype.getTrackSample = function (track_id, number) { var track = this.getTrackById(track_id); var sample = this.getSample(track, number); return sample; }; /* Called by the application to release the resources associated to samples already forwarded to the application */ ISOFile.prototype.releaseUsedSamples = function (id, sampleNum) { var size = 0; var trak = this.getTrackById(id); if (!trak.lastValidSample) trak.lastValidSample = 0; for (var i = trak.lastValidSample; i < sampleNum; i++) { size += this.releaseSample(trak, i); } Log.info("ISOFile", "Track #" + id + " released samples up to " + sampleNum + " (released size: " + size + ", remaining: " + this.samplesDataSize + ")"); trak.lastValidSample = sampleNum; }; ISOFile.prototype.start = function () { this.sampleProcessingStarted = true; this.processSamples(false); }; ISOFile.prototype.stop = function () { this.sampleProcessingStarted = false; }; /* Called by the application to flush the remaining samples (e.g. once the download is finished or when no more samples will be added) */ ISOFile.prototype.flush = function () { Log.info("ISOFile", "Flushing remaining samples"); this.updateSampleLists(); this.processSamples(true); this.stream.cleanBuffers(); this.stream.logBufferLevel(true); }; /* Finds the byte offset for a given time on a given track also returns the time of the previous rap */ ISOFile.prototype.seekTrack = function (time, useRap, trak) { var j; var sample; var seek_offset = Infinity; var rap_seek_sample_num = 0; var seek_sample_num = 0; var timescale; if (trak.samples.length === 0) { Log.info("ISOFile", "No sample in track, cannot seek! Using time " + Log.getDurationString(0, 1) + " and offset: " + 0); return { offset: 0, time: 0 }; } for (j = 0; j < trak.samples.length; j++) { sample = trak.samples[j]; if (j === 0) { seek_sample_num = 0; timescale = sample.timescale; } else if (sample.cts > time * sample.timescale) { seek_sample_num = j - 1; break; } if (useRap && sample.is_sync) { rap_seek_sample_num = j; } } if (useRap) { seek_sample_num = rap_seek_sample_num; } time = trak.samples[seek_sample_num].cts; trak.nextSample = seek_sample_num; while (trak.samples[seek_sample_num].alreadyRead === trak.samples[seek_sample_num].size) { // No remaining samples to look for, all are downloaded. if (!trak.samples[seek_sample_num + 1]) { break; } seek_sample_num++; } seek_offset = trak.samples[seek_sample_num].offset + trak.samples[seek_sample_num].alreadyRead; Log.info("ISOFile", "Seeking to " + (useRap ? 
"RAP" : "") + " sample #" + trak.nextSample + " on track " + trak.tkhd.track_id + ", time " + Log.getDurationString(time, timescale) + " and offset: " + seek_offset); return { offset: seek_offset, time: time / timescale }; }; ISOFile.prototype.getTrackDuration = function (trak) { var sample; if (!trak.samples) { return Infinity; } sample = trak.samples[trak.samples.length - 1]; return (sample.cts + sample.duration) / sample.timescale; }; /* Finds the byte offset in the file corresponding to the given time or to the time of the previous RAP */ ISOFile.prototype.seek = function (time, useRap) { var moov = this.moov; var trak; var trak_seek_info; var i; var seek_info = { offset: Infinity, time: Infinity }; if (!this.moov) { throw "Cannot seek: moov not received!"; } else { for (i = 0; i < moov.traks.length; i++) { trak = moov.traks[i]; if (time > this.getTrackDuration(trak)) { // skip tracks that already ended continue; } trak_seek_info = this.seekTrack(time, useRap, trak); if (trak_seek_info.offset < seek_info.offset) { seek_info.offset = trak_seek_info.offset; } if (trak_seek_info.time < seek_info.time) { seek_info.time = trak_seek_info.time; } } Log.info("ISOFile", "Seeking at time " + Log.getDurationString(seek_info.time, 1) + " needs a buffer with a fileStart position of " + seek_info.offset); if (seek_info.offset === Infinity) { /* No sample info, in all tracks, cannot seek */ seek_info = { offset: this.nextParsePosition, time: 0 }; } else { /* check if the seek position is already in some buffer and in that case return the end of that buffer (or of the last contiguous buffer) */ /* TODO: Should wait until append operations are done */ seek_info.offset = this.stream.getEndFilePositionAfter(seek_info.offset); } Log.info("ISOFile", "Adjusted seek position (after checking data already in buffer): " + seek_info.offset); return seek_info; } }; ISOFile.prototype.equal = function (b) { var box_index = 0; while (box_index < this.boxes.length && box_index < b.boxes.length) { var a_box = this.boxes[box_index]; var b_box = b.boxes[box_index]; if (!BoxParser.boxEqual(a_box, b_box)) { return false; } box_index++; } return true; }; { exports.ISOFile = ISOFile; } // file:src/isofile-advanced-parsing.js /* position in the current buffer of the beginning of the last box parsed */ ISOFile.prototype.lastBoxStartPosition = 0; /* indicator if the parsing is stuck in the middle of an mdat box */ ISOFile.prototype.parsingMdat = null; /* next file position that the parser needs: - 0 until the first buffer (i.e. 
fileStart ===0) has been received - otherwise, the next box start until the moov box has been parsed - otherwise, the position of the next sample to fetch */ ISOFile.prototype.nextParsePosition = 0; /* keep mdat data */ ISOFile.prototype.discardMdatData = false; ISOFile.prototype.processIncompleteBox = function (ret) { var box; var merged; var found; /* we did not have enough bytes in the current buffer to parse the entire box */ if (ret.type === "mdat") { /* we had enough bytes to get its type and size and it's an 'mdat' */ /* special handling for mdat boxes, since we don't actually need to parse it linearly we create the box */ box = new BoxParser[ret.type + "Box"](ret.size); this.parsingMdat = box; this.boxes.push(box); this.mdats.push(box); box.start = ret.start; box.hdr_size = ret.hdr_size; this.stream.addUsedBytes(box.hdr_size); /* indicate that the parsing should start from the end of the box */ this.lastBoxStartPosition = box.start + box.size; /* let's see if we have the end of the box in the other buffers */ found = this.stream.seek(box.start + box.size, false, this.discardMdatData); if (found) { /* found the end of the box */ this.parsingMdat = null; /* let's see if we can parse more in this buffer */ return true; } else { /* 'mdat' end not found in the existing buffers */ /* determine the next position in the file to start parsing from */ if (!this.moovStartFound) { /* moov not find yet, the file probably has 'mdat' at the beginning, and 'moov' at the end, indicate that the downloader should not try to download those bytes now */ this.nextParsePosition = box.start + box.size; } else { /* we have the start of the moov box, the next bytes should try to complete the current 'mdat' */ this.nextParsePosition = this.stream.findEndContiguousBuf(); } /* not much we can do, wait for more buffers to arrive */ return false; } } else { /* box is incomplete, we may not even know its type */ if (ret.type === "moov") { /* the incomplete box is a 'moov' box */ this.moovStartFound = true; if (this.mdats.length === 0) { this.isProgressive = true; } } /* either it's not an mdat box (and we need to parse it, we cannot skip it) (TODO: we could skip 'free' boxes ...) or we did not have enough data to parse the type and size of the box, we try to concatenate the current buffer with the next buffer to restart parsing */ merged = this.stream.mergeNextBuffer ? this.stream.mergeNextBuffer() : false; if (merged) { /* The next buffer was contiguous, the merging succeeded, we can now continue parsing, the next best position to parse is at the end of this new buffer */ this.nextParsePosition = this.stream.getEndPosition(); return true; } else { /* we cannot concatenate existing buffers because they are not contiguous or because there is no additional buffer */ /* The next best position to parse is still at the end of this old buffer */ if (!ret.type) { /* There were not enough bytes in the buffer to parse the box type and length, the next fetch should retrieve those missing bytes, i.e. 
the next bytes after this buffer */ this.nextParsePosition = this.stream.getEndPosition(); } else { /* we had enough bytes to parse size and type of the incomplete box if we haven't found yet the moov box, skip this one and try the next one if we have found the moov box, let's continue linear parsing */ if (this.moovStartFound) { this.nextParsePosition = this.stream.getEndPosition(); } else { this.nextParsePosition = this.stream.getPosition() + ret.size; } } return false; } } }; ISOFile.prototype.hasIncompleteMdat = function () { return this.parsingMdat !== null; }; ISOFile.prototype.processIncompleteMdat = function () { var box; var found; /* we are in the parsing of an incomplete mdat box */ box = this.parsingMdat; found = this.stream.seek(box.start + box.size, false, this.discardMdatData); if (found) { Log.debug("ISOFile", "Found 'mdat' end in buffered data"); /* the end of the mdat has been found */ this.parsingMdat = null; /* we can parse more in this buffer */ return true; } else { /* we don't have the end of this mdat yet, indicate that the next byte to fetch is the end of the buffers we have so far, return and wait for more buffer to come */ this.nextParsePosition = this.stream.findEndContiguousBuf(); return false; } }; ISOFile.prototype.restoreParsePosition = function () { /* Reposition at the start position of the previous box not entirely parsed */ return this.stream.seek(this.lastBoxStartPosition, true, this.discardMdatData); }; ISOFile.prototype.saveParsePosition = function () { /* remember the position of the box start in case we need to roll back (if the box is incomplete) */ this.lastBoxStartPosition = this.stream.getPosition(); }; ISOFile.prototype.updateUsedBytes = function (box, ret) { if (this.stream.addUsedBytes) { if (box.type === "mdat") { /* for an mdat box, only its header is considered used, other bytes will be used when sample data is requested */ this.stream.addUsedBytes(box.hdr_size); if (this.discardMdatData) { this.stream.addUsedBytes(box.size - box.hdr_size); } } else { /* for all other boxes, the entire box data is considered used */ this.stream.addUsedBytes(box.size); } } }; // file:src/isofile-advanced-creation.js ISOFile.prototype.add = BoxParser.Box.prototype.add; ISOFile.prototype.addBox = BoxParser.Box.prototype.addBox; ISOFile.prototype.init = function (_options) { var options = _options || {}; this.add("ftyp").set("major_brand", options.brands && options.brands[0] || "iso4").set("minor_version", 0).set("compatible_brands", options.brands || ["iso4"]); var moov = this.add("moov"); moov.add("mvhd").set("timescale", options.timescale || 600).set("rate", options.rate || 1 << 16).set("creation_time", 0).set("modification_time", 0).set("duration", options.duration || 0).set("volume", options.width ? 
0 : 0x0100).set("matrix", [1 << 16, 0, 0, 0, 1 << 16, 0, 0, 0, 0x40000000]).set("next_track_id", 1); moov.add("mvex"); return this; }; ISOFile.prototype.addTrack = function (_options) { if (!this.moov) { this.init(_options); } var options = _options || {}; options.width = options.width || 320; options.height = options.height || 320; options.id = options.id || this.moov.mvhd.next_track_id; options.type = options.type || "avc1"; var trak = this.moov.add("trak"); this.moov.mvhd.next_track_id = options.id + 1; trak.add("tkhd").set("flags", BoxParser.TKHD_FLAG_ENABLED | BoxParser.TKHD_FLAG_IN_MOVIE | BoxParser.TKHD_FLAG_IN_PREVIEW).set("creation_time", 0).set("modification_time", 0).set("track_id", options.id).set("duration", options.duration || 0).set("layer", options.layer || 0).set("alternate_group", 0).set("volume", 1).set("matrix", [0, 0, 0, 0, 0, 0, 0, 0, 0]).set("width", options.width << 16).set("height", options.height << 16); var mdia = trak.add("mdia"); mdia.add("mdhd").set("creation_time", 0).set("modification_time", 0).set("timescale", options.timescale || 1).set("duration", options.media_duration || 0).set("language", options.language || "und"); mdia.add("hdlr").set("handler", options.hdlr || "vide").set("name", options.name || "Track created with MP4Box.js"); mdia.add("elng").set("extended_language", options.language || "fr-FR"); var minf = mdia.add("minf"); if (BoxParser[options.type + "SampleEntry"] === undefined) return; var sample_description_entry = new BoxParser[options.type + "SampleEntry"](); sample_description_entry.data_reference_index = 1; var media_type = ""; for (var mediaType in BoxParser.sampleEntryCodes) { var codes = BoxParser.sampleEntryCodes[mediaType]; for (var i = 0; i < codes.length; i++) { if (codes.indexOf(options.type) > -1) { media_type = mediaType; break; } } } switch (media_type) { case "Visual": minf.add("vmhd").set("graphicsmode", 0).set("opcolor", [0, 0, 0]); sample_description_entry.set("width", options.width).set("height", options.height).set("horizresolution", 0x48 << 16).set("vertresolution", 0x48 << 16).set("frame_count", 1).set("compressorname", options.type + " Compressor").set("depth", 0x18); if (options.avcDecoderConfigRecord) { var avcC = new BoxParser.avcCBox(); avcC.parse(new MP4BoxStream(options.avcDecoderConfigRecord)); sample_description_entry.addBox(avcC); } else if (options.hevcDecoderConfigRecord) { var hvcC = new BoxParser.hvcCBox(); hvcC.parse(new MP4BoxStream(options.hevcDecoderConfigRecord)); sample_description_entry.addBox(hvcC); } break; case "Audio": minf.add("smhd").set("balance", options.balance || 0); sample_description_entry.set("channel_count", options.channel_count || 2).set("samplesize", options.samplesize || 16).set("samplerate", options.samplerate || 1 << 16); break; case "Hint": minf.add("hmhd"); // TODO: add properties break; case "Subtitle": minf.add("sthd"); switch (options.type) { case "stpp": sample_description_entry.set("namespace", options.namespace || "nonamespace").set("schema_location", options.schema_location || "").set("auxiliary_mime_types", options.auxiliary_mime_types || ""); break; } break; case "Metadata": minf.add("nmhd"); break; case "System": minf.add("nmhd"); break; default: minf.add("nmhd"); break; } if (options.description) { sample_description_entry.addBox(options.description); } if (options.description_boxes) { options.description_boxes.forEach(function (b) { sample_description_entry.addBox(b); }); } minf.add("dinf").add("dref").addEntry(new BoxParser["url Box"]().set("flags", 0x1)); var 
stbl = minf.add("stbl"); stbl.add("stsd").addEntry(sample_description_entry); stbl.add("stts").set("sample_counts", []).set("sample_deltas", []); stbl.add("stsc").set("first_chunk", []).set("samples_per_chunk", []).set("sample_description_index", []); stbl.add("stco").set("chunk_offsets", []); stbl.add("stsz").set("sample_sizes", []); this.moov.mvex.add("trex").set("track_id", options.id).set("default_sample_description_index", options.default_sample_description_index || 1).set("default_sample_duration", options.default_sample_duration || 0).set("default_sample_size", options.default_sample_size || 0).set("default_sample_flags", options.default_sample_flags || 0); this.buildTrakSampleLists(trak); return options.id; }; BoxParser.Box.prototype.computeSize = function (stream_) { var stream = stream_ || new DataStream(); stream.endianness = DataStream.BIG_ENDIAN; this.write(stream); }; ISOFile.prototype.addSample = function (track_id, data, _options) { var options = _options || {}; var sample = {}; var trak = this.getTrackById(track_id); if (trak === null) return; sample.number = trak.samples.length; sample.track_id = trak.tkhd.track_id; sample.timescale = trak.mdia.mdhd.timescale; sample.description_index = options.sample_description_index ? options.sample_description_index - 1 : 0; sample.description = trak.mdia.minf.stbl.stsd.entries[sample.description_index]; sample.data = data; sample.size = data.byteLength; sample.alreadyRead = sample.size; sample.duration = options.duration || 1; sample.cts = options.cts || 0; sample.dts = options.dts || 0; sample.is_sync = options.is_sync || false; sample.is_leading = options.is_leading || 0; sample.depends_on = options.depends_on || 0; sample.is_depended_on = options.is_depended_on || 0; sample.has_redundancy = options.has_redundancy || 0; sample.degradation_priority = options.degradation_priority || 0; sample.offset = 0; sample.subsamples = options.subsamples; trak.samples.push(sample); trak.samples_size += sample.size; trak.samples_duration += sample.duration; if (trak.first_dts === undefined) { trak.first_dts = options.dts; } this.processSamples(); var moof = this.createSingleSampleMoof(sample); this.addBox(moof); moof.computeSize(); /* adjusting the data_offset now that the moof size is known*/ moof.trafs[0].truns[0].data_offset = moof.size + 8; //8 is mdat header this.add("mdat").data = new Uint8Array(data); return sample; }; ISOFile.prototype.createSingleSampleMoof = function (sample) { var sample_flags = 0; if (sample.is_sync) sample_flags = 1 << 25; // sample_depends_on_none (I picture) else sample_flags = 1 << 16; // non-sync var moof = new BoxParser.moofBox(); moof.add("mfhd").set("sequence_number", this.nextMoofNumber); this.nextMoofNumber++; var traf = moof.add("traf"); var trak = this.getTrackById(sample.track_id); traf.add("tfhd").set("track_id", sample.track_id).set("flags", BoxParser.TFHD_FLAG_DEFAULT_BASE_IS_MOOF); traf.add("tfdt").set("baseMediaDecodeTime", sample.dts - (trak.first_dts || 0)); traf.add("trun").set("flags", BoxParser.TRUN_FLAGS_DATA_OFFSET | BoxParser.TRUN_FLAGS_DURATION | BoxParser.TRUN_FLAGS_SIZE | BoxParser.TRUN_FLAGS_FLAGS | BoxParser.TRUN_FLAGS_CTS_OFFSET).set("data_offset", 0).set("first_sample_flags", 0).set("sample_count", 1).set("sample_duration", [sample.duration]).set("sample_size", [sample.size]).set("sample_flags", [sample_flags]).set("sample_composition_time_offset", [sample.cts - sample.dts]); return moof; }; // file:src/isofile-sample-processing.js /* Index of the last moof box received */ 
ISOFile.prototype.lastMoofIndex = 0; /* size of the buffers allocated for samples */ ISOFile.prototype.samplesDataSize = 0; /* Resets all sample tables */ ISOFile.prototype.resetTables = function () { var i; var trak, stco, stsc, stsz, stts, ctts, stss; this.initial_duration = this.moov.mvhd.duration; this.moov.mvhd.duration = 0; for (i = 0; i < this.moov.traks.length; i++) { trak = this.moov.traks[i]; trak.tkhd.duration = 0; trak.mdia.mdhd.duration = 0; stco = trak.mdia.minf.stbl.stco || trak.mdia.minf.stbl.co64; stco.chunk_offsets = []; stsc = trak.mdia.minf.stbl.stsc; stsc.first_chunk = []; stsc.samples_per_chunk = []; stsc.sample_description_index = []; stsz = trak.mdia.minf.stbl.stsz || trak.mdia.minf.stbl.stz2; stsz.sample_sizes = []; stts = trak.mdia.minf.stbl.stts; stts.sample_counts = []; stts.sample_deltas = []; ctts = trak.mdia.minf.stbl.ctts; if (ctts) { ctts.sample_counts = []; ctts.sample_offsets = []; } stss = trak.mdia.minf.stbl.stss; var k = trak.mdia.minf.stbl.boxes.indexOf(stss); if (k != -1) trak.mdia.minf.stbl.boxes[k] = null; } }; ISOFile.initSampleGroups = function (trak, traf, sbgps, trak_sgpds, traf_sgpds) { var l; var k; var sample_group_info; var sample_group_key; function SampleGroupInfo(_type, _parameter, _sbgp) { this.grouping_type = _type; this.grouping_type_parameter = _parameter; this.sbgp = _sbgp; this.last_sample_in_run = -1; this.entry_index = -1; } if (traf) { traf.sample_groups_info = []; } if (!trak.sample_groups_info) { trak.sample_groups_info = []; } for (k = 0; k < sbgps.length; k++) { sample_group_key = sbgps[k].grouping_type + "/" + sbgps[k].grouping_type_parameter; sample_group_info = new SampleGroupInfo(sbgps[k].grouping_type, sbgps[k].grouping_type_parameter, sbgps[k]); if (traf) { traf.sample_groups_info[sample_group_key] = sample_group_info; } if (!trak.sample_groups_info[sample_group_key]) { trak.sample_groups_info[sample_group_key] = sample_group_info; } for (l = 0; l < trak_sgpds.length; l++) { if (trak_sgpds[l].grouping_type === sbgps[k].grouping_type) { sample_group_info.description = trak_sgpds[l]; sample_group_info.description.used = true; } } if (traf_sgpds) { for (l = 0; l < traf_sgpds.length; l++) { if (traf_sgpds[l].grouping_type === sbgps[k].grouping_type) { sample_group_info.fragment_description = traf_sgpds[l]; sample_group_info.fragment_description.used = true; sample_group_info.is_fragment = true; } } } } if (!traf) { for (k = 0; k < trak_sgpds.length; k++) { if (!trak_sgpds[k].used && trak_sgpds[k].version >= 2) { sample_group_key = trak_sgpds[k].grouping_type + "/0"; sample_group_info = new SampleGroupInfo(trak_sgpds[k].grouping_type, 0); if (!trak.sample_groups_info[sample_group_key]) { trak.sample_groups_info[sample_group_key] = sample_group_info; } } } } else { if (traf_sgpds) { for (k = 0; k < traf_sgpds.length; k++) { if (!traf_sgpds[k].used && traf_sgpds[k].version >= 2) { sample_group_key = traf_sgpds[k].grouping_type + "/0"; sample_group_info = new SampleGroupInfo(traf_sgpds[k].grouping_type, 0); sample_group_info.is_fragment = true; if (!traf.sample_groups_info[sample_group_key]) { traf.sample_groups_info[sample_group_key] = sample_group_info; } } } } } }; ISOFile.setSampleGroupProperties = function (trak, sample, sample_number, sample_groups_info) { var k; var index; sample.sample_groups = []; for (k in sample_groups_info) { sample.sample_groups[k] = {}; sample.sample_groups[k].grouping_type = sample_groups_info[k].grouping_type; sample.sample_groups[k].grouping_type_parameter = 
sample_groups_info[k].grouping_type_parameter; if (sample_number >= sample_groups_info[k].last_sample_in_run) { if (sample_groups_info[k].last_sample_in_run < 0) { sample_groups_info[k].last_sample_in_run = 0; } sample_groups_info[k].entry_index++; if (sample_groups_info[k].entry_index <= sample_groups_info[k].sbgp.entries.length - 1) { sample_groups_info[k].last_sample_in_run += sample_groups_info[k].sbgp.entries[sample_groups_info[k].entry_index].sample_count; } } if (sample_groups_info[k].entry_index <= sample_groups_info[k].sbgp.entries.length - 1) { sample.sample_groups[k].group_description_index = sample_groups_info[k].sbgp.entries[sample_groups_info[k].entry_index].group_description_index; } else { sample.sample_groups[k].group_description_index = -1; // special value for not defined } if (sample.sample_groups[k].group_description_index !== 0) { var description; if (sample_groups_info[k].fragment_description) { description = sample_groups_info[k].fragment_description; } else { description = sample_groups_info[k].description; } if (sample.sample_groups[k].group_description_index > 0) { if (sample.sample_groups[k].group_description_index > 65535) { index = (sample.sample_groups[k].group_description_index >> 16) - 1; } else { index = sample.sample_groups[k].group_description_index - 1; } if (description && index >= 0) { sample.sample_groups[k].description = description.entries[index]; } } else { if (description && description.version >= 2) { if (description.default_group_description_index > 0) { sample.sample_groups[k].description = description.entries[description.default_group_description_index - 1]; } } } } } }; ISOFile.process_sdtp = function (sdtp, sample, number) { if (!sample) { return; } if (sdtp) { sample.is_leading = sdtp.is_leading[number]; sample.depends_on = sdtp.sample_depends_on[number]; sample.is_depended_on = sdtp.sample_is_depended_on[number]; sample.has_redundancy = sdtp.sample_has_redundancy[number]; } else { sample.is_leading = 0; sample.depends_on = 0; sample.is_depended_on = 0; sample.has_redundancy = 0; } }; /* Build initial sample list from sample tables */ ISOFile.prototype.buildSampleLists = function () { var i; var trak; for (i = 0; i < this.moov.traks.length; i++) { trak = this.moov.traks[i]; this.buildTrakSampleLists(trak); } }; ISOFile.prototype.buildTrakSampleLists = function (trak) { var j; var stco, stsc, stsz, stts, ctts, stss, stsd, subs, sbgps, sgpds, stdp; var chunk_run_index, chunk_index, last_chunk_in_run, offset_in_chunk, last_sample_in_chunk; var last_sample_in_stts_run, stts_run_index, last_sample_in_ctts_run, ctts_run_index, last_stss_index, subs_entry_index, last_subs_sample_index; trak.samples = []; trak.samples_duration = 0; trak.samples_size = 0; stco = trak.mdia.minf.stbl.stco || trak.mdia.minf.stbl.co64; stsc = trak.mdia.minf.stbl.stsc; stsz = trak.mdia.minf.stbl.stsz || trak.mdia.minf.stbl.stz2; stts = trak.mdia.minf.stbl.stts; ctts = trak.mdia.minf.stbl.ctts; stss = trak.mdia.minf.stbl.stss; stsd = trak.mdia.minf.stbl.stsd; subs = trak.mdia.minf.stbl.subs; stdp = trak.mdia.minf.stbl.stdp; sbgps = trak.mdia.minf.stbl.sbgps; sgpds = trak.mdia.minf.stbl.sgpds; last_sample_in_stts_run = -1; stts_run_index = -1; last_sample_in_ctts_run = -1; ctts_run_index = -1; last_stss_index = 0; subs_entry_index = 0; last_subs_sample_index = 0; ISOFile.initSampleGroups(trak, null, sbgps, sgpds); if (typeof stsz === "undefined") { return; } /* we build the samples one by one and compute their properties */ for (j = 0; j < stsz.sample_sizes.length; j++) { 
var sample = {}; sample.number = j; sample.track_id = trak.tkhd.track_id; sample.timescale = trak.mdia.mdhd.timescale; sample.alreadyRead = 0; trak.samples[j] = sample; /* size can be known directly */ sample.size = stsz.sample_sizes[j]; trak.samples_size += sample.size; /* computing chunk-based properties (offset, sample description index)*/ if (j === 0) { chunk_index = 1; /* the first sample is in the first chunk (chunk indexes are 1-based) */ chunk_run_index = 0; /* the first chunk is the first entry in the first_chunk table */ sample.chunk_index = chunk_index; sample.chunk_run_index = chunk_run_index; last_sample_in_chunk = stsc.samples_per_chunk[chunk_run_index]; offset_in_chunk = 0; /* Is there another entry in the first_chunk table ? */ if (chunk_run_index + 1 < stsc.first_chunk.length) { /* The last chunk in the run is the chunk before the next first chunk */ last_chunk_in_run = stsc.first_chunk[chunk_run_index + 1] - 1; } else { /* There is only one entry in the table, it is valid for all future chunks*/ last_chunk_in_run = Infinity; } } else { if (j < last_sample_in_chunk) { /* the sample is still in the current chunk */ sample.chunk_index = chunk_index; sample.chunk_run_index = chunk_run_index; } else { /* the sample is in the next chunk */ chunk_index++; sample.chunk_index = chunk_index; /* reset the accumulated offset in the chunk */ offset_in_chunk = 0; if (chunk_index <= last_chunk_in_run) ; else { chunk_run_index++; /* Is there another entry in the first_chunk table ? */ if (chunk_run_index + 1 < stsc.first_chunk.length) { /* The last chunk in the run is the chunk before the next first chunk */ last_chunk_in_run = stsc.first_chunk[chunk_run_index + 1] - 1; } else { /* There is only one entry in the table, it is valid for all future chunks*/ last_chunk_in_run = Infinity; } } sample.chunk_run_index = chunk_run_index; last_sample_in_chunk += stsc.samples_per_chunk[chunk_run_index]; } } sample.description_index = stsc.sample_description_index[sample.chunk_run_index] - 1; sample.description = stsd.entries[sample.description_index]; sample.offset = stco.chunk_offsets[sample.chunk_index - 1] + offset_in_chunk; /* chunk indexes are 1-based */ offset_in_chunk += sample.size; /* setting dts, cts, duration and rap flags */ if (j > last_sample_in_stts_run) { stts_run_index++; if (last_sample_in_stts_run < 0) { last_sample_in_stts_run = 0; } last_sample_in_stts_run += stts.sample_counts[stts_run_index]; } if (j > 0) { trak.samples[j - 1].duration = stts.sample_deltas[stts_run_index]; trak.samples_duration += trak.samples[j - 1].duration; sample.dts = trak.samples[j - 1].dts + trak.samples[j - 1].duration; } else { sample.dts = 0; } if (ctts) { if (j >= last_sample_in_ctts_run) { ctts_run_index++; if (last_sample_in_ctts_run < 0) { last_sample_in_ctts_run = 0; } last_sample_in_ctts_run += ctts.sample_counts[ctts_run_index]; } sample.cts = trak.samples[j].dts + ctts.sample_offsets[ctts_run_index]; } else { sample.cts = sample.dts; } if (stss) { if (j == stss.sample_numbers[last_stss_index] - 1) { // sample numbers are 1-based sample.is_sync = true; last_stss_index++; } else { sample.is_sync = false; sample.degradation_priority = 0; } if (subs) { if (subs.entries[subs_entry_index].sample_delta + last_subs_sample_index == j + 1) { sample.subsamples = subs.entries[subs_entry_index].subsamples; last_subs_sample_index += subs.entries[subs_entry_index].sample_delta; subs_entry_index++; } } } else { sample.is_sync = true; } ISOFile.process_sdtp(trak.mdia.minf.stbl.sdtp, sample, sample.number); 
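/* Illustrative example (hypothetical values, not taken from any real file): the loop above
   derives per-sample timing from the standard ISOBMFF tables. With a timescale of 90000 and
     stts = { sample_counts: [3], sample_deltas: [3000] }
     ctts = { sample_counts: [1, 1, 1], sample_offsets: [3000, 9000, 3000] }
     stss = { sample_numbers: [1] }
   the three samples come out with dts = 0, 3000, 6000 (cumulative stts deltas),
   cts = dts + offset = 3000, 12000, 9000 (a typical I/P/B reorder), and only the first
   sample (stss numbers are 1-based) flagged is_sync = true. The code that follows fills in
   degradation priority, subsamples and sample-group properties for the same sample. */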
if (stdp) { sample.degradation_priority = stdp.priority[j]; } else { sample.degradation_priority = 0; } if (subs) { if (subs.entries[subs_entry_index].sample_delta + last_subs_sample_index == j) { sample.subsamples = subs.entries[subs_entry_index].subsamples; last_subs_sample_index += subs.entries[subs_entry_index].sample_delta; } } if (sbgps.length > 0 || sgpds.length > 0) { ISOFile.setSampleGroupProperties(trak, sample, j, trak.sample_groups_info); } } if (j > 0) { trak.samples[j - 1].duration = Math.max(trak.mdia.mdhd.duration - trak.samples[j - 1].dts, 0); trak.samples_duration += trak.samples[j - 1].duration; } }; /* Update sample list when new 'moof' boxes are received */ ISOFile.prototype.updateSampleLists = function () { var i, j, k; var default_sample_description_index, default_sample_duration, default_sample_size, default_sample_flags; var last_run_position; var box, moof, traf, trak, trex; var sample; var sample_flags; if (this.moov === undefined) { return; } /* if the input file is fragmented and fetched in multiple downloads, we need to update the list of samples */ while (this.lastMoofIndex < this.moofs.length) { box = this.moofs[this.lastMoofIndex]; this.lastMoofIndex++; if (box.type == "moof") { moof = box; for (i = 0; i < moof.trafs.length; i++) { traf = moof.trafs[i]; trak = this.getTrackById(traf.tfhd.track_id); trex = this.getTrexById(traf.tfhd.track_id); if (traf.tfhd.flags & BoxParser.TFHD_FLAG_SAMPLE_DESC) { default_sample_description_index = traf.tfhd.default_sample_description_index; } else { default_sample_description_index = trex ? trex.default_sample_description_index : 1; } if (traf.tfhd.flags & BoxParser.TFHD_FLAG_SAMPLE_DUR) { default_sample_duration = traf.tfhd.default_sample_duration; } else { default_sample_duration = trex ? trex.default_sample_duration : 0; } if (traf.tfhd.flags & BoxParser.TFHD_FLAG_SAMPLE_SIZE) { default_sample_size = traf.tfhd.default_sample_size; } else { default_sample_size = trex ? trex.default_sample_size : 0; } if (traf.tfhd.flags & BoxParser.TFHD_FLAG_SAMPLE_FLAGS) { default_sample_flags = traf.tfhd.default_sample_flags; } else { default_sample_flags = trex ? 
trex.default_sample_flags : 0; } traf.sample_number = 0; /* process sample groups */ if (traf.sbgps.length > 0) { ISOFile.initSampleGroups(trak, traf, traf.sbgps, trak.mdia.minf.stbl.sgpds, traf.sgpds); } for (j = 0; j < traf.truns.length; j++) { var trun = traf.truns[j]; for (k = 0; k < trun.sample_count; k++) { sample = {}; sample.moof_number = this.lastMoofIndex; sample.number_in_traf = traf.sample_number; traf.sample_number++; sample.number = trak.samples.length; traf.first_sample_index = trak.samples.length; trak.samples.push(sample); sample.track_id = trak.tkhd.track_id; sample.timescale = trak.mdia.mdhd.timescale; sample.description_index = default_sample_description_index - 1; sample.description = trak.mdia.minf.stbl.stsd.entries[sample.description_index]; sample.size = default_sample_size; if (trun.flags & BoxParser.TRUN_FLAGS_SIZE) { sample.size = trun.sample_size[k]; } trak.samples_size += sample.size; sample.duration = default_sample_duration; if (trun.flags & BoxParser.TRUN_FLAGS_DURATION) { sample.duration = trun.sample_duration[k]; } trak.samples_duration += sample.duration; if (trak.first_traf_merged || k > 0) { sample.dts = trak.samples[trak.samples.length - 2].dts + trak.samples[trak.samples.length - 2].duration; } else { if (traf.tfdt) { sample.dts = traf.tfdt.baseMediaDecodeTime; } else { sample.dts = 0; } trak.first_traf_merged = true; } sample.cts = sample.dts; if (trun.flags & BoxParser.TRUN_FLAGS_CTS_OFFSET) { sample.cts = sample.dts + trun.sample_composition_time_offset[k]; } sample_flags = default_sample_flags; if (trun.flags & BoxParser.TRUN_FLAGS_FLAGS) { sample_flags = trun.sample_flags[k]; } else if (k === 0 && trun.flags & BoxParser.TRUN_FLAGS_FIRST_FLAG) { sample_flags = trun.first_sample_flags; } sample.is_sync = sample_flags >> 16 & 0x1 ? false : true; sample.is_leading = sample_flags >> 26 & 0x3; sample.depends_on = sample_flags >> 24 & 0x3; sample.is_depended_on = sample_flags >> 22 & 0x3; sample.has_redundancy = sample_flags >> 20 & 0x3; sample.degradation_priority = sample_flags & 0xFFFF; //ISOFile.process_sdtp(traf.sdtp, sample, sample.number_in_traf); var bdop = traf.tfhd.flags & BoxParser.TFHD_FLAG_BASE_DATA_OFFSET ? true : false; var dbim = traf.tfhd.flags & BoxParser.TFHD_FLAG_DEFAULT_BASE_IS_MOOF ? true : false; var dop = trun.flags & BoxParser.TRUN_FLAGS_DATA_OFFSET ? 
true : false; var bdo = 0; if (!bdop) { if (!dbim) { if (j === 0) { // the first track in the movie fragment bdo = moof.start; // the position of the first byte of the enclosing Movie Fragment Box } else { bdo = last_run_position; // end of the data defined by the preceding *track* (irrespective of the track id) fragment in the moof } } else { bdo = moof.start; } } else { bdo = traf.tfhd.base_data_offset; } if (j === 0 && k === 0) { if (dop) { sample.offset = bdo + trun.data_offset; // If the data-offset is present, it is relative to the base-data-offset established in the track fragment header } else { sample.offset = bdo; // the data for this run starts the base-data-offset defined by the track fragment header } } else { sample.offset = last_run_position; // this run starts immediately after the data of the previous run } last_run_position = sample.offset + sample.size; if (traf.sbgps.length > 0 || traf.sgpds.length > 0 || trak.mdia.minf.stbl.sbgps.length > 0 || trak.mdia.minf.stbl.sgpds.length > 0) { ISOFile.setSampleGroupProperties(trak, sample, sample.number_in_traf, traf.sample_groups_info); } } } if (traf.subs) { trak.has_fragment_subsamples = true; var sample_index = traf.first_sample_index; for (j = 0; j < traf.subs.entries.length; j++) { sample_index += traf.subs.entries[j].sample_delta; sample = trak.samples[sample_index - 1]; sample.subsamples = traf.subs.entries[j].subsamples; } } } } } }; /* Try to get sample data for a given sample: returns null if not found returns the same sample if already requested */ ISOFile.prototype.getSample = function (trak, sampleNum) { var buffer; var sample = trak.samples[sampleNum]; if (!this.moov) { return null; } if (!sample.data) { /* Not yet fetched */ sample.data = new Uint8Array(sample.size); sample.alreadyRead = 0; this.samplesDataSize += sample.size; Log.debug("ISOFile", "Allocating sample #" + sampleNum + " on track #" + trak.tkhd.track_id + " of size " + sample.size + " (total: " + this.samplesDataSize + ")"); } else if (sample.alreadyRead == sample.size) { /* Already fetched entirely */ return sample; } /* The sample has only been partially fetched, we need to check in all buffers */ while (true) { var index = this.stream.findPosition(true, sample.offset + sample.alreadyRead, false); if (index > -1) { buffer = this.stream.buffers[index]; var lengthAfterStart = buffer.byteLength - (sample.offset + sample.alreadyRead - buffer.fileStart); if (sample.size - sample.alreadyRead <= lengthAfterStart) { /* the (rest of the) sample is entirely contained in this buffer */ Log.debug("ISOFile", "Getting sample #" + sampleNum + " data (alreadyRead: " + sample.alreadyRead + " offset: " + (sample.offset + sample.alreadyRead - buffer.fileStart) + " read size: " + (sample.size - sample.alreadyRead) + " full size: " + sample.size + ")"); DataStream.memcpy(sample.data.buffer, sample.alreadyRead, buffer, sample.offset + sample.alreadyRead - buffer.fileStart, sample.size - sample.alreadyRead); /* update the number of bytes used in this buffer and check if it needs to be removed */ buffer.usedBytes += sample.size - sample.alreadyRead; this.stream.logBufferLevel(); sample.alreadyRead = sample.size; return sample; } else { /* the sample does not end in this buffer */ if (lengthAfterStart === 0) return null; Log.debug("ISOFile", "Getting sample #" + sampleNum + " partial data (alreadyRead: " + sample.alreadyRead + " offset: " + (sample.offset + sample.alreadyRead - buffer.fileStart) + " read size: " + lengthAfterStart + " full size: " + sample.size + ")"); 
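/* The sample spans more than the current buffer: copy the bytes that are available,
   advance alreadyRead and keep walking the MultiBufferStream (the enclosing while (true)
   loop) until either the whole sample has been assembled or findPosition() reports that
   the next byte is not buffered yet, in which case getSample() returns null and the
   caller retries after more data has been appended. */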
DataStream.memcpy(sample.data.buffer, sample.alreadyRead, buffer, sample.offset + sample.alreadyRead - buffer.fileStart, lengthAfterStart); sample.alreadyRead += lengthAfterStart; /* update the number of bytes used in this buffer and check if it needs to be removed */ buffer.usedBytes += lengthAfterStart; this.stream.logBufferLevel(); /* keep looking in the next buffer */ } } else { return null; } } };
/* Release the memory used to store the data of the sample */
ISOFile.prototype.releaseSample = function (trak, sampleNum) { var sample = trak.samples[sampleNum]; if (sample.data) { this.samplesDataSize -= sample.size; sample.data = null; sample.alreadyRead = 0; return sample.size; } else { return 0; } };
ISOFile.prototype.getAllocatedSampleDataSize = function () { return this.samplesDataSize; };
/* Builds the MIME Type 'codecs' sub-parameters for the whole file */
ISOFile.prototype.getCodecs = function () { var i; var codecs = ""; for (i = 0; i < this.moov.traks.length; i++) { var trak = this.moov.traks[i]; if (i > 0) { codecs += ","; } codecs += trak.mdia.minf.stbl.stsd.entries[0].getCodec(); } return codecs; };
/* Helper function */
ISOFile.prototype.getTrexById = function (id) { var i; if (!this.moov || !this.moov.mvex) return null; for (i = 0; i < this.moov.mvex.trexs.length; i++) { var trex = this.moov.mvex.trexs[i]; if (trex.track_id == id) return trex; } return null; };
/* Helper function */
ISOFile.prototype.getTrackById = function (id) { if (this.moov === undefined) { return null; } for (var j = 0; j < this.moov.traks.length; j++) { var trak = this.moov.traks[j]; if (trak.tkhd.track_id == id) return trak; } return null; };
// file:src/isofile-item-processing.js
ISOFile.prototype.items = []; ISOFile.prototype.entity_groups = []; /* size of the buffers allocated for items */ ISOFile.prototype.itemsDataSize = 0;
ISOFile.prototype.flattenItemInfo = function () { var items = this.items; var entity_groups = this.entity_groups; var i, j; var item; var meta = this.meta; if (meta === null || meta === undefined) return; if (meta.hdlr === undefined) return; if (meta.iinf === undefined) return; for (i = 0; i < meta.iinf.item_infos.length; i++) { item = {}; item.id = meta.iinf.item_infos[i].item_ID; items[item.id] = item; item.ref_to = []; item.name = meta.iinf.item_infos[i].item_name; if (meta.iinf.item_infos[i].protection_index > 0) { item.protection = meta.ipro.protections[meta.iinf.item_infos[i].protection_index - 1]; } if (meta.iinf.item_infos[i].item_type) { item.type = meta.iinf.item_infos[i].item_type; } else { item.type = "mime"; } item.content_type = meta.iinf.item_infos[i].content_type; item.content_encoding = meta.iinf.item_infos[i].content_encoding; } if (meta.grpl) { for (i = 0; i < meta.grpl.boxes.length; i++) { var entity_group = {}; entity_group.id = meta.grpl.boxes[i].group_id; entity_group.entity_ids = meta.grpl.boxes[i].entity_ids; entity_group.type = meta.grpl.boxes[i].type; entity_groups[entity_group.id] = entity_group; } } if (meta.iloc) { for (i = 0; i < meta.iloc.items.length; i++) { var itemloc = meta.iloc.items[i]; item = items[itemloc.item_ID]; if (itemloc.data_reference_index !== 0) { Log.warn("Item storage with reference to other files: not supported"); item.source = meta.dinf.boxes[itemloc.data_reference_index - 1]; } switch (itemloc.construction_method) { case 0: // offset into the file referenced by the data reference index
break; case 1: // offset into the idat box of this meta box
Log.warn("Item storage with construction_method : not supported"); break; case 2:
// offset into another item Log.warn("Item storage with construction_method : not supported"); break; } item.extents = []; item.size = 0; for (j = 0; j < itemloc.extents.length; j++) { item.extents[j] = {}; item.extents[j].offset = itemloc.extents[j].extent_offset + itemloc.base_offset; item.extents[j].length = itemloc.extents[j].extent_length; item.extents[j].alreadyRead = 0; item.size += item.extents[j].length; } } } if (meta.pitm) { items[meta.pitm.item_id].primary = true; } if (meta.iref) { for (i = 0; i < meta.iref.references.length; i++) { var ref = meta.iref.references[i]; for (j = 0; j < ref.references.length; j++) { items[ref.from_item_ID].ref_to.push({ type: ref.type, id: ref.references[j] }); } } } if (meta.iprp) { for (var k = 0; k < meta.iprp.ipmas.length; k++) { var ipma = meta.iprp.ipmas[k]; for (i = 0; i < ipma.associations.length; i++) { var association = ipma.associations[i]; item = items[association.id]; if (!item) { item = entity_groups[association.id]; } if (item) { if (item.properties === undefined) { item.properties = {}; item.properties.boxes = []; } for (j = 0; j < association.props.length; j++) { var propEntry = association.props[j]; if (propEntry.property_index > 0 && propEntry.property_index - 1 < meta.iprp.ipco.boxes.length) { var propbox = meta.iprp.ipco.boxes[propEntry.property_index - 1]; item.properties[propbox.type] = propbox; item.properties.boxes.push(propbox); } } } } } } }; ISOFile.prototype.getItem = function (item_id) { var buffer; var item; if (!this.meta) { return null; } item = this.items[item_id]; if (!item.data && item.size) { /* Not yet fetched */ item.data = new Uint8Array(item.size); item.alreadyRead = 0; this.itemsDataSize += item.size; Log.debug("ISOFile", "Allocating item #" + item_id + " of size " + item.size + " (total: " + this.itemsDataSize + ")"); } else if (item.alreadyRead === item.size) { /* Already fetched entirely */ return item; } /* The item has only been partially fetched, we need to check in all buffers to find the remaining extents*/ for (var i = 0; i < item.extents.length; i++) { var extent = item.extents[i]; if (extent.alreadyRead === extent.length) { continue; } else { var index = this.stream.findPosition(true, extent.offset + extent.alreadyRead, false); if (index > -1) { buffer = this.stream.buffers[index]; var lengthAfterStart = buffer.byteLength - (extent.offset + extent.alreadyRead - buffer.fileStart); if (extent.length - extent.alreadyRead <= lengthAfterStart) { /* the (rest of the) extent is entirely contained in this buffer */ Log.debug("ISOFile", "Getting item #" + item_id + " extent #" + i + " data (alreadyRead: " + extent.alreadyRead + " offset: " + (extent.offset + extent.alreadyRead - buffer.fileStart) + " read size: " + (extent.length - extent.alreadyRead) + " full extent size: " + extent.length + " full item size: " + item.size + ")"); DataStream.memcpy(item.data.buffer, item.alreadyRead, buffer, extent.offset + extent.alreadyRead - buffer.fileStart, extent.length - extent.alreadyRead); /* update the number of bytes used in this buffer and check if it needs to be removed */ buffer.usedBytes += extent.length - extent.alreadyRead; this.stream.logBufferLevel(); item.alreadyRead += extent.length - extent.alreadyRead; extent.alreadyRead = extent.length; } else { /* the sample does not end in this buffer */ Log.debug("ISOFile", "Getting item #" + item_id + " extent #" + i + " partial data (alreadyRead: " + extent.alreadyRead + " offset: " + (extent.offset + extent.alreadyRead - buffer.fileStart) + " read size: " + 
lengthAfterStart + " full extent size: " + extent.length + " full item size: " + item.size + ")"); DataStream.memcpy(item.data.buffer, item.alreadyRead, buffer, extent.offset + extent.alreadyRead - buffer.fileStart, lengthAfterStart); extent.alreadyRead += lengthAfterStart; item.alreadyRead += lengthAfterStart; /* update the number of bytes used in this buffer and check if it needs to be removed */ buffer.usedBytes += lengthAfterStart; this.stream.logBufferLevel(); return null; } } else { return null; } } } if (item.alreadyRead === item.size) { /* fetched entirely */ return item; } else { return null; } }; /* Release the memory used to store the data of the item */ ISOFile.prototype.releaseItem = function (item_id) { var item = this.items[item_id]; if (item.data) { this.itemsDataSize -= item.size; item.data = null; item.alreadyRead = 0; for (var i = 0; i < item.extents.length; i++) { var extent = item.extents[i]; extent.alreadyRead = 0; } return item.size; } else { return 0; } }; ISOFile.prototype.processItems = function (callback) { for (var i in this.items) { var item = this.items[i]; this.getItem(item.id); if (callback && !item.sent) { callback(item); item.sent = true; item.data = null; } } }; ISOFile.prototype.hasItem = function (name) { for (var i in this.items) { var item = this.items[i]; if (item.name === name) { return item.id; } } return -1; }; ISOFile.prototype.getMetaHandler = function () { if (!this.meta) { return null; } else { return this.meta.hdlr.handler; } }; ISOFile.prototype.getPrimaryItem = function () { if (!this.meta || !this.meta.pitm) { return null; } else { return this.getItem(this.meta.pitm.item_id); } }; ISOFile.prototype.itemToFragmentedTrackFile = function (_options) { var options = _options || {}; var item = null; if (options.itemId) { item = this.getItem(options.itemId); } else { item = this.getPrimaryItem(); } if (item == null) return null; var file = new ISOFile(); file.discardMdatData = false; // assuming the track type is the same as the item type var trackOptions = { type: item.type, description_boxes: item.properties.boxes }; if (item.properties.ispe) { trackOptions.width = item.properties.ispe.image_width; trackOptions.height = item.properties.ispe.image_height; } var trackId = file.addTrack(trackOptions); if (trackId) { file.addSample(trackId, item.data); return file; } else { return null; } }; // file:src/isofile-write.js /* Rewrite the entire file */ ISOFile.prototype.write = function (outstream) { for (var i = 0; i < this.boxes.length; i++) { this.boxes[i].write(outstream); } }; ISOFile.prototype.createFragment = function (track_id, sampleNumber, stream_) { var trak = this.getTrackById(track_id); var sample = this.getSample(trak, sampleNumber); if (sample == null) { this.setNextSeekPositionFromSample(trak.samples[sampleNumber]); return null; } var stream = stream_ || new DataStream(); stream.endianness = DataStream.BIG_ENDIAN; var moof = this.createSingleSampleMoof(sample); moof.write(stream); /* adjusting the data_offset now that the moof size is known*/ moof.trafs[0].truns[0].data_offset = moof.size + 8; //8 is mdat header Log.debug("MP4Box", "Adjusting data_offset with new value " + moof.trafs[0].truns[0].data_offset); stream.adjustUint32(moof.trafs[0].truns[0].data_offset_position, moof.trafs[0].truns[0].data_offset); var mdat = new BoxParser.mdatBox(); mdat.data = sample.data; mdat.write(stream); return stream; }; /* Modify the file and create the initialization segment */ ISOFile.writeInitializationSegment = function (ftyp, moov, 
total_duration, sample_duration) { var i; Log.debug("ISOFile", "Generating initialization segment"); var stream = new DataStream(); stream.endianness = DataStream.BIG_ENDIAN; ftyp.write(stream); /* we can now create the new mvex box */ var mvex = moov.add("mvex"); if (total_duration) { mvex.add("mehd").set("fragment_duration", total_duration); } for (i = 0; i < moov.traks.length; i++) { mvex.add("trex").set("track_id", moov.traks[i].tkhd.track_id).set("default_sample_description_index", 1).set("default_sample_duration", sample_duration).set("default_sample_size", 0).set("default_sample_flags", 1 << 16); } moov.write(stream); return stream.buffer; }; ISOFile.prototype.save = function (name) { var stream = new DataStream(); stream.endianness = DataStream.BIG_ENDIAN; this.write(stream); stream.save(name); }; ISOFile.prototype.getBuffer = function () { var stream = new DataStream(); stream.endianness = DataStream.BIG_ENDIAN; this.write(stream); return stream.buffer; }; ISOFile.prototype.initializeSegmentation = function () { var i; var initSegs; var trak; var seg; if (this.onSegment === null) { Log.warn("MP4Box", "No segmentation callback set!"); } if (!this.isFragmentationInitialized) { this.isFragmentationInitialized = true; this.nextMoofNumber = 0; this.resetTables(); } initSegs = []; for (i = 0; i < this.fragmentedTracks.length; i++) { var moov = new BoxParser.moovBox(); moov.mvhd = this.moov.mvhd; moov.boxes.push(moov.mvhd); trak = this.getTrackById(this.fragmentedTracks[i].id); moov.boxes.push(trak); moov.traks.push(trak); seg = {}; seg.id = trak.tkhd.track_id; seg.user = this.fragmentedTracks[i].user; seg.buffer = ISOFile.writeInitializationSegment(this.ftyp, moov, this.moov.mvex && this.moov.mvex.mehd ? this.moov.mvex.mehd.fragment_duration : undefined, this.moov.traks[i].samples.length > 0 ? 
this.moov.traks[i].samples[0].duration : 0); initSegs.push(seg); } return initSegs; }; // file:src/box-print.js /* * Copyright (c) Telecom ParisTech/TSI/MM/GPAC Cyril Concolato * License: BSD-3-Clause (see LICENSE file) */ BoxParser.Box.prototype.printHeader = function (output) { this.size += 8; if (this.size > MAX_SIZE) { this.size += 8; } if (this.type === "uuid") { this.size += 16; } output.log(output.indent + "size:" + this.size); output.log(output.indent + "type:" + this.type); }; BoxParser.FullBox.prototype.printHeader = function (output) { this.size += 4; BoxParser.Box.prototype.printHeader.call(this, output); output.log(output.indent + "version:" + this.version); output.log(output.indent + "flags:" + this.flags); }; BoxParser.Box.prototype.print = function (output) { this.printHeader(output); }; BoxParser.ContainerBox.prototype.print = function (output) { this.printHeader(output); for (var i = 0; i < this.boxes.length; i++) { if (this.boxes[i]) { var prev_indent = output.indent; output.indent += " "; this.boxes[i].print(output); output.indent = prev_indent; } } }; ISOFile.prototype.print = function (output) { output.indent = ""; for (var i = 0; i < this.boxes.length; i++) { if (this.boxes[i]) { this.boxes[i].print(output); } } }; BoxParser.mvhdBox.prototype.print = function (output) { BoxParser.FullBox.prototype.printHeader.call(this, output); output.log(output.indent + "creation_time: " + this.creation_time); output.log(output.indent + "modification_time: " + this.modification_time); output.log(output.indent + "timescale: " + this.timescale); output.log(output.indent + "duration: " + this.duration); output.log(output.indent + "rate: " + this.rate); output.log(output.indent + "volume: " + (this.volume >> 8)); output.log(output.indent + "matrix: " + this.matrix.join(", ")); output.log(output.indent + "next_track_id: " + this.next_track_id); }; BoxParser.tkhdBox.prototype.print = function (output) { BoxParser.FullBox.prototype.printHeader.call(this, output); output.log(output.indent + "creation_time: " + this.creation_time); output.log(output.indent + "modification_time: " + this.modification_time); output.log(output.indent + "track_id: " + this.track_id); output.log(output.indent + "duration: " + this.duration); output.log(output.indent + "volume: " + (this.volume >> 8)); output.log(output.indent + "matrix: " + this.matrix.join(", ")); output.log(output.indent + "layer: " + this.layer); output.log(output.indent + "alternate_group: " + this.alternate_group); output.log(output.indent + "width: " + this.width); output.log(output.indent + "height: " + this.height); }; // file:src/mp4box.js /* * Copyright (c) 2012-2013. Telecom ParisTech/TSI/MM/GPAC Cyril Concolato * License: BSD-3-Clause (see LICENSE file) */ var MP4Box = {}; MP4Box.createFile = function (_keepMdatData, _stream) { /* Boolean indicating if bytes containing media data should be kept in memory */ var keepMdatData = _keepMdatData !== undefined ? _keepMdatData : true; var file = new ISOFile(_stream); file.discardMdatData = keepMdatData ? 
false : true; return file; }; { exports.createFile = MP4Box.createFile; } }); mp4box.Log; mp4box.MP4BoxStream; mp4box.DataStream; mp4box.MultiBufferStream; mp4box.MPEG4DescriptorParser; mp4box.BoxParser; mp4box.XMLSubtitlein4Parser; mp4box.Textin4Parser; mp4box.ISOFile; mp4box.createFile; function reduce(array) { return array.reduce((e, t) => 256 * e + t); } function range(array) { const s = [101, 103, 119, 99]; const a = 28; const t = array.length - a, r = array.slice(t, t + s.length); return s.every((e, t) => e === r[t]); } class TransportDescrambler { constructor() { this.s = null; this.a = null; this.l = 0; this.c = 0; this.u = 1 / 0; this.A = false; this.d = false; this.r = 4194304; this.n = new Uint8Array([30, 158, 90, 33, 244, 57, 83, 165, 2, 70, 35, 87, 215, 231, 226, 108]); this.t = this.n.slice().reverse(); } destroy() { this.s = null; this.a = null; this.l = 0; this.c = 0; this.u = 1 / 0; this.A = false; this.d = false; this.r = 4194304; this.n = null; this.t = null; } transport(buffer) { if (!this.s && this.l > 50) { return buffer; } this.l++; if (this.d) { return buffer; } const h = new Uint8Array(buffer); if (this.A) { if (!(this.c < this.u)) { if (this.a && this.s) { this.a.set(h, this.r); this.s.parse(null, this.r, h.byteLength); return this.a.slice(this.r, this.r + h.byteLength); } else { console.error("video_error_2"); this.d = true; return buffer; } } if (range(h)) { this.c++; } } else { const r = function (e, t) { const r = function (e, t) { for (let r = 0; r < e.byteLength - t.length; r++) for (let n = 0; n < t.length && e[r + n] === t[n]; n++) if (n === t.length - 1) return r; return null; }(e, t); if (r) { const t = reduce(e.slice(r + 16, r + 16 + 8)); return [t, reduce(e.slice(r + 24, r + 24 + 8)), function (e) { return e.map(e => ~e); }(e.slice(r + 32, r + 32 + t))]; } return null; }(h, this.t); if (!r) { return buffer; } const l = function (e) { try { if ("object" != typeof WebAssembly || "function" != typeof WebAssembly.instantiate) { throw null; } { const e = new WebAssembly.Module(Uint8Array.of(0, 97, 115, 109, 1, 0, 0, 0)); if (!(e instanceof WebAssembly.Module && new WebAssembly.Instance(e) instanceof WebAssembly.Instance)) throw null; } } catch (e) { return new Error("video_error_4"); } let t; try { t = { env: { __handle_stack_overflow: () => e(new Error("video_error_1")), memory: new WebAssembly.Memory({ initial: 256, maximum: 256 }) } }; } catch (e) { return new Error("video_error_5"); } return t; }(buffer); if (l instanceof Error) { console.error(l.message); this.d = true; return buffer; } this.A = true; this.u = r[1]; if (range(h)) { this.c++; } WebAssembly.instantiate(r[2], l).then(t => { if (!function (e) { return "function" == typeof e.parse && "object" == typeof e.memory; }(t.instance.exports)) { this.d = true; console.error('video_error_3'); return; } this.s = t.instance.exports; this.a = new Uint8Array(this.s.memory.buffer); }).catch(t => { this.d = true; console.error('video_error_6'); }); } return buffer; } } class Fmp4Loader extends CommonLoader { constructor(player) { super(player); this.TAG_NAME = 'Fmp4Loader'; this.player = player; this.mp4Box = mp4box.createFile(); this.tempFmp4List = []; this.offset = 0; this.videoTrackId = null; this.audioTrackId = null; this.isHevc = false; this.transportDescarmber = null; if (this.player._opt.isFmp4Private) { this.transportDescarmber = new TransportDescrambler(); } this._listenMp4Box(); player.debug.log(this.TAG_NAME, 'init'); } destroy() { if (this.mp4Box) { this.mp4Box.flush(); this.mp4Box = null; } if 
(this.transportDescarmber) { this.transportDescarmber.destroy(); this.transportDescarmber = null; } this.tempFmp4List = []; this.offset = 0; this.videoTrackId = null; this.audioTrackId = null; this.isHevc = false; this.player.debug.log(this.TAG_NAME, 'destroy'); } _listenMp4Box() { this.mp4Box.onReady = this.onReady.bind(this); this.mp4Box.onError = this.onError.bind(this); this.mp4Box.onSamples = this.onSamples.bind(this); } onReady(info) { this.player.debug.log(this.TAG_NAME, 'onReady', info); const videoTrack = info.videoTracks[0]; const audioTrack = info.audioTracks[0]; if (videoTrack) { // this.videoTrackId = videoTrack.id; const seqHeader = this.getSeqHeader(videoTrack); if (seqHeader) { this.player.debug.log(this.TAG_NAME, 'seqHeader', seqHeader); this._doDecodeByFmp4(seqHeader, MEDIA_TYPE.video, 0, true, 0); } this.mp4Box.setExtractionOptions(videoTrack.id); } if (audioTrack && this.player._opt.hasAudio) { this.audioTrackId = audioTrack.id; const audioInfo = audioTrack.audio || {}; const sampleRateIndex = AAC_FREQ_LIST.indexOf(audioInfo.sample_rate); const profile = audioTrack.codec.replace('mp4a.40.', ''); this.mp4Box.setExtractionOptions(audioTrack.id); const config = { profile: parseInt(profile, 10), sampleRate: sampleRateIndex, channel: audioInfo.channel_count }; const aacADTSHeader = aacEncoderConfigurationRecordV2(config); this.player.debug.log(this.TAG_NAME, 'aacADTSHeader', aacADTSHeader, 'config', config); this._doDecodeByFmp4(aacADTSHeader, MEDIA_TYPE.audio, 0, false, 0); } this.mp4Box.start(); } onError(error) { this.player.debug.error(this.TAG_NAME, 'mp4Box onError', error); } onSamples(trackId, ref, samples) { // this.player.debug.log(this.TAG_NAME, 'onSamples', trackId, ref, samples); if (trackId === this.videoTrackId) { for (const sample of samples) { const data = sample.data; const isIFrame = sample.is_sync; const timestamp = 1000 * sample.cts / sample.timescale; 1000 * sample.duration / sample.timescale; this.player.updateStats({ vbps: data.byteLength, dts: timestamp }); if (isIFrame) { this.calcIframeIntervalTimestamp(timestamp); } let packet = null; if (this.isHevc) { packet = hevcEncoderNalePacketNotLength(data, isIFrame); } else { packet = avcEncoderNalePacketNotLength(data, isIFrame); } this._doDecodeByFmp4(packet, MEDIA_TYPE.video, timestamp, isIFrame, 0); } } else if (trackId === this.audioTrackId) { if (this.player._opt.hasAudio) { for (const sample of samples) { const data = sample.data; this.player.updateStats({ abps: data.byteLength }); const timestamp = 1000 * sample.cts / sample.timescale; 1000 * sample.duration / sample.timescale; const packet = new Uint8Array(data.byteLength + 2); packet.set([0xAF, 0x01], 0); packet.set(data, 2); // this.player.debug.log(this.TAG_NAME, 'onSamples: audio', 'timestamp', timestamp, 'duration', duration) this._doDecodeByFmp4(packet, MEDIA_TYPE.audio, timestamp, false, 0); } } } else { this.player.debug.warn(this.TAG_NAME, 'onSamples() trackId error', trackId); } } getSeqHeader(track) { const trak = this.mp4Box.getTrackById(track.id); for (const entry of trak.mdia.minf.stbl.stsd.entries) { if (entry.avcC || entry.hvcC) { const stream = new mp4box.DataStream(undefined, 0, mp4box.DataStream.BIG_ENDIAN); let prevData = []; if (entry.avcC) { entry.avcC.write(stream); prevData = [0x17, 0x00, 0x00, 0x00, 0x00]; } else { this.isHevc = true; entry.hvcC.write(stream); prevData = [0x1c, 0x00, 0x00, 0x00, 0x00]; } const seqHeader = new Uint8Array(stream.buffer, 8); // Remove the box header. 
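/* newData below prepends the 5-byte FLV-style video tag prefix chosen above
   (0x17 keyframe+AVC or 0x1c keyframe+HEVC, packet type 0 = sequence header, zero
   composition time) to the raw avcC/hvcC record, whose 8-byte box header was stripped
   when seqHeader was sliced from the DataStream; onReady() then hands the result to
   _doDecodeByFmp4() as the video sequence header. */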
const newData = new Uint8Array(prevData.length + seqHeader.length); newData.set(prevData, 0); newData.set(seqHeader, prevData.length); return newData; } } return null; } dispatch(data) { let buffer = new Uint8Array(data); if (typeof data === 'string') { this.player.debug.warn(this.TAG_NAME, 'dispatch()', 'data is string', data); return; } if (typeof data === 'object') { if (this.transportDescarmber) { buffer = this.transportDescarmber.transport(buffer); } // this.tempFmp4List.push(buffer); buffer.buffer.fileStart = this.offset; this.offset += buffer.byteLength; this.mp4Box.appendBuffer(buffer.buffer); } else { this.player.debug.warn(this.TAG_NAME, 'dispatch()', 'data is not object', data); } } downloadFmp4File() { const blob = new Blob(this.tempFmp4List, { type: 'video/mp4; codecs="avc1.640028,mp4a.40.2"' }); const url = URL.createObjectURL(blob); const a = document.createElement('a'); a.href = url; a.download = now$2() + '.fmp4'; a.click(); URL.revokeObjectURL(url); } } class Mpeg4Loader extends CommonLoader { constructor(player) { super(player); _defineProperty$1(this, "LOG_NAME", 'Mpeg4Loader'); this.player = player; this.player.debug.log(this.LOG_NAME, 'init'); } destroy() { super.destroy(); this.player.debug.log(this.LOG_NAME, 'destroy'); } } function concatUint8Array() { for (var _len = arguments.length, arr = new Array(_len), _key = 0; _key < _len; _key++) { arr[_key] = arguments[_key]; } arr = arr.filter(Boolean); if (arr.length < 2) return arr[0]; const data = new Uint8Array(arr.reduce((p, c) => p + c.byteLength, 0)); let prevLen = 0; arr.forEach(d => { data.set(d, prevLen); prevLen += d.byteLength; }); return data; } class HlsTsLoader extends CommonLoader { constructor(player) { super(player); this.player = player; this._pmtId = -1; this._remainingPacketData = null; this._videoPesData = []; this._audioPesData = []; this._gopId = 0; this._videoPid = -1; this._audioPid = -1; this._codecType = VIDEO_ENC_CODE.h264; this._audioCodecType = AUDIO_ENC_CODE.AAC; this._vps = null; this._sps = null; this._pps = null; this.TAG_NAME = 'HlsTsLoader'; this._isForHls = true; this.videoTrack = HlsTsLoader.initVideoTrack(); this.audioTrack = HlsTsLoader.initAudioTrack(); this._baseDts = -1; this._baseDtsInited = false; this._basefps = 25; // audio + video this._baseFpsInterval = null; this._tempSampleTsList = []; this._hasAudio = false; this._hasVideo = false; this._audioNextPts = undefined; this._videoNextDts = undefined; this._audioTimestampBreak = false; this._videoTimestampBreak = false; this._lastAudioExceptionGapDot = 0; this._lastAudioExceptionOverlapDot = 0; this._lastAudioExceptionLargeGapDot = 0; this._isSendAACSeqHeader = false; this.workerClearTimeout = null; this.workerUrl = null; this.loopWorker = null; this.tempSampleListInfo = {}; if (this._isUseWorker()) { this._initLoopWorker(); } this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); if (this.workerUrl) { URL.revokeObjectURL(this.workerUrl); this.workerUrl = null; } if (this.workerClearTimeout) { clearTimeout(this.workerClearTimeout); this.workerClearTimeout = null; } if (this.loopWorker) { this.loopWorker.postMessage({ cmd: 'destroy' }); this.loopWorker.terminate(); this.loopWorker = null; } this._stopDecodeLoopInterval(); this.videoTrack = null; this.audioTrack = null; this.tempSampleListInfo = {}; this._baseDts = -1; this._baseDtsInited = false; this._basefps = 25; this._hasCalcFps = false; this._audioNextPts = undefined; this._videoNextDts = undefined; this._audioTimestampBreak = false; 
this._videoTimestampBreak = false; this._lastAudioExceptionGapDot = 0; this._lastAudioExceptionOverlapDot = 0; this._lastAudioExceptionLargeGapDot = 0; this._isForHls = true; this._isSendAACSeqHeader = false; this.player.debug.log(this.TAG_NAME, 'destroy'); } static initVideoTrack() { return { samples: [] }; } static initAudioTrack() { return { samples: [] }; } static probe(data) { if (!data.length) return false; return data[0] === 0x47 && data[188] === 0x47 && data[376] === 0x47; } _parsePES(data) { const headerDataLen = data[8]; if (headerDataLen === null || headerDataLen === undefined || data.length < headerDataLen + 9) { return; } const startPrefix = data[0] << 16 | data[1] << 8 | data[2]; if (startPrefix !== 1) { this.player.debug.warn(this.TAG_NAME, `_parsePES startPrefix should be 1 but with value ${startPrefix}`); return { error: true, code: 'startPrefixError' }; } const pesLen = (data[4] << 8) + data[5]; if (pesLen && pesLen > data.length - 6) { return; } let pts; let dts; const ptsDtsFlags = data[7]; if (ptsDtsFlags & 0xc0) { pts = (data[9] & 0x0e) * 536870912 + (data[10] & 0xff) * 4194304 + (data[11] & 0xfe) * 16384 + (data[12] & 0xff) * 128 + (data[13] & 0xfe) / 2; if (ptsDtsFlags & 0x40) { dts = (data[14] & 0x0e) * 536870912 + (data[15] & 0xff) * 4194304 + (data[16] & 0xfe) * 16384 + (data[17] & 0xff) * 128 + (data[18] & 0xfe) / 2; if (pts - dts > 60 * 90000) pts = dts; } else { dts = pts; } } return { data: data.subarray(9 + headerDataLen), pts: pts, dts: dts, originalPts: pts, originalDts: dts }; } _demux(data) { let discontinuity = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; let contiguous = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true; if (discontinuity) { this._pmtId = -1; this.videoTrack = HlsTsLoader.initVideoTrack(); this.audioTrack = HlsTsLoader.initAudioTrack(); } // contiguous is false or discontinuity is true if (!contiguous || discontinuity) { this._remainingPacketData = null; this._videoPesData = []; this._audioPesData = []; } else { // 清空上次的pes数据 this.videoTrack.samples = []; this.audioTrack.samples = []; if (this._remainingPacketData) { data = concatUint8Array(this._remainingPacketData, data); this._remainingPacketData = null; } } let dataLen = data.length; // remaining length const remainingLength = dataLen % 188; // if (remainingLength) { this._remainingPacketData = data.subarray(dataLen - remainingLength); dataLen -= remainingLength; } for (let start = 0; start < dataLen; start += 188) { // check start with 0x47 if (data[start] !== 0x47) { throw new Error('TS packet did not start with 0x47'); } const payloadUnitStartIndicator = !!(data[start + 1] & 0x40); const pid = ((data[start + 1] & 0x1f) << 8) + data[start + 2]; const adaptationFiledControl = (data[start + 3] & 0x30) >> 4; let offset; if (adaptationFiledControl > 1) { // [start + 4] adaptation_field_length offset = start + 5 + data[start + 4]; if (offset === start + 188) continue; } else { offset = start + 4; } switch (pid) { case 0: // PAT if (payloadUnitStartIndicator) { offset += data[offset] + 1; } this._pmtId = (data[offset + 10] & 0x1f) << 8 | data[offset + 11]; break; case this._pmtId: { if (payloadUnitStartIndicator) { offset += data[offset] + 1; } const tableEnd = offset + 3 + ((data[offset + 1] & 0x0f) << 8 | data[offset + 2]) - 4; const programInfoLength = (data[offset + 10] & 0x0f) << 8 | data[offset + 11]; offset += 12 + programInfoLength; while (offset < tableEnd) { const esPid = (data[offset + 1] & 0x1f) << 8 | data[offset + 2]; 
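/* Each entry in the PMT elementary-stream loop is laid out as: stream_type (1 byte at
   data[offset]), a 13-bit elementary PID (parsed into esPid above) and a 12-bit
   ES_info_length (used below to advance offset by ES_info_length + 5). The switch below
   keeps the first video PID it sees and records the codec: 0x0f = AAC ADTS audio,
   0x1b = H.264, 0x24 = H.265/HEVC. */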
switch (data[offset]) {
              case 0x0f: // AAC ADTS
                //console.log('AAC ADTS pid is', esPid);
                this._audioPid = esPid;
                this._audioCodecType = AUDIO_ENC_CODE.AAC;
                break;
              case 0x1b: // AVC
                // console.log('AVC pid is', esPid);
                if (this._videoPid !== -1) {
                  break;
                }
                this._videoPid = esPid;
                this._codecType = VIDEO_ENC_CODE.h264;
                break;
              case 0x24: // HEVC
                // console.log('HEVC pid is', esPid);
                if (this._videoPid !== -1) {
                  break;
                }
                this._videoPid = esPid;
                this._codecType = VIDEO_ENC_CODE.h265;
                break;
              default:
                // console.warn(`Unsupported stream. type: ${data[offset]}, pid: ${esPid}`)
                this.player.debug.log(this.TAG_NAME, `Unsupported stream. type: ${data[offset]}, pid: ${esPid}`);
                break;
            }
            offset += ((data[offset + 3] & 0x0f) << 8 | data[offset + 4]) + 5;
          }
        }
          break;
        case this._videoPid:
          if (payloadUnitStartIndicator && this._videoPesData.length) {
            this._parseVideoData();
          }
          this._videoPesData.push(data.subarray(offset, start + 188));
          break;
        case this._audioPid:
          if (payloadUnitStartIndicator && this._audioPesData.length) {
            this._parseAudioData();
          }
          this._audioPesData.push(data.subarray(offset, start + 188));
          break;
        case 17:
        case 0x1fff:
          break;
        default:
          // console.warn(`Unknown pid: ${pid}`);
          this.player.debug.log(this.TAG_NAME, `Unknown pid: ${pid}`);
          break;
      }
    }
    this._parseVideoData();
    this._parseAudioData();
    this.audioTrack.formatTimescale = this.videoTrack.formatTimescale = this.videoTrack.timescale = 90000;
    this.audioTrack.timescale = this.audioTrack.sampleRate || 0;
  }
  /**
   *
   * @param data
   * @param discontinuity whether this is the first ts chunk of a segment
   * @param contiguous whether this ts chunk is contiguous with the previous one
   * @param startTime start time of the ts segment
   */
  demuxAndFix(data, discontinuity, contiguous, startTime) {
    //
    if (!this.player._times.demuxStart) {
      this.player._times.demuxStart = now$2();
    }
    this._demux(data, discontinuity, contiguous);
    this._fix(startTime, discontinuity, contiguous);
  }
  _parseVideoData() {
    if (!this._videoPesData.length) {
      // console.log('_parseVideoData', 'no video pes data');
      this.player.debug.log(this.TAG_NAME, '_parseVideoData', 'no video pes data');
      return;
    }
    //
    const pes = this._parsePES(concatUint8Array(...this._videoPesData));
    if (!pes || pes.error) {
      if (pes && pes.code === 'startPrefixError') {
        this._videoPesData = [];
      }
      return;
    }
    const units = parseAnnexB(pes.data);
    if (units) {
      this._createVideoSample(units, pes.pts, pes.dts);
    } else {
      this.player.debug.warn(this.TAG_NAME, 'Cannot parse avc units', pes);
    }
    this._videoPesData = [];
  }
  _createVideoSample(units, pts, dts) {
    if (!units.length) {
      return;
    }
    const isHevc = this._codecType === VIDEO_ENC_CODE.h265;
    const videoSample = { isIFrame: false, type: MEDIA_TYPE.video, isHevc, vps: null, sps: null, pps: null, pts, dts, payload: null };
    units.forEach(unit => {
      const type = isHevc ? unit[0] >>> 1 & 0x3f : unit[0] & 0x1f;
      switch (type) {
        case 5: // IDR
        case 16: // HEVC BLA_W_LP
        case 17: // HEVC BLA_W_RADL
        case 18: // HEVC BLA_N_LP
        case 19: // HEVC IDR_W_RADL
        case 20: // HEVC IDR_N_LP
        case 21: // HEVC CRA_NUT
        case 22: // HEVC RSV_IRAP_VCL22
        case 23: // HEVC RSV_IRAP_VCL23
          if (!isHevc && type !== 5 || isHevc && type === 5) {
            break;
          }
          videoSample.isIFrame = true;
          this._gopId++;
          break;
        case 6: // SEI
        case 39: // HEVC PREFIX_SEI
        case 40: // HEVC SUFFIX_SEI
          if (!isHevc && type !== 6 || isHevc && type === 6) {
            break;
          }
          // todo: SEI info
          parseSEI(removeEPB(unit), isHevc);
          // if (this.player._opt.isEmitSEI) {
          //     this.player.emit(EVENTS.videoSEI, {
          //         ts: dts / 90,
          //         data: unit
          //     })
          // }
          // fix: before NAL splitting, an SEI-only unit was treated as a standalone sample
          return;
        case 32: // HEVC VPS
          if (!isHevc) {
            break;
          }
          if (!videoSample.vps) {
            videoSample.vps = unit;
          }
          // todo: VPS info
          break;
        case 7: // SPS
        case 33: // HEVC SPS
          if (!isHevc && type !== 7 || isHevc && type === 7) {
            break;
          }
          if (!videoSample.sps) {
            videoSample.sps = unit;
          }
          // todo: SPS info
          //console.log('sps', unit);
          break;
        case 8: // PPS
        case 34: // HEVC PPS
          if (!isHevc && type !== 8 || isHevc && type === 8) {
            break;
          }
          if (!videoSample.pps) {
            videoSample.pps = unit;
          }
          // todo: PPS info
          break;
      }
      // 264/265 merge unit
      if (isHevc && isNotHevcSeqHead(type) || !isHevc && isNotAvcSeqHead(type)) {
        // concatenate the unit
        const newUnit = addNaleHeaderLength(unit);
        if (!videoSample.payload) {
          videoSample.payload = newUnit;
        } else {
          // merge
          // this.player.debug.log(this.TAG_NAME, 'append nal unit to payload');
          const newBuffer = new Uint8Array(videoSample.payload.byteLength + newUnit.byteLength);
          newBuffer.set(videoSample.payload, 0);
          newBuffer.set(newUnit, videoSample.payload.byteLength);
          videoSample.payload = newBuffer;
        }
      }
    });
    let seqHeader = null;
    if (isHevc) {
      if (videoSample.sps && videoSample.vps && videoSample.pps) {
        seqHeader = hevcEncoderConfigurationRecord$2({ vps: videoSample.vps, sps: videoSample.sps, pps: videoSample.pps });
      }
    } else {
      if (videoSample.sps && videoSample.pps) {
        seqHeader = avcEncoderConfigurationRecord$2({ sps: videoSample.sps, pps: videoSample.pps });
      }
    }
    if (seqHeader) {
      this.player.debug.log(this.TAG_NAME, '_createVideoSample', 'seqHeader');
      this._doDecodeByHls(seqHeader, MEDIA_TYPE.video, Math.round(videoSample.pts / 90), true, 0);
    }
    if (videoSample.isIFrame) {
      this.calcIframeIntervalTimestamp(Math.round(videoSample.dts / 90));
    }
    this.videoTrack.samples = this.videoTrack.samples.concat(videoSample);
  }
  _parseAudioData() {
    if (!this._audioPesData.length) return;
    if (!this.player._opt.hasAudio) {
      return;
    }
    const pes = this._parsePES(concatUint8Array(...this._audioPesData));
    if (!pes || pes.error) {
      if (pes && pes.code === 'startPrefixError') {
        this._audioPesData = [];
      }
      return;
    }
    // console.log('_parseAudioData', pes.dts, pes.pts);
    if (this._audioCodecType === AUDIO_ENC_CODE.AAC) {
      const ret = parseADTS(pes.data, pes.originalPts);
      if (ret) {
        this.audioTrack.codec = ret.codec;
        this.audioTrack.sampleRate = ret.sampleRate;
        this.audioTrack.channelCount = ret.channelCount;
        if (!this._isSendAACSeqHeader) {
          // need to build the accADTSHeader first
          const accADTSHeader = aacEncoderConfigurationRecordV2({ profile: ret.objectType, sampleRate: ret.samplingFrequencyIndex, channel: ret.channelCount });
          this._isSendAACSeqHeader = true;
          this.player.debug.log(this.TAG_NAME, 'aac seq header', `profile: ${ret.objectType}, sampleRate:${ret.sampleRate},sampleRateIndex: ${ret.samplingFrequencyIndex}, channel: ${ret.channelCount}`);
          this._doDecodeByHls(accADTSHeader, MEDIA_TYPE.audio, 0, false, 0);
        }
        if
(this._isSendAACSeqHeader) { const audioSampleList = []; ret.frames.forEach(s => { const pts = s.pts; const arrayBuffer = new Uint8Array(s.data.length + 2); arrayBuffer.set([0xAF, 0x01], 0); arrayBuffer.set(s.data, 2); const audioSample = { type: MEDIA_TYPE.audio, pts, dts: pts, payload: arrayBuffer }; audioSampleList.push(audioSample); }); this.audioTrack.samples = this.audioTrack.samples.concat(audioSampleList); } else { this.player.debug.warn(this.TAG_NAME, `aac seq header not send`); } } else { this.player.debug.warn(this.TAG_NAME, `aac parseADTS error`); } } this._audioPesData = []; } _fix() { let startTime = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; let discontinuity = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; let contiguous = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true; startTime = Math.round(startTime * 90000); const videoTrack = this.videoTrack; const audioTrack = this.audioTrack; const vSamples = videoTrack.samples; const aSamples = audioTrack.samples; if (!vSamples.length && !aSamples.length) { return; } const firstVideoSample = vSamples[0]; const firstAudioSample = aSamples[0]; // consider av delta let vaDelta = 0; if (vSamples.length && aSamples.length) { vaDelta = firstVideoSample.dts - firstAudioSample.pts; } if (!this._baseDtsInited) { this._calculateBaseDts(); } // recalc baseDts if (discontinuity) { this._calculateBaseDts(); this._baseDts -= startTime; } if (!contiguous) { /** * segment.start = min(a, v) * segment.start * | * a * -- vaDelta -- * v */ this._videoNextDts = vaDelta > 0 ? startTime + vaDelta : startTime; this._audioNextPts = vaDelta > 0 ? startTime : startTime - vaDelta; const vDeltaToNextDts = firstVideoSample ? firstVideoSample.dts - this._baseDts - this._videoNextDts : 0; const aDeltaToNextDts = firstAudioSample ? 
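// Non-contiguous append: the expected timestamps are re-seeded from the segment start. vaDelta is
// firstVideo.dts - firstAudio.pts in 90kHz ticks, so the earlier track is pinned to startTime and
// the other one is offset by |vaDelta|; if the rebased first sample still deviates from that
// expectation by more than TS_MAX_VIDEO_FRAME_DURATION, _calculateBaseDts() is run again below.
// Worked example (illustrative, 90kHz ticks): startTime = 2s = 180000, vaDelta = 9000 (video starts
// 100ms after audio) -> _audioNextPts = 180000, _videoNextDts = 189000.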
firstAudioSample.pts - this._baseDts - this._audioNextPts : 0; // 90000 = 1s if (Math.abs(vDeltaToNextDts || aDeltaToNextDts) > TS_MAX_VIDEO_FRAME_DURATION) { this._calculateBaseDts(this.audioTrack, this.videoTrack); this._baseDts -= startTime; } } this._resetBaseDtsWhenStreamBreaked(); // fix audio first this._fixAudio(audioTrack); this._fixVideo(videoTrack); let allSampleList = videoTrack.samples.concat(audioTrack.samples); // sort allSampleList = allSampleList.map(sample => { sample.dts = Math.round(sample.dts / 90); sample.pts = Math.round(sample.pts / 90); sample.cts = sample.pts - sample.dts; return sample; }).sort((a, b) => { return a.dts - b.dts; }); allSampleList.forEach(sample => { const arrayBuffer = new Uint8Array(sample.payload); delete sample.payload; if (this._isUseWorker()) { this.loopWorker.postMessage({ ...sample, payload: arrayBuffer, cmd: 'sample' }, [arrayBuffer.buffer]); } else { if (sample.type === MEDIA_TYPE.video) { // 直接解码 this._doDecodeVideo({ ...sample, payload: arrayBuffer }); } else if (sample.type === MEDIA_TYPE.audio) { this._doDecodeAudio({ ...sample, payload: arrayBuffer }); } } }); if (isFalse(this._hasCalcFps) && this._isUseWorker()) { this._hasCalcFps = true; this._calcDecodeFps(allSampleList); } // todo 如果缓存数据超过一定量,需要清理,防止延迟过高 } _isUseWorker() { return !this.player.isUseMSE() && this.isForHls(); } _calculateBaseDts() { const audioTrack = this.audioTrack; const videoTrack = this.videoTrack; const audioSamps = audioTrack.samples; const videoSamps = videoTrack.samples; if (!audioSamps.length && !videoSamps.length) { return false; } let audioBasePts = Infinity; let videoBaseDts = Infinity; if (audioSamps.length) { audioTrack.baseDts = audioBasePts = audioSamps[0].pts; } if (videoSamps.length) { videoTrack.baseDts = videoBaseDts = videoSamps[0].dts; } this._baseDts = Math.min(audioBasePts, videoBaseDts); const delta = videoBaseDts - audioBasePts; if (Number.isFinite(delta) && Math.abs(delta) > TS_LARGE_AV_FIRST_FRAME_GAP) { // warn: LARGE_AV_SHIFT this.player.debug.warn(this.TAG_NAME, `large av first frame gap, video pts: ${videoBaseDts}, audio pts: ${audioBasePts}, base dts: ${this._baseDts}, detect is: ${delta}`); } this._baseDtsInited = true; return true; } _resetBaseDtsWhenStreamBreaked() { if (this._baseDtsInited && this._videoTimestampBreak && this._audioTimestampBreak) { /** * timestamp breaked * _audioNextDts * ---------------------| * (_baseDts) _videoNextDts * ----------------------| * <---------------- * nextVideo.dts * ----------------------------------------| * nextAudio.dts * ---------------------------------------| */ // calc baseDts base on new samples const calc = this._calculateBaseDts(this.audioTrack, this.videoTrack); if (!calc) return; // consider the expect dts for next frame this._baseDts -= Math.min(this._audioNextPts, this._videoNextDts); this._audioLastSample = null; this._videoLastSample = null; this._videoTimestampBreak = false; this._audioTimestampBreak = false; } } _fixAudio(audioTrack) { const samples = audioTrack.samples; if (!samples.length) return; samples.forEach(x => { x.pts -= this._baseDts; x.dts = x.pts; }); this._doFixAudioInternal(audioTrack, samples, 90000); } _fixVideo(videoTrack) { const samples = videoTrack.samples; if (!samples.length) return; samples.forEach(x => { x.dts -= this._baseDts; x.pts -= this._baseDts; }); if (this._videoNextDts === undefined) { const samp0 = samples[0]; this._videoNextDts = samp0.dts; } const len = samples.length; let sampleDuration = 0; const firstSample = samples[0]; const 
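// _fixVideo(): the samples above were just rebased by _baseDts. The block below first checks whether
// the first sample of this chunk is more than TS_MAX_DTS_DELTA_WITH_NEXT_CHUNK (~500ms) away from the
// expected _videoNextDts and, if so, snaps it onto the expected timeline; when the second sample is
// then still more than TS_MAX_VIDEO_FRAME_DURATION away from the adjusted first one, the whole chunk
// is shifted by the same delta and a timestamp break is flagged. Afterwards every sample gets a
// duration taken from the gap to the next sample (refSampleDurationInt as fallback for the tail) and
// _videoNextDts advances by that duration, so the next chunk can be validated the same way.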
nextSample = samples[1]; const vDelta = this._videoNextDts - firstSample.dts; // 90000 / 2 // 500ms if (Math.abs(vDelta) > TS_MAX_DTS_DELTA_WITH_NEXT_CHUNK) { // resolve first frame first firstSample.dts += vDelta; firstSample.pts += vDelta; this.player.debug.warn(this.TAG_NAME, `large video gap between chunk, next dts is ${this._videoNextDts}, first dts is ${firstSample.dts}, next dts is ${nextSample.dts}, duration is ${vDelta}`); // check to ajust the whole segment // // 1s if (nextSample && Math.abs(nextSample.dts - firstSample.dts) > TS_MAX_VIDEO_FRAME_DURATION) { this._videoTimestampBreak = true; samples.forEach((x, i) => { if (i === 0) return; x.dts += vDelta; x.pts += vDelta; }); } } let refSampleDurationInt; const first = videoTrack.samples[0]; const last = videoTrack.samples[len - 1]; // 100ms default refSampleDurationInt = len === 1 ? 9000 : Math.floor((last.dts - first.dts) / (len - 1)); for (let i = 0; i < len; i++) { const dts = samples[i].dts; const nextSample = samples[i + 1]; if (i < len - 1) { sampleDuration = nextSample.dts - dts; } else if (samples[i - 1]) { sampleDuration = Math.min(dts - samples[i - 1].dts, refSampleDurationInt); } else { sampleDuration = refSampleDurationInt; } if (sampleDuration > TS_MAX_VIDEO_FRAME_DURATION || sampleDuration < 0) { // dts exception of adjacent frame this._videoTimestampBreak = true; // check if only video breaked! sampleDuration = this._audioTimestampBreak ? refSampleDurationInt : Math.max(sampleDuration, 30 * 90); // 30ms // check if sample breaked within current fragment const expectFragEnd = this._audioNextPts || 0; if (nextSample && nextSample.dts > expectFragEnd) { sampleDuration = refSampleDurationInt; } // todo: LARGE_VIDEO_GAP this.player.debug.warn(this.TAG_NAME, `large video gap between frames, time is ${dts / videoTrack.timescale}, dts is ${dts}, origin dts is ${samples[i].originalDts}, next dts is ${this._videoNextDts}, sample Duration is ${sampleDuration} , ref Sample DurationInt is ${refSampleDurationInt}`); } samples[i].duration = sampleDuration; this._videoNextDts += sampleDuration; } } _doFixAudioInternal(audioTrack, samples, timescale) { if (!audioTrack.sampleDuration) { audioTrack.sampleDuration = getFrameDuration$1(audioTrack.timescale, timescale); } const refSampleDuration = audioTrack.sampleDuration; if (this._audioNextPts === undefined) { const samp0 = samples[0]; this._audioNextPts = samp0.pts; } for (let i = 0; i < samples.length; i++) { const nextPts = this._audioNextPts; const sample = samples[i]; const delta = sample.pts - nextPts; // fill frames // delta >= 3 * refSampleDurationInt // delta <= 500s if (!this._audioTimestampBreak && delta >= TS_AUDIO_GAP_OVERLAP_THRESHOLD_COUNT * refSampleDuration && delta <= TS_MAX_SILENT_FRAME_DURATION && !isSafari()) { getSilentFrame(audioTrack.codec, audioTrack.channelCount) || samples[0].data.subarray(); const count = Math.floor(delta / refSampleDuration); if (Math.abs(sample.pts - this._lastAudioExceptionGapDot) > TS_AUDIO_EXCETION_LOG_EMIT_DURATION) { this._lastAudioExceptionGapDot = sample.pts; } // todo:WarningType.AUDIO_FILLED this.player.debug.warn(this.TAG_NAME, `audio gap detected, pts is ${samples.pts}, originPts is ${samples.originalPts}, count is ${count}, nextPts is ${nextPts}, ref sample duration is ${refSampleDuration}`); for (let j = 0; j < count; j++) { // const silentSample = new AudioSample(Math.floor(nextPts), silentFrame) // silentSample.originPts = Math.floor(this._baseDts + nextPts) // samples.splice(i, 0, silentSample) this._audioNextPts += 
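// Audio pts fixing relative to the expected nextPts:
//   forward gap  (delta >=  3 * refSampleDuration and <=  TS_MAX_SILENT_FRAME_DURATION): the expected
//                pts is advanced frame by frame below (the silent-frame insertion itself stays
//                commented out upstream, so the following samples simply realign);
//   overlap      (delta <= -3 * refSampleDuration and >= -TS_MAX_SILENT_FRAME_DURATION): the sample
//                is dropped;
//   otherwise    the sample is snapped onto nextPts, and a jump >= TS_MAX_SILENT_FRAME_DURATION also
//                sets _audioTimestampBreak.
// refSampleDuration is the nominal audio frame duration in 90kHz ticks; assuming the usual
// 1024-sample AAC frame at 44.1kHz that is about 1024 / 44100 * 90000 ≈ 2090 ticks (~23ms).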
refSampleDuration; i++; } i--; // delta <= -3 * refSampleDurationInt // delta >= -500ms } else if (delta <= -TS_AUDIO_GAP_OVERLAP_THRESHOLD_COUNT * refSampleDuration && delta >= -1 * TS_MAX_SILENT_FRAME_DURATION) { // need discard frames if (Math.abs(sample.pts - this._lastAudioExceptionOverlapDot) > TS_AUDIO_EXCETION_LOG_EMIT_DURATION) { this._lastAudioExceptionOverlapDot = sample.pts; // todo:AUDIO_DROPPED this.player.debug.warn(this.TAG_NAME, `audio overlap detected, pts is ${sample.pts}, originPts is ${sample.originalPts}, nextPts is ${nextPts}, ref sample duration is ${refSampleDuration}`); } samples.splice(i, 1); i--; } else { if (Math.abs(delta) >= TS_MAX_SILENT_FRAME_DURATION) { this._audioTimestampBreak = true; if (Math.abs(sample.pts - this._lastAudioExceptionLargeGapDot) > TS_AUDIO_EXCETION_LOG_EMIT_DURATION) { this._lastAudioExceptionLargeGapDot = sample.pts; // todo: LARGE_AUDIO_GAP this.player.debug.warn(this.TAG_NAME, `large audio gap detected, time is ${sample.pts / 1000} pts is ${sample.pts}, originPts is ${sample.originalPts}, nextPts is ${nextPts}, sample duration is ${delta} ref sample duration is ${refSampleDuration}`); } } sample.dts = sample.pts = nextPts; this._audioNextPts += refSampleDuration; } } } _calcDecodeFps(sampleList) { const _tempSampleTsList = sampleList.map(sample => { return { ts: sample.dts || sample.pts, type: sample.type }; }); const streamVideoFps = calcStreamFpsByBufferList(_tempSampleTsList, MEDIA_TYPE.video); if (streamVideoFps) { this.player.debug.log(this.TAG_NAME, `_calcDecodeFps() video fps is ${streamVideoFps}, update base fps is ${this._basefps}`); this._basefps = streamVideoFps; } this._postMessageToLoopWorker('updateBaseFps', { baseFps: this._basefps }); } _initLoopWorker() { this.player.debug.log(this.TAG_NAME, '_initLoopWorker()'); // worker fun // worker 里面跑interval function LoopWorkerFun() { const MEDIA_TYPE = { audio: 1, video: 2 }; class LoopWorker { constructor() { this.baseFps = 0; this.fpsInterval = null; this.preLoopTimestamp = null; this.startBpsTime = null; this.allSampleList = []; } destroy() { this._clearInterval(); this.baseFps = 0; this.allSampleList = []; this.preLoopTimestamp = null; this.startBpsTime = null; } updateBaseFps(baseFps) { this.baseFps = baseFps; this._clearInterval(); this._startInterval(); } pushSample(sample) { delete sample.cmd; this.allSampleList.push(sample); } _startInterval() { const fragDuration = Math.ceil(1000 / this.baseFps); this.fpsInterval = setInterval(() => { let nowTime = new Date().getTime(); if (!this.preLoopTimestamp) { this.preLoopTimestamp = nowTime; } if (!this.startBpsTime) { this.startBpsTime = nowTime; } const diffTime = nowTime - this.preLoopTimestamp; if (diffTime > fragDuration * 2) { console.warn(`JbPro:[TsLoader LoopWorker] loop interval is ${diffTime}ms, more than ${fragDuration} * 2ms`); } this._loop(); this.preLoopTimestamp = new Date().getTime(); if (this.startBpsTime) { const timestamp = nowTime - this.startBpsTime; if (timestamp >= 1000) { this._calcSampleList(); this.startBpsTime = nowTime; } } }, fragDuration); } _clearInterval() { if (this.fpsInterval) { clearInterval(this.fpsInterval); this.fpsInterval = null; } } _calcSampleList() { const tempObj = { buferredDuration: 0, allListLength: this.allSampleList.length, audioListLength: 0, videoListLength: 0 }; this.allSampleList.forEach(sample => { if (sample.type === MEDIA_TYPE.video) { tempObj.videoListLength++; if (sample.duration) { tempObj.buferredDuration += Math.round(sample.duration / 90); } } else if (sample.type 
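// _calcSampleList() runs about once per second inside the worker: it sums the queued video sample
// durations (converted from 90kHz ticks to ms via / 90) into buferredDuration, counts the pending
// audio/video samples, and posts the totals back as a 'sampleListInfo' message so the main thread can
// answer getBuferredDuration() / getSampleListLength() without touching the worker-side queue.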
=== MEDIA_TYPE.audio) { tempObj.audioListLength++; } }); postMessage({ cmd: 'sampleListInfo', ...tempObj }); } _loop() { let sample = null; if (this.allSampleList.length) { sample = this.allSampleList.shift(); if (sample.type === MEDIA_TYPE.video) { postMessage({ cmd: 'decodeVideo', ...sample }, [sample.payload.buffer]); // check next is audio let tempSample = this.allSampleList[0]; // 这边是检查了所有的audio while (tempSample && tempSample.type === MEDIA_TYPE.audio) { sample = this.allSampleList.shift(); postMessage({ cmd: 'decodeAudio', ...sample }, [sample.payload.buffer]); tempSample = this.allSampleList[0]; } } else if (sample.type === MEDIA_TYPE.audio) { postMessage({ cmd: 'decodeAudio', ...sample }, [sample.payload.buffer]); // check next is video // todo:这个就检查了一个。 if (this.allSampleList.length && this.allSampleList[0].type === MEDIA_TYPE.video) { sample = this.allSampleList.shift(); postMessage({ cmd: 'decodeVideo', ...sample }, [sample.payload.buffer]); } } } } } let loopWorker = new LoopWorker(); self.onmessage = e => { const msg = e.data; switch (msg.cmd) { case 'updateBaseFps': loopWorker.updateBaseFps(msg.baseFps); break; case 'sample': loopWorker.pushSample(msg); break; case 'destroy': loopWorker.destroy(); loopWorker = null; break; } }; } const loopWorkerString = function2String(LoopWorkerFun.toString()); const blob = new Blob([loopWorkerString], { type: "text/javascript" }); const workerUrl = URL.createObjectURL(blob); let loopWorker = new Worker(workerUrl); this.workerUrl = workerUrl; // 必须要释放,不然每次调用内存都明显泄露内存 // chrome 83 file协议下如果直接释放,将会使WebWorker无法启动 this.workerClearTimeout = setTimeout(() => { window.URL.revokeObjectURL(this.workerUrl); this.workerUrl = null; this.workerClearTimeout = null; }, URL_OBJECT_CLEAR_TIME); loopWorker.onmessage = event => { const msg = event.data; switch (msg.cmd) { case 'decodeVideo': this._doDecodeVideo(msg); break; case 'decodeAudio': this._doDecodeAudio(msg); break; case 'sampleListInfo': this.tempSampleListInfo = msg; break; } }; this.loopWorker = loopWorker; } _postMessageToLoopWorker(cmd, options) { if (!this._isUseWorker()) { return; } if (this.loopWorker) { this.loopWorker.postMessage({ cmd, ...options }); } else { this.player.debug.warn(this.TAG_NAME, `loop worker is not init, can not post message`); } } _doDecodeAudio(sample) { const uint8Array = new Uint8Array(sample.payload); this.player.updateStats({ abps: uint8Array.byteLength }); let payloadBuffer = uint8Array; if (isTrue(this.player._opt.m7sCryptoAudio)) { payloadBuffer = this.cryptoPayloadAudio(uint8Array); } if (this.isForHls()) { this._doDecodeByHls(payloadBuffer, MEDIA_TYPE.audio, sample.dts, false, 0); } else { this._doDecodeByTs(payloadBuffer, MEDIA_TYPE.audio, sample.dts, false, 0); } } _doDecodeVideo(sample) { const uint8Array = new Uint8Array(sample.payload); let packet = null; if (sample.isHevc) { // add 5 header packet = hevcEncoderNalePacketNotLength(uint8Array, sample.isIFrame); } else { packet = avcEncoderNalePacketNotLength(uint8Array, sample.isIFrame); } this.player.updateStats({ dts: sample.dts, vbps: packet.byteLength }); const cts = sample.pts - sample.dts; let payloadBuffer = this.cryptoPayload(packet, sample.isIFrame); if (this.isForHls()) { this._doDecodeByHls(payloadBuffer, MEDIA_TYPE.video, sample.dts, sample.isIFrame, cts); } else { this._doDecodeByTs(payloadBuffer, MEDIA_TYPE.video, sample.dts, sample.isIFrame, cts); } } _stopDecodeLoopInterval() { if (this._baseFpsInterval) { clearInterval(this._baseFpsInterval); this._baseFpsInterval = null; } } 
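// Summary of the loop-worker path above: decoded samples are transferred into the worker (payload
// ArrayBuffer included), which re-emits them as 'decodeVideo' / 'decodeAudio' messages on an interval
// of roughly 1000 / baseFps ms, draining any audio samples queued ahead of the next video frame, and
// reports queue statistics once per second. Hypothetical polling sketch (tsLoader is an illustrative
// name for an instance of this class, not something defined in this file):
//   setInterval(() => {
//     console.log(`ts buffer: ${tsLoader.getBuferredDuration()}ms, pending samples: ${tsLoader.getSampleListLength()}`);
//   }, 1000);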
getBuferredDuration() { return this.tempSampleListInfo.buferredDuration || 0; } getSampleListLength() { return this.tempSampleListInfo.allListLength || 0; } getSampleAudioListLength() { return this.tempSampleListInfo.audioListLength || 0; } getSampleVideoListLength() { return this.tempSampleListInfo.videoListLength || 0; } isForHls() { return this._isForHls; } } const TS_STREAM_TYPE = { kMPEG1Audio: 0x03, kMPEG2Audio: 0x04, kPESPrivateData: 0x06, kADTSAAC: 0x0F, kLOASAAC: 0x11, kAC3: 0x81, kEAC3: 0x87, kMetadata: 0x15, kSCTE35: 0x86, kH264: 0x1b, kH265: 0x24 }; class SliceQueue { constructor() { this.slices = []; this.total_length = 0; this.expected_length = 0; this.random_access_indicator = 0; } } class PESData { constructor() { this.pid = null; this.data = null; this.stream_type = null; this.random_access_indicator = null; } } class PESPrivateData { constructor() { this.pid = null; this.stream_id = null; this.len = null; this.data = null; this.pts = null; this.nearest_pts = null; this.dts = null; } } const MPEG4SamplingFrequencies = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350]; class MediaInfo { constructor() { this.mimeType = null; this.duration = null; this.hasAudio = null; this.hasVideo = null; this.audioCodec = null; this.videoCodec = null; this.audioDataRate = null; this.videoDataRate = null; this.audioSampleRate = null; this.audioChannelCount = null; this.width = null; this.height = null; this.fps = null; this.profile = null; this.level = null; this.refFrames = null; this.chromaFormat = null; this.sarNum = null; this.sarDen = null; this.metadata = null; this.segments = null; // MediaInfo[] this.segmentCount = null; this.hasKeyframesIndex = null; this.keyframesIndex = null; } isComplete() { let audioInfoComplete = this.hasAudio === false || this.hasAudio === true && this.audioCodec != null && this.audioSampleRate != null && this.audioChannelCount != null; let videoInfoComplete = this.hasVideo === false || this.hasVideo === true && this.videoCodec != null && this.width != null && this.height != null && this.fps != null && this.profile != null && this.level != null && this.refFrames != null && this.chromaFormat != null && this.sarNum != null && this.sarDen != null; // keyframesIndex may not be present return this.mimeType != null && audioInfoComplete && videoInfoComplete; } isSeekable() { return this.hasKeyframesIndex === true; } getNearestKeyframe(milliseconds) { if (this.keyframesIndex == null) { return null; } let table = this.keyframesIndex; let keyframeIdx = this._search(table.times, milliseconds); return { index: keyframeIdx, milliseconds: table.times[keyframeIdx], fileposition: table.filepositions[keyframeIdx] }; } _search(list, value) { let idx = 0; let last = list.length - 1; let mid = 0; let lbound = 0; let ubound = last; if (value < list[0]) { idx = 0; lbound = ubound + 1; // skip search } while (lbound <= ubound) { mid = lbound + Math.floor((ubound - lbound) / 2); if (mid === last || value >= list[mid] && value < list[mid + 1]) { idx = mid; break; } else if (list[mid] < value) { lbound = mid + 1; } else { ubound = mid - 1; } } return idx; } } class AudioSpecificConfig { constructor(frame) { let config = null; let original_audio_object_type = frame.audio_object_type; let audio_object_type = frame.audio_object_type; let sampling_index = frame.sampling_freq_index; let channel_config = frame.channel_config; let extension_sampling_index = 0; let userAgent = navigator.userAgent.toLowerCase(); if (userAgent.indexOf('firefox') !== -1) { 
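// Browser-specific AudioSpecificConfig: Firefox is signalled HE-AAC (SBR) only when the source
// sampling rate is 24kHz or lower, Android always gets LC-AAC, and other browsers are signalled
// HE-AAC (except for mono) so that codec-profile switches stay cheap. The config bytes packed below
// follow the MPEG-4 ASC layout: 5 bits audioObjectType, 4 bits samplingFrequencyIndex, 4 bits
// channelConfiguration, plus the extension sampling index when SBR is signalled. Illustrative values
// (shown modulo 256, i.e. as they end up once written into a byte buffer): LC-AAC, 44.1kHz
// (index 4), stereo -> config ≈ [0x12, 0x10].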
// firefox: use SBR (HE-AAC) if freq less than 24kHz if (sampling_index >= 6) { audio_object_type = 5; config = new Array(4); extension_sampling_index = sampling_index - 3; } else { // use LC-AAC audio_object_type = 2; config = new Array(2); extension_sampling_index = sampling_index; } } else if (userAgent.indexOf('android') !== -1) { // android: always use LC-AAC audio_object_type = 2; config = new Array(2); extension_sampling_index = sampling_index; } else { // for other browsers, e.g. chrome... // Always use HE-AAC to make it easier to switch aac codec profile audio_object_type = 5; extension_sampling_index = sampling_index; config = new Array(4); if (sampling_index >= 6) { extension_sampling_index = sampling_index - 3; } else if (channel_config === 1) { // Mono channel audio_object_type = 2; config = new Array(2); extension_sampling_index = sampling_index; } } config[0] = audio_object_type << 3; config[0] |= (sampling_index & 0x0F) >>> 1; config[1] = (sampling_index & 0x0F) << 7; config[1] |= (channel_config & 0x0F) << 3; if (audio_object_type === 5) { config[1] |= (extension_sampling_index & 0x0F) >>> 1; config[2] = (extension_sampling_index & 0x01) << 7; // extended audio object type: force to 2 (LC-AAC) config[2] |= 2 << 2; config[3] = 0; } this.config = config; this.sampling_rate = MPEG4SamplingFrequencies[sampling_index]; this.sampling_index = sampling_index; this.channel_count = channel_config; this.object_type = audio_object_type; this.original_object_type = original_audio_object_type; this.codec_mimetype = 'mp4a.40.' + audio_object_type; this.original_codec_mimetype = 'mp4a.40.' + original_audio_object_type; } } class TsLoaderV2 extends CommonLoader { constructor(player) { super(player); this.player = player; this.TAG_NAME = 'TsLoaderV2'; this.first_parse_ = true; this.tsPacketSize = 0; this.syncOffset = 0; this.pmt_ = null; this.config_ = null; this.media_info_ = new MediaInfo(); this.timescale_ = 90; this.duration_ = 0; this.pat_ = { version_number: 0, network_pid: 0, program_map_pid: {} }; this.current_program_ = null; this.current_pmt_pid_ = -1; this.program_pmt_map_ = {}; this.pes_slice_queues_ = {}; this.section_slice_queues_ = {}; this.video_metadata_ = { vps: null, sps: null, pps: null, details: null }; this.audio_metadata_ = { codec: null, audio_object_type: null, sampling_freq_index: null, sampling_frequency: null, channel_config: null }; this.last_pcr_ = null; this.audio_last_sample_pts_ = undefined; this.aac_last_incomplete_data_ = null; this.has_video_ = false; this.has_audio_ = false; this.video_init_segment_dispatched_ = false; this.audio_init_segment_dispatched_ = false; this.video_metadata_changed_ = false; this.audio_metadata_changed_ = false; this.loas_previous_frame = null; this.video_track_ = { type: 'video', id: 1, sequenceNumber: 0, samples: [], length: 0 }; this.audio_track_ = { type: 'audio', id: 2, sequenceNumber: 0, samples: [], length: 0 }; this._remainingPacketData = null; this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { this.media_info_ = null; this.pes_slice_queues_ = null; this.section_slice_queues_ = null; this.video_metadata_ = null; this.audio_metadata_ = null; this.aac_last_incomplete_data_ = null; this.video_track_ = null; this.audio_track_ = null; this._remainingPacketData = null; super.destroy(); } probe(buffer) { let data = new Uint8Array(buffer); let sync_offset = -1; let ts_packet_size = 188; if (data.byteLength <= 3 * ts_packet_size) { return { needMoreData: true }; } while (sync_offset === -1) { let scan_window = 
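// TS sync probing: scan up to ~1000 bytes for three 0x47 sync bytes spaced exactly one packet apart,
// trying 188 (plain TS), then 192 (BDAV/M2TS, 4-byte extra header before each packet), then 204
// (DVB with 16-byte Reed-Solomon parity). On success parseChunks() consumes a result shaped like
//   { match: true, consumed: 0, ts_packet_size: 188, sync_offset: 0 }   // illustrative values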
Math.min(1000, data.byteLength - 3 * ts_packet_size); for (let i = 0; i < scan_window;) { // sync_byte should all be 0x47 if (data[i] === 0x47 && data[i + ts_packet_size] === 0x47 && data[i + 2 * ts_packet_size] === 0x47) { sync_offset = i; break; } else { i++; } } // find sync_offset failed in previous ts_packet_size if (sync_offset === -1) { if (ts_packet_size === 188) { // try 192 packet size (BDAV, etc.) ts_packet_size = 192; } else if (ts_packet_size === 192) { // try 204 packet size (European DVB, etc.) ts_packet_size = 204; } else { // 192, 204 also failed, exit break; } } } if (sync_offset === -1) { // both 188, 192, 204 failed, Non MPEG-TS return { match: false }; } if (ts_packet_size === 192 && sync_offset >= 4) { // Log.v('TSDemuxer', `ts_packet_size = 192, m2ts mode`); sync_offset -= 4; } return { match: true, consumed: 0, ts_packet_size, sync_offset }; } _initPmt() { return { program_number: 0, version_number: 0, pcr_pid: 0, // pid -> stream_type pid_stream_type: {}, common_pids: { h264: undefined, h265: undefined, adts_aac: undefined, loas_aac: undefined, opus: undefined, ac3: undefined, eac3: undefined, mp3: undefined }, pes_private_data_pids: {}, timed_id3_pids: {}, synchronous_klv_pids: {}, asynchronous_klv_pids: {}, scte_35_pids: {}, smpte2038_pids: {} }; } dispatch(data) { // 传入的是Uint8Array 但是这里需要ArrayBuffer let arrayBuffer = new Uint8Array(data); if (this._remainingPacketData) { arrayBuffer = concatUint8Array(this._remainingPacketData, arrayBuffer); this._remainingPacketData = null; } let chunk = arrayBuffer.buffer; const offset = this.parseChunks(chunk); // unconsumed data remain if (offset) { this._remainingPacketData = arrayBuffer.subarray(offset); } else { if (arrayBuffer.length < this.tsPacketSize) { this._remainingPacketData = arrayBuffer; } } } parseChunks(chunk) { // 偏移量 let offset = 0; if (this.first_parse_) { this.first_parse_ = false; const probeData = this.probe(chunk); if (probeData.match) { this.tsPacketSize = probeData.ts_packet_size; this.syncOffset = probeData.sync_offset; } offset = this.syncOffset; this.player.debug.log(this.TAG_NAME, `isFirstDispatch and tsPacketSize = ${this.tsPacketSize}, syncOffset = ${this.syncOffset}`); } while (offset + this.tsPacketSize <= chunk.byteLength) { if (this.tsPacketSize === 192) { offset += 4; } const data = new Uint8Array(chunk, offset, 188); let sync_byte = data[0]; if (sync_byte !== 0x47) { this.player.debug.warn(this.TAG_NAME, `parseChunks() sync_byte = ${sync_byte}, not start with 0x47, so break`); break; } let payload_unit_start_indicator = (data[1] & 0x40) >>> 6; (data[1] & 0x20) >>> 5; let pid = (data[1] & 0x1F) << 8 | data[2]; let adaptation_field_control = (data[3] & 0x30) >>> 4; let continuity_conunter = data[3] & 0x0F; let is_pcr_pid = this.pmt_ && this.pmt_.pcr_pid === pid ? 
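// Fixed 4-byte TS packet header parsed above: sync byte 0x47, payload_unit_start_indicator
// (bit 0x40 of byte 1), 13-bit PID ((byte1 & 0x1F) << 8 | byte2), adaptation_field_control
// (bits 0x30 of byte 3) and the 4-bit continuity counter. When this PID carries the PCR and an
// adaptation field is present, the PCR below is rebuilt as pcr_base * 300 + pcr_extension
// (33 + 9 bits, 27MHz units) and cached in last_pcr_.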
true : false; let adaptation_field_info = {}; let ts_payload_start_index = 4; if (adaptation_field_control == 0x02 || adaptation_field_control == 0x03) { let adaptation_field_length = data[4]; if (adaptation_field_length > 0 && (is_pcr_pid || adaptation_field_control == 0x03)) { // Parse adaptation field adaptation_field_info.discontinuity_indicator = (data[5] & 0x80) >>> 7; adaptation_field_info.random_access_indicator = (data[5] & 0x40) >>> 6; adaptation_field_info.elementary_stream_priority_indicator = (data[5] & 0x20) >>> 5; let PCR_flag = (data[5] & 0x10) >>> 4; if (PCR_flag) { let pcr_base = data[6] << 25 | data[7] << 17 | data[8] << 9 | data[9] << 1 | data[10] >>> 7; let pcr_extension = (data[10] & 0x01) << 8 | data[11]; let pcr = pcr_base * 300 + pcr_extension; this.last_pcr_ = pcr; } } if (adaptation_field_control == 0x02 || 5 + adaptation_field_length === 188) { // TS packet only has adaption field, jump to next offset += 188; if (this.tsPacketSize === 204) { // skip parity word (16 bytes) for RS encoded TS offset += 16; } continue; } else { // Point ts_payload_start_index to the start of payload ts_payload_start_index = 4 + 1 + adaptation_field_length; } } if (adaptation_field_control == 0x01 || adaptation_field_control == 0x03) { if (pid === 0 || // PAT (pid === 0) pid === this.current_pmt_pid_ || // PMT this.pmt_ != undefined && this.pmt_.pid_stream_type[pid] === TS_STREAM_TYPE.kSCTE35) { // SCTE35 let ts_payload_length = 188 - ts_payload_start_index; this.handleSectionSlice(chunk, offset + ts_payload_start_index, ts_payload_length, { pid, payload_unit_start_indicator, continuity_conunter, random_access_indicator: adaptation_field_info.random_access_indicator }); } else if (this.pmt_ != undefined && this.pmt_.pid_stream_type[pid] != undefined) { // PES let ts_payload_length = 188 - ts_payload_start_index; let stream_type = this.pmt_.pid_stream_type[pid]; // process PES only for known common_pids if (pid === this.pmt_.common_pids.h264 || pid === this.pmt_.common_pids.h265 || pid === this.pmt_.common_pids.adts_aac || pid === this.pmt_.common_pids.loas_aac || pid === this.pmt_.common_pids.ac3 || pid === this.pmt_.common_pids.eac3 || pid === this.pmt_.common_pids.opus || pid === this.pmt_.common_pids.mp3 || this.pmt_.pes_private_data_pids[pid] === true || this.pmt_.timed_id3_pids[pid] === true || this.pmt_.synchronous_klv_pids[pid] === true || this.pmt_.asynchronous_klv_pids[pid] === true) { this.handlePESSlice(chunk, offset + ts_payload_start_index, ts_payload_length, { pid, stream_type, payload_unit_start_indicator, continuity_conunter, random_access_indicator: adaptation_field_info.random_access_indicator }); } } } offset += 188; if (this.tsPacketSize === 204) { // skip parity word (16 bytes) for RS encoded TS offset += 16; } } this.dispatchAudioVideoMediaSegment(); return offset; } handleSectionSlice(buffer, offset, length, misc) { let data = new Uint8Array(buffer, offset, length); let slice_queue = this.section_slice_queues_[misc.pid]; if (misc.payload_unit_start_indicator) { let pointer_field = data[0]; if (slice_queue != undefined && slice_queue.total_length !== 0) { let remain_section = new Uint8Array(buffer, offset + 1, Math.min(length, pointer_field)); slice_queue.slices.push(remain_section); slice_queue.total_length += remain_section.byteLength; if (slice_queue.total_length === slice_queue.expected_length) { this.emitSectionSlices(slice_queue, misc); } else { this.clearSlices(slice_queue, misc); } } for (let i = 1 + pointer_field; i < data.byteLength;) { let table_id = 
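// PSI section reassembly: on payload_unit_start_indicator the pointer_field gives the number of bytes
// still belonging to the previous section; each new section then gets a SliceQueue whose
// expected_length is section_length + 3 header bytes. Slices accumulate until total_length reaches
// expected_length, at which point emitSectionSlices() concatenates them and parseSection() routes the
// table by pid (PAT, PMT or SCTE-35). A table_id of 0xFF below is stuffing and terminates the loop.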
data[i + 0]; if (table_id === 0xFF) { // this.player.debug.log(this.TAG_NAME, `handleSectionSlice(): table_id is 0xFF, skip`); break; } let section_length = (data[i + 1] & 0x0F) << 8 | data[i + 2]; this.section_slice_queues_[misc.pid] = new SliceQueue(); slice_queue = this.section_slice_queues_[misc.pid]; slice_queue.expected_length = section_length + 3; slice_queue.random_access_indicator = misc.random_access_indicator; let remain_section = new Uint8Array(buffer, offset + i, Math.min(length - i, slice_queue.expected_length - slice_queue.total_length)); slice_queue.slices.push(remain_section); slice_queue.total_length += remain_section.byteLength; if (slice_queue.total_length === slice_queue.expected_length) { this.emitSectionSlices(slice_queue, misc); } else if (slice_queue.total_length >= slice_queue.expected_length) { this.clearSlices(slice_queue, misc); } i += remain_section.byteLength; } } else if (slice_queue != undefined && slice_queue.total_length !== 0) { let remain_section = new Uint8Array(buffer, offset, Math.min(length, slice_queue.expected_length - slice_queue.total_length)); slice_queue.slices.push(remain_section); slice_queue.total_length += remain_section.byteLength; if (slice_queue.total_length === slice_queue.expected_length) { this.emitSectionSlices(slice_queue, misc); } else if (slice_queue.total_length >= slice_queue.expected_length) { this.clearSlices(slice_queue, misc); } } } handlePESSlice(buffer, offset, length, misc) { let data = new Uint8Array(buffer, offset, length); let packet_start_code_prefix = data[0] << 16 | data[1] << 8 | data[2]; data[3]; let PES_packet_length = data[4] << 8 | data[5]; // start 开始阶段 if (misc.payload_unit_start_indicator) { if (packet_start_code_prefix !== 1) { this.player.debug.warn(this.TAG_NAME, `handlePESSlice: packet_start_code_prefix should be 1 but with value ${packet_start_code_prefix}`); return; } // handle queued PES slices: // Merge into a big Uint8Array then call parsePES() let slice_queue = this.pes_slice_queues_[misc.pid]; if (slice_queue) { if (slice_queue.expected_length === 0 || slice_queue.expected_length === slice_queue.total_length) { this.emitPESSlices(slice_queue, misc); } else { this.clearSlices(slice_queue, misc); } } // Make a new PES queue for new PES slices this.pes_slice_queues_[misc.pid] = new SliceQueue(); this.pes_slice_queues_[misc.pid].random_access_indicator = misc.random_access_indicator; } if (this.pes_slice_queues_[misc.pid] == undefined) { // ignore PES slices without [PES slice that has payload_unit_start_indicator] return; } // push subsequent PES slices into pes_queue let slice_queue = this.pes_slice_queues_[misc.pid]; slice_queue.slices.push(data); if (misc.payload_unit_start_indicator) { slice_queue.expected_length = PES_packet_length === 0 ? 
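// PES_packet_length === 0 is legal (commonly used for video PES) and means "unbounded": the queue is
// only flushed when the next payload_unit_start_indicator arrives. Otherwise the expected queue
// length is PES_packet_length + 6 header bytes, and the queue is emitted once total_length matches
// it (or discarded if it overflows).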
0 : PES_packet_length + 6; } slice_queue.total_length += data.byteLength; if (slice_queue.expected_length > 0 && slice_queue.expected_length === slice_queue.total_length) { this.emitPESSlices(slice_queue, misc); } else if (slice_queue.expected_length > 0 && slice_queue.expected_length < slice_queue.total_length) { this.clearSlices(slice_queue, misc); } } emitSectionSlices(slice_queue, misc) { let data = new Uint8Array(slice_queue.total_length); for (let i = 0, offset = 0; i < slice_queue.slices.length; i++) { let slice = slice_queue.slices[i]; data.set(slice, offset); offset += slice.byteLength; } slice_queue.slices = []; slice_queue.expected_length = -1; slice_queue.total_length = 0; let section_data = {}; section_data.pid = misc.pid; section_data.data = data; section_data.file_position = slice_queue.file_position; section_data.random_access_indicator = slice_queue.random_access_indicator; this.parseSection(section_data); } emitPESSlices(slice_queue, misc) { let data = new Uint8Array(slice_queue.total_length); for (let i = 0, offset = 0; i < slice_queue.slices.length; i++) { let slice = slice_queue.slices[i]; data.set(slice, offset); offset += slice.byteLength; } slice_queue.slices = []; slice_queue.expected_length = -1; slice_queue.total_length = 0; let pes_data = new PESData(); pes_data.pid = misc.pid; pes_data.data = data; pes_data.stream_type = misc.stream_type; pes_data.random_access_indicator = slice_queue.random_access_indicator; this.parsePES(pes_data); } clearSlices(slice_queue) { slice_queue.slices = []; slice_queue.expected_length = -1; slice_queue.total_length = 0; } parseSection(section_data) { let data = section_data.data; let pid = section_data.pid; if (pid === 0x00) { this.parsePAT(data); } else if (pid === this.current_pmt_pid_) { this.parsePMT(data); } else if (this.pmt_ != undefined && this.pmt_.scte_35_pids[pid]) { this.parseSCTE35(data); } } parsePES(pes_data) { let data = pes_data.data; let packet_start_code_prefix = data[0] << 16 | data[1] << 8 | data[2]; let stream_id = data[3]; let PES_packet_length = data[4] << 8 | data[5]; if (packet_start_code_prefix !== 1) { this.player.debug.error(this.TAG_NAME, `parsePES: packet_start_code_prefix should be 1 but with value ${packet_start_code_prefix}`); return; } if (stream_id !== 0xBC // program_stream_map && stream_id !== 0xBE // padding_stream && stream_id !== 0xBF // private_stream_2 && stream_id !== 0xF0 // ECM && stream_id !== 0xF1 // EMM && stream_id !== 0xFF // program_stream_directory && stream_id !== 0xF2 // DSMCC && stream_id !== 0xF8) { (data[6] & 0x30) >>> 4; let PTS_DTS_flags = (data[7] & 0xC0) >>> 6; let PES_header_data_length = data[8]; let pts; let dts; if (PTS_DTS_flags === 0x02 || PTS_DTS_flags === 0x03) { pts = (data[9] & 0x0E) * 536870912 + // 1 << 29 (data[10] & 0xFF) * 4194304 + // 1 << 22 (data[11] & 0xFE) * 16384 + // 1 << 14 (data[12] & 0xFF) * 128 + // 1 << 7 (data[13] & 0xFE) / 2; if (PTS_DTS_flags === 0x03) { dts = (data[14] & 0x0E) * 536870912 + // 1 << 29 (data[15] & 0xFF) * 4194304 + // 1 << 22 (data[16] & 0xFE) * 16384 + // 1 << 14 (data[17] & 0xFF) * 128 + // 1 << 7 (data[18] & 0xFE) / 2; } else { dts = pts; } } let payload_start_index = 6 + 3 + PES_header_data_length; let payload_length; if (PES_packet_length !== 0) { if (PES_packet_length < 3 + PES_header_data_length) { this.player.debug.warn(this.TAG_NAME, `Malformed PES: PES_packet_length < 3 + PES_header_data_length`); return; } payload_length = PES_packet_length - 3 - PES_header_data_length; } else { // PES_packet_length === 0 
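// (PES_packet_length === 0: take everything after the header as payload.)
// Note that the 33-bit PTS/DTS above are rebuilt with multiplications instead of shifts because JS
// bitwise operators truncate to 32 bits, e.g. the top three bits contribute (data[9] & 0x0E) * 2^29.
// PTS_DTS_flags 0b10 means only a PTS is present (dts = pts); 0b11 means both PTS and DTS follow.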
payload_length = data.byteLength - payload_start_index; } let payload = data.subarray(payload_start_index, payload_start_index + payload_length); switch (pes_data.stream_type) { case TS_STREAM_TYPE.kMPEG1Audio: case TS_STREAM_TYPE.kMPEG2Audio: this.parseMP3Payload(payload, pts); break; case TS_STREAM_TYPE.kPESPrivateData: if (this.pmt_.common_pids.opus === pes_data.pid) { this.parseOpusPayload(payload, pts); } else if (this.pmt_.common_pids.ac3 === pes_data.pid) { this.parseAC3Payload(payload, pts); } else if (this.pmt_.common_pids.eac3 === pes_data.pid) { this.parseEAC3Payload(payload, pts); } else if (this.pmt_.asynchronous_klv_pids[pes_data.pid]) { this.parseAsynchronousKLVMetadataPayload(payload, pes_data.pid, stream_id); } else if (this.pmt_.smpte2038_pids[pes_data.pid]) { this.parseSMPTE2038MetadataPayload(payload, pts, dts, pes_data.pid, stream_id); } else { this.parsePESPrivateDataPayload(payload, pts, dts, pes_data.pid, stream_id); } break; case TS_STREAM_TYPE.kADTSAAC: this.parseADTSAACPayload(payload, pts); break; case TS_STREAM_TYPE.kLOASAAC: this.parseLOASAACPayload(payload, pts); break; case TS_STREAM_TYPE.kAC3: this.parseAC3Payload(payload, pts); break; case TS_STREAM_TYPE.kEAC3: this.parseEAC3Payload(payload, pts); break; case TS_STREAM_TYPE.kMetadata: if (this.pmt_.timed_id3_pids[pes_data.pid]) { this.parseTimedID3MetadataPayload(payload, pts, dts, pes_data.pid, stream_id); } else if (this.pmt_.synchronous_klv_pids[pes_data.pid]) { this.parseSynchronousKLVMetadataPayload(payload, pts, dts, pes_data.pid, stream_id); } break; case TS_STREAM_TYPE.kH264: this.parseH264Payload(payload, pts, dts, pes_data.random_access_indicator); break; case TS_STREAM_TYPE.kH265: this.parseH265Payload(payload, pts, dts, pes_data.random_access_indicator); break; } } else if (stream_id === 0xBC // program_stream_map || stream_id === 0xBF // private_stream_2 || stream_id === 0xF0 // ECM || stream_id === 0xF1 // EMM || stream_id === 0xFF // program_stream_directory || stream_id === 0xF2 // DSMCC_stream || stream_id === 0xF8) { // ITU-T Rec. 
H.222.1 type E stream if (pes_data.stream_type === TS_STREAM_TYPE.kPESPrivateData) { let payload_start_index = 6; let payload_length; if (PES_packet_length !== 0) { payload_length = PES_packet_length; } else { // PES_packet_length === 0 payload_length = data.byteLength - payload_start_index; } let payload = data.subarray(payload_start_index, payload_start_index + payload_length); this.parsePESPrivateDataPayload(payload, undefined, undefined, pes_data.pid, stream_id); } } } parsePAT(data) { let table_id = data[0]; if (table_id !== 0x00) { Log.e(this.TAG, `parsePAT: table_id ${table_id} is not corresponded to PAT!`); return; } let section_length = (data[1] & 0x0F) << 8 | data[2]; data[3] << 8 | data[4]; let version_number = (data[5] & 0x3E) >>> 1; let current_next_indicator = data[5] & 0x01; let section_number = data[6]; data[7]; let pat = null; if (current_next_indicator === 1 && section_number === 0) { pat = { version_number: 0, network_pid: 0, program_pmt_pid: {} }; pat.version_number = version_number; } else { pat = this.pat_; if (pat == undefined) { return; } } let program_start_index = 8; let program_bytes = section_length - 5 - 4; // section_length - (headers + crc) let first_program_number = -1; let first_pmt_pid = -1; for (let i = program_start_index; i < program_start_index + program_bytes; i += 4) { let program_number = data[i] << 8 | data[i + 1]; let pid = (data[i + 2] & 0x1F) << 8 | data[i + 3]; if (program_number === 0) { // network_PID pat.network_pid = pid; } else { // program_map_PID pat.program_pmt_pid[program_number] = pid; if (first_program_number === -1) { first_program_number = program_number; } if (first_pmt_pid === -1) { first_pmt_pid = pid; } } } // Currently we only deal with first appeared PMT pid if (current_next_indicator === 1 && section_number === 0) { if (this.pat_ == undefined) { this.player.debug.log(this.TAG_NAME, `Parsed first PAT: ${JSON.stringify(pat)}`); } this.pat_ = pat; this.current_program_ = first_program_number; this.current_pmt_pid_ = first_pmt_pid; } } parsePMT(data) { let table_id = data[0]; if (table_id !== 0x02) { this.player.debug.error(this.TAG_NAME, `parsePMT: table_id ${table_id} is not corresponded to PMT!`); return; } let section_length = (data[1] & 0x0F) << 8 | data[2]; let program_number = data[3] << 8 | data[4]; let version_number = (data[5] & 0x3E) >>> 1; let current_next_indicator = data[5] & 0x01; let section_number = data[6]; data[7]; let pmt; if (current_next_indicator === 1 && section_number === 0) { pmt = this._initPmt(); pmt.program_number = program_number; pmt.version_number = version_number; this.program_pmt_map_[program_number] = pmt; } else { pmt = this.program_pmt_map_[program_number]; if (pmt == undefined) { return; } } pmt.pcr_pid = (data[8] & 0x1F) << 8 | data[9]; let program_info_length = (data[10] & 0x0F) << 8 | data[11]; let info_start_index = 12 + program_info_length; let info_bytes = section_length - 9 - program_info_length - 4; for (let i = info_start_index; i < info_start_index + info_bytes;) { let stream_type = data[i]; let elementary_PID = (data[i + 1] & 0x1F) << 8 | data[i + 2]; let ES_info_length = (data[i + 3] & 0x0F) << 8 | data[i + 4]; pmt.pid_stream_type[elementary_PID] = stream_type; let already_has_video = pmt.common_pids.h264 || pmt.common_pids.h265; let already_has_audio = pmt.common_pids.adts_aac || pmt.common_pids.loas_aac || pmt.common_pids.ac3 || pmt.common_pids.eac3 || pmt.common_pids.opus || pmt.common_pids.mp3; if (stream_type === TS_STREAM_TYPE.kH264 && !already_has_video) { 
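// PMT elementary-stream mapping: the first video PID (H.264 or H.265) and the first audio PID
// (ADTS/LOAS AAC, AC-3, E-AC-3, Opus or MP3) win; later streams of the same kind are ignored.
// PES-private-data PIDs additionally get their descriptors scanned (registration 'VANC' -> SMPTE 2038,
// 'Opus' -> Opus, 'KLVA' -> asynchronous KLV), and kMetadata PIDs are classified as timed ID3 or
// synchronous KLV via the metadata descriptor (tag 0x26).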
pmt.common_pids.h264 = elementary_PID; } else if (stream_type === TS_STREAM_TYPE.kH265 && !already_has_video) { pmt.common_pids.h265 = elementary_PID; } else if (stream_type === TS_STREAM_TYPE.kADTSAAC && !already_has_audio) { pmt.common_pids.adts_aac = elementary_PID; } else if (stream_type === TS_STREAM_TYPE.kLOASAAC && !already_has_audio) { pmt.common_pids.loas_aac = elementary_PID; } else if (stream_type === TS_STREAM_TYPE.kAC3 && !already_has_audio) { pmt.common_pids.ac3 = elementary_PID; // ATSC AC-3 } else if (stream_type === TS_STREAM_TYPE.kEAC3 && !already_has_audio) { pmt.common_pids.eac3 = elementary_PID; // ATSC EAC-3 } else if ((stream_type === TS_STREAM_TYPE.kMPEG1Audio || stream_type === TS_STREAM_TYPE.kMPEG2Audio) && !already_has_audio) { pmt.common_pids.mp3 = elementary_PID; } else if (stream_type === TS_STREAM_TYPE.kPESPrivateData) { pmt.pes_private_data_pids[elementary_PID] = true; if (ES_info_length > 0) { // parse descriptor for PES private data for (let offset = i + 5; offset < i + 5 + ES_info_length;) { let tag = data[offset + 0]; let length = data[offset + 1]; if (tag === 0x05) { // Registration Descriptor let registration = String.fromCharCode(...Array.from(data.subarray(offset + 2, offset + 2 + length))); if (registration === 'VANC') { pmt.smpte2038_pids[elementary_PID] = true; } /* else if (registration === 'AC-3' && !already_has_audio) { pmt.common_pids.ac3 = elementary_PID; // DVB AC-3 (FIXME: NEED VERIFY) } */ /* else if (registration === 'EC-3' && !alrady_has_audio) { pmt.common_pids.eac3 = elementary_PID; // DVB EAC-3 (FIXME: NEED VERIFY) } */else if (registration === 'Opus') { pmt.common_pids.opus = elementary_PID; } else if (registration === 'KLVA') { pmt.asynchronous_klv_pids[elementary_PID] = true; } } else if (tag === 0x7F) { // DVB extension descriptor if (elementary_PID === pmt.common_pids.opus) { let ext_desc_tag = data[offset + 2]; let channel_config_code = null; if (ext_desc_tag === 0x80) { // User defined (provisional Opus) channel_config_code = data[offset + 3]; } if (channel_config_code == null) { Log.e(this.TAG, `Not Supported Opus channel count.`); continue; } const meta = { codec: 'opus', channel_count: (channel_config_code & 0x0F) === 0 ? 
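// DVB extension descriptor (tag 0x7F) with ext_desc_tag 0x80 carries the provisional Opus channel
// configuration: the low nibble is the channel count (0 is treated as dual mono and mapped to 2
// channels here), and the sample rate is fixed at 48kHz for Opus-in-TS.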
2 : channel_config_code & 0x0F, channel_config_code, sample_rate: 48000 }; const sample = { codec: 'opus', meta }; if (this.audio_init_segment_dispatched_ == false) { this.audio_metadata_ = meta; this.dispatchAudioInitSegment(sample); } else if (this.detectAudioMetadataChange(sample)) { // flush stashed frames before notify new AudioSpecificConfig this.dispatchAudioMediaSegment(); // notify new AAC AudioSpecificConfig this.dispatchAudioInitSegment(sample); } } } offset += 2 + length; } // provide descriptor for PES private data via callback let descriptors = data.subarray(i + 5, i + 5 + ES_info_length); this.dispatchPESPrivateDataDescriptor(elementary_PID, stream_type, descriptors); } } else if (stream_type === TS_STREAM_TYPE.kMetadata) { if (ES_info_length > 0) { // parse descriptor for PES private data for (let offset = i + 5; offset < i + 5 + ES_info_length;) { let tag = data[offset + 0]; let length = data[offset + 1]; if (tag === 0x26) { let metadata_application_format = data[offset + 2] << 8 | data[offset + 3] << 0; let metadata_application_format_identifier = null; if (metadata_application_format === 0xFFFF) { metadata_application_format_identifier = String.fromCharCode(...Array.from(data.subarray(offset + 4, offset + 4 + 4))); } let metadata_format = data[offset + 4 + (metadata_application_format === 0xFFFF ? 4 : 0)]; let metadata_format_identifier = null; if (metadata_format === 0xFF) { let pad = 4 + (metadata_application_format === 0xFFFF ? 4 : 0) + 1; metadata_format_identifier = String.fromCharCode(...Array.from(data.subarray(offset + pad, offset + pad + 4))); } if (metadata_application_format_identifier === 'ID3 ' && metadata_format_identifier === 'ID3 ') { pmt.timed_id3_pids[elementary_PID] = true; } else if (metadata_format_identifier === 'KLVA') { pmt.synchronous_klv_pids[elementary_PID] = true; } } offset += 2 + length; } } } else if (stream_type === TS_STREAM_TYPE.kSCTE35) { pmt.scte_35_pids[elementary_PID] = true; } i += 5 + ES_info_length; } if (program_number === this.current_program_) { if (this.pmt_ == undefined) { this.player.debug.log(this.TAG_NAME, `Parsed first PMT: ${JSON.stringify(pmt)}`); } this.pmt_ = pmt; if (pmt.common_pids.h264 || pmt.common_pids.h265) { this.has_video_ = true; } if (pmt.common_pids.adts_aac || pmt.common_pids.loas_aac || pmt.common_pids.ac3 || pmt.common_pids.opus || pmt.common_pids.mp3) { this.has_audio_ = true; } } } parseSCTE35(data) { // todo: 待解决 } parseH264Payload(data, pts, dts, random_access_indicator) { let annexb_parser = new H264AnnexBParser(data); let nalu_payload = null; let payload = null; let units = []; let length = 0; let keyframe = false; while ((nalu_payload = annexb_parser.readNextNaluPayload()) != null) { let nalu_avc1 = new H264NaluAVC1(nalu_payload); if (nalu_avc1.type === H264_NAL_TYPE.kSliceSPS) { // Notice: parseSPS requires Nalu without startcode or length-header let details = SPSParser.parseSPS$2(nalu_payload.data); if (!this.video_init_segment_dispatched_) { this.video_metadata_.sps = nalu_avc1; this.video_metadata_.details = details; } else if (this.detectVideoMetadataChange(nalu_avc1, details) === true) { this.player.debug.log(this.TAG_NAME, `H264: Critical h264 metadata has been changed, attempt to re-generate InitSegment`); this.video_metadata_changed_ = true; this.video_metadata_ = { vps: undefined, sps: nalu_avc1, pps: undefined, details: details }; } } else if (nalu_avc1.type === H264_NAL_TYPE.kSlicePPS) { if (!this.video_init_segment_dispatched_ || this.video_metadata_changed_) { 
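// H.264 Annex B walk: SPS + PPS (re)build the AVCDecoderConfigurationRecord and dispatch a fresh
// video init segment whenever critical metadata changed; kSliceIDR, or a non-IDR slice whose packet
// had random_access_indicator set (open-GOP streams), marks the access unit as a keyframe. NALUs are
// only collected into a sample once the init segment has been dispatched, then merged into a single
// payload with dts/pts converted from 90kHz to ms.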
this.video_metadata_.pps = nalu_avc1; if (this.video_metadata_.sps && this.video_metadata_.pps) { if (this.video_metadata_changed_) { // flush stashed frames before changing codec metadata this.dispatchVideoMediaSegment(); } // notify new codec metadata (maybe changed) this.dispatchVideoInitSegment(); } } } else if (nalu_avc1.type === H264_NAL_TYPE.kSliceIDR) { keyframe = true; } else if (nalu_avc1.type === H264_NAL_TYPE.kSliceNonIDR && random_access_indicator === 1) { // For open-gop stream, use random_access_indicator to identify keyframe keyframe = true; } // Push samples to remuxer only if initialization metadata has been dispatched if (this.video_init_segment_dispatched_) { units.push(nalu_avc1); length += nalu_avc1.data.byteLength; } } let pts_ms = Math.floor(pts / this.timescale_); let dts_ms = Math.floor(dts / this.timescale_); if (units.length) { let track = this.video_track_; // units 合并成一个 Uint8Array 赋值给 payload for (let i = 0; i < units.length; i++) { let unit = units[i]; if (payload == null) { payload = unit.data; } else { let temp = new Uint8Array(payload.byteLength + unit.data.byteLength); temp.set(payload, 0); temp.set(unit.data, payload.byteLength); payload = temp; } } let avc_sample = { // units, length, isIFrame: keyframe, dts: dts_ms, pts: pts_ms, cts: pts_ms - dts_ms, payload, type: MEDIA_TYPE.video, isHevc: false }; track.samples.push(avc_sample); // track.length += length; track.length = payload.byteLength; } } parseH265Payload(data, pts, dts, random_access_indicator) { let annexb_parser = new H265AnnexBParser(data); let nalu_payload = null; let payload = null; let units = []; let length = 0; let keyframe = false; while ((nalu_payload = annexb_parser.readNextNaluPayload()) != null) { let nalu_hvc1 = new H265NaluHVC1(nalu_payload); if (nalu_hvc1.type === H265_NAL_TYPE.kSliceVPS) { if (!this.video_init_segment_dispatched_) { let details = H265NaluParser.parseVPS(nalu_payload.data); this.video_metadata_.vps = nalu_hvc1; this.video_metadata_.details = { ...this.video_metadata_.details, ...details }; } } else if (nalu_hvc1.type === H265_NAL_TYPE.kSliceSPS) { let details = H265NaluParser.parseSPS(nalu_payload.data); if (!this.video_init_segment_dispatched_) { this.video_metadata_.sps = nalu_hvc1; this.video_metadata_.details = { ...this.video_metadata_.details, ...details }; } else if (this.detectVideoMetadataChange(nalu_hvc1, details) === true) { this.player.debug.log(this.TAG_NAME, `H265: Critical h265 metadata has been changed, attempt to re-generate InitSegment`); this.video_metadata_changed_ = true; this.video_metadata_ = { vps: undefined, sps: nalu_hvc1, pps: undefined, details: details }; } } else if (nalu_hvc1.type === H265_NAL_TYPE.kSlicePPS) { if (!this.video_init_segment_dispatched_ || this.video_metadata_changed_) { let details = H265NaluParser.parsePPS(nalu_payload.data); this.video_metadata_.pps = nalu_hvc1; this.video_metadata_.details = { ...this.video_metadata_.details, ...details }; if (this.video_metadata_.vps && this.video_metadata_.sps && this.video_metadata_.pps) { if (this.video_metadata_changed_) { // flush stashed frames before changing codec metadata this.dispatchVideoMediaSegment(); } // notify new codec metadata (maybe changed) this.dispatchVideoInitSegment(); } } } else if (nalu_hvc1.type === H265_NAL_TYPE.kSliceIDR_W_RADL || nalu_hvc1.type === H265_NAL_TYPE.kSliceIDR_N_LP || nalu_hvc1.type === H265_NAL_TYPE.kSliceCRA_NUT) { keyframe = true; } // Push samples to remuxer only if initialization metadata has been dispatched if 
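// Same flow for H.265, except that VPS, SPS and PPS are all required before
// dispatchVideoInitSegment() can build the HEVCDecoderConfigurationRecord, and keyframes are the
// IDR_W_RADL / IDR_N_LP / CRA_NUT NAL types.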
(this.video_init_segment_dispatched_) { units.push(nalu_hvc1); length += nalu_hvc1.data.byteLength; } } let pts_ms = Math.floor(pts / this.timescale_); let dts_ms = Math.floor(dts / this.timescale_); if (units.length) { let track = this.video_track_; // units 合并成一个 Uint8Array 赋值给 payload for (let i = 0; i < units.length; i++) { let unit = units[i]; if (payload == null) { payload = unit.data; } else { let temp = new Uint8Array(payload.byteLength + unit.data.byteLength); temp.set(payload, 0); temp.set(unit.data, payload.byteLength); payload = temp; } } let hvc_sample = { type: MEDIA_TYPE.video, // units, length, isIFrame: keyframe, dts: dts_ms, pts: pts_ms, cts: pts_ms - dts_ms, payload, isHevc: true }; track.samples.push(hvc_sample); // track.length += length; track.length = payload.byteLength; } } detectVideoMetadataChange(new_sps, new_details) { if (new_details.codec_mimetype !== this.video_metadata_.details.codec_mimetype) { this.player.debug.log(this.TAG_NAME, `Video: Codec mimeType changed from ` + `${this.video_metadata_.details.codec_mimetype} to ${new_details.codec_mimetype}`); return true; } if (new_details.codec_size.width !== this.video_metadata_.details.codec_size.width || new_details.codec_size.height !== this.video_metadata_.details.codec_size.height) { let old_size = this.video_metadata_.details.codec_size; let new_size = new_details.codec_size; this.player.debug.log(this.TAG_NAME, `Video: Coded Resolution changed from ` + `${old_size.width}x${old_size.height} to ${new_size.width}x${new_size.height}`); return true; } if (new_details.present_size.width !== this.video_metadata_.details.present_size.width) { this.player.debug.log(this.TAG_NAME, `Video: Present resolution width changed from ` + `${this.video_metadata_.details.present_size.width} to ${new_details.present_size.width}`); return true; } return false; } isInitSegmentDispatched() { if (this.has_video_ && this.has_audio_) { // both video & audio return this.video_init_segment_dispatched_ && this.audio_init_segment_dispatched_; } if (this.has_video_ && !this.has_audio_) { // video only return this.video_init_segment_dispatched_; } if (!this.has_video_ && this.has_audio_) { // audio only return this.audio_init_segment_dispatched_; } return false; } dispatchVideoInitSegment() { let details = this.video_metadata_.details; let meta = {}; meta.type = 'video'; meta.id = this.video_track_.id; meta.timescale = 1000; meta.duration = this.duration_; meta.codecWidth = details.codec_size.width; meta.codecHeight = details.codec_size.height; meta.presentWidth = details.present_size.width; meta.presentHeight = details.present_size.height; meta.profile = details.profile_string; meta.level = details.level_string; meta.bitDepth = details.bit_depth; meta.chromaFormat = details.chroma_format; meta.sarRatio = details.sar_ratio; meta.frameRate = details.frame_rate; let fps_den = meta.frameRate.fps_den; let fps_num = meta.frameRate.fps_num; meta.refSampleDuration = 1000 * (fps_den / fps_num); meta.codec = details.codec_mimetype; let isHevc = false; // hevc if (this.video_metadata_.vps) { isHevc = true; let vps_without_header = this.video_metadata_.vps.data.subarray(4); let sps_without_header = this.video_metadata_.sps.data.subarray(4); let pps_without_header = this.video_metadata_.pps.data.subarray(4); meta.hvcc = hevcEncoderConfigurationRecord$2({ vps: vps_without_header, sps: sps_without_header, pps: pps_without_header }); if (this.video_init_segment_dispatched_ == false) { this.player.debug.log(this.TAG_NAME, `Generated first 
HEVCDecoderConfigurationRecord for mimeType: ${meta.codec}`); } if (meta.hvcc) { this._doDecodeByTs(meta.hvcc, MEDIA_TYPE.video, 0, true, 0); } } else { // avc let sps_without_header = this.video_metadata_.sps.data.subarray(4); let pps_without_header = this.video_metadata_.pps.data.subarray(4); meta.avcc = avcEncoderConfigurationRecord$2({ sps: sps_without_header, pps: pps_without_header }); if (this.video_init_segment_dispatched_ == false) { this.player.debug.log(this.TAG_NAME, `Generated first AVCDecoderConfigurationRecord for mimeType: ${meta.codec}`); } if (meta.avcc) { this._doDecodeByTs(meta.avcc, MEDIA_TYPE.video, 0, true, 0); } } // this.onTrackMetadata('video', meta); this.video_init_segment_dispatched_ = true; this.video_metadata_changed_ = false; // notify new MediaInfo let mi = this.media_info_; mi.hasVideo = true; mi.width = meta.codecWidth; mi.height = meta.codecHeight; mi.fps = meta.frameRate.fps; mi.profile = meta.profile; mi.level = meta.level; mi.refFrames = details.ref_frames; mi.chromaFormat = details.chroma_format_string; mi.sarNum = meta.sarRatio.width; mi.sarDen = meta.sarRatio.height; mi.videoCodec = meta.codec; if (mi.hasAudio && mi.audioCodec) { mi.mimeType = `video/mp2t; codecs="${mi.videoCodec},${mi.audioCodec}"`; } else { mi.mimeType = `video/mp2t; codecs="${mi.videoCodec}"`; } this.player.video.updateVideoInfo({ width: meta.codecWidth, height: meta.codecHeight, encTypeCode: isHevc ? VIDEO_ENC_CODE.h265 : VIDEO_ENC_CODE.h264 }); // if (mi.isComplete()) { // this.onMediaInfo(mi); // } // console.error('dispatchVideoInitSegment', mi, mi.isComplete()); } dispatchVideoMediaSegment() { if (this.isInitSegmentDispatched()) { if (this.video_track_.length) { this._preDoDecode(); } } } dispatchAudioMediaSegment() { if (this.isInitSegmentDispatched()) { if (this.audio_track_.length) { // this.onDataAvailable(this.audio_track_, null); this._preDoDecode(); } } } dispatchAudioVideoMediaSegment() { if (this.isInitSegmentDispatched()) { if (this.audio_track_.length || this.video_track_.length) { this._preDoDecode(); } } } parseADTSAACPayload(data, pts) { if (this.has_video_ && !this.video_init_segment_dispatched_) { // If first video IDR frame hasn't been detected, // Wait for first IDR frame and video init segment being dispatched return; } if (this.aac_last_incomplete_data_) { let buf = new Uint8Array(data.byteLength + this.aac_last_incomplete_data_.byteLength); buf.set(this.aac_last_incomplete_data_, 0); buf.set(data, this.aac_last_incomplete_data_.byteLength); data = buf; } let ref_sample_duration; let base_pts_ms; if (pts != undefined) { base_pts_ms = pts / this.timescale_; } if (this.audio_metadata_.codec === 'aac') { if (pts == undefined && this.audio_last_sample_pts_ != undefined) { ref_sample_duration = 1024 / this.audio_metadata_.sampling_frequency * 1000; base_pts_ms = this.audio_last_sample_pts_ + ref_sample_duration; } else if (pts == undefined) { this.player.debug.warn(this.TAG_NAME, `AAC: Unknown pts`); return; } if (this.aac_last_incomplete_data_ && this.audio_last_sample_pts_) { ref_sample_duration = 1024 / this.audio_metadata_.sampling_frequency * 1000; let new_pts_ms = this.audio_last_sample_pts_ + ref_sample_duration; if (Math.abs(new_pts_ms - base_pts_ms) > 1) { this.player.debug.warn(this.TAG_NAME, `AAC: Detected pts overlapped, ` + `expected: ${new_pts_ms}ms, PES pts: ${base_pts_ms}ms`); base_pts_ms = new_pts_ms; } } } let adts_parser = new AACADTSParser(data); let aac_frame = null; let sample_pts_ms = base_pts_ms; let last_sample_pts_ms; while 
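// ADTS AAC payload: any frame left incomplete by the previous PES packet (aac_last_incomplete_data_)
// is prepended first. When the PES carried no pts, the timestamp is extrapolated from the last sample
// plus one frame duration (1024 / sampling_frequency * 1000 ms). The loop below then reads ADTS
// frames one by one, (re)dispatching the audio init segment whenever the AudioSpecificConfig changes,
// and queues each frame as [0xAF, 0x01, ...raw AAC] with pts = dts.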
((aac_frame = adts_parser.readNextAACFrame()) != null) {
  ref_sample_duration = 1024 / aac_frame.sampling_frequency * 1000;
  const audio_sample = { codec: 'aac', data: aac_frame };
  if (this.audio_init_segment_dispatched_ == false) {
    this.audio_metadata_ = {
      codec: 'aac',
      audio_object_type: aac_frame.audio_object_type,
      sampling_freq_index: aac_frame.sampling_freq_index,
      sampling_frequency: aac_frame.sampling_frequency,
      channel_config: aac_frame.channel_config
    };
    this.dispatchAudioInitSegment(audio_sample);
  } else if (this.detectAudioMetadataChange(audio_sample)) {
    // flush stashed frames before notify new AudioSpecificConfig
    this.dispatchAudioMediaSegment();
    // notify new AAC AudioSpecificConfig
    this.dispatchAudioInitSegment(audio_sample);
  }
  last_sample_pts_ms = sample_pts_ms;
  let sample_pts_ms_int = Math.floor(sample_pts_ms);
  const arrayBuffer = new Uint8Array(aac_frame.data.length + 2);
  arrayBuffer.set([0xAF, 0x01], 0);
  arrayBuffer.set(aac_frame.data, 2);
  let aac_sample = {
    // unit: aac_frame.data,
    payload: arrayBuffer,
    length: arrayBuffer.byteLength,
    pts: sample_pts_ms_int,
    dts: sample_pts_ms_int,
    type: MEDIA_TYPE.audio
  };
  this.audio_track_.samples.push(aac_sample);
  this.audio_track_.length += arrayBuffer.byteLength;
  sample_pts_ms += ref_sample_duration;
}
if (adts_parser.hasIncompleteData()) {
  this.aac_last_incomplete_data_ = adts_parser.getIncompleteData();
}
if (last_sample_pts_ms) {
  this.audio_last_sample_pts_ = last_sample_pts_ms;
} }
parseLOASAACPayload(data, pts) {
  if (this.has_video_ && !this.video_init_segment_dispatched_) {
    // If first video IDR frame hasn't been detected,
    // Wait for first IDR frame and video init segment being dispatched
    return;
  }
  if (this.aac_last_incomplete_data_) {
    let buf = new Uint8Array(data.byteLength + this.aac_last_incomplete_data_.byteLength);
    buf.set(this.aac_last_incomplete_data_, 0);
    buf.set(data, this.aac_last_incomplete_data_.byteLength);
    data = buf;
  }
  let ref_sample_duration;
  let base_pts_ms;
  if (pts != undefined) {
    base_pts_ms = pts / this.timescale_;
  }
  if (this.audio_metadata_.codec === 'aac') {
    if (pts == undefined && this.audio_last_sample_pts_ != undefined) {
      ref_sample_duration = 1024 / this.audio_metadata_.sampling_frequency * 1000;
      base_pts_ms = this.audio_last_sample_pts_ + ref_sample_duration;
    } else if (pts == undefined) {
      this.player.debug.warn(this.TAG_NAME, `AAC: Unknown pts`);
      return;
    }
    if (this.aac_last_incomplete_data_ && this.audio_last_sample_pts_) {
      ref_sample_duration = 1024 / this.audio_metadata_.sampling_frequency * 1000;
      let new_pts_ms = this.audio_last_sample_pts_ + ref_sample_duration;
      if (Math.abs(new_pts_ms - base_pts_ms) > 1) {
        this.player.debug.warn(this.TAG_NAME, `AAC: Detected pts overlapped, ` + `expected: ${new_pts_ms}ms, PES pts: ${base_pts_ms}ms`);
        base_pts_ms = new_pts_ms;
      }
    }
  }
  let loas_parser = new AACLOASParser(data);
  let aac_frame = null;
  let sample_pts_ms = base_pts_ms;
  let last_sample_pts_ms;
  while ((aac_frame = loas_parser.readNextAACFrame(this.loas_previous_frame ??
undefined)) != null) { this.loas_previous_frame = aac_frame; ref_sample_duration = 1024 / aac_frame.sampling_frequency * 1000; const audio_sample = { codec: 'aac', data: aac_frame }; if (this.audio_init_segment_dispatched_ == false) { this.audio_metadata_ = { codec: 'aac', audio_object_type: aac_frame.audio_object_type, sampling_freq_index: aac_frame.sampling_freq_index, sampling_frequency: aac_frame.sampling_frequency, channel_config: aac_frame.channel_config }; this.dispatchAudioInitSegment(audio_sample); } else if (this.detectAudioMetadataChange(audio_sample)) { // flush stashed frames before notify new AudioSpecificConfig this.dispatchAudioMediaSegment(); // notify new AAC AudioSpecificConfig this.dispatchAudioInitSegment(audio_sample); } last_sample_pts_ms = sample_pts_ms; let sample_pts_ms_int = Math.floor(sample_pts_ms); const arrayBuffer = new Uint8Array(aac_frame.data.length + 2); arrayBuffer.set([0xAF, 0x01], 0); arrayBuffer.set(aac_frame.data, 2); let aac_sample = { // unit: aac_frame.data, payload: arrayBuffer, length: arrayBuffer.byteLength, pts: sample_pts_ms_int, dts: sample_pts_ms_int, type: MEDIA_TYPE.audio }; this.audio_track_.samples.push(aac_sample); this.audio_track_.length += arrayBuffer.byteLength; sample_pts_ms += ref_sample_duration; } if (loas_parser.hasIncompleteData()) { this.aac_last_incomplete_data_ = loas_parser.getIncompleteData(); } if (last_sample_pts_ms) { this.audio_last_sample_pts_ = last_sample_pts_ms; } } parseAC3Payload(data, pts) { // todo: 待开发 } parseEAC3Payload(data, pts) { // todo: 待开发 } parseOpusPayload(data, pts) { // todo: 待开发 } parseMP3Payload(data, pts) { if (this.has_video_ && !this.video_init_segment_dispatched_) { // If first video IDR frame hasn't been detected, // Wait for first IDR frame and video init segment being dispatched return; } let _mpegAudioV10SampleRateTable = [44100, 48000, 32000, 0]; let _mpegAudioV20SampleRateTable = [22050, 24000, 16000, 0]; let _mpegAudioV25SampleRateTable = [11025, 12000, 8000, 0]; let ver = data[1] >>> 3 & 0x03; let layer = (data[1] & 0x06) >> 1; (data[2] & 0xF0) >>> 4; let sampling_freq_index = (data[2] & 0x0C) >>> 2; let channel_mode = data[3] >>> 6 & 0x03; let channel_count = channel_mode !== 3 ? 
2 : 1; let sample_rate = 0; let object_type = 34; // Layer-3, listed in MPEG-4 Audio Object Types switch (ver) { case 0: // MPEG 2.5 sample_rate = _mpegAudioV25SampleRateTable[sampling_freq_index]; break; case 2: // MPEG 2 sample_rate = _mpegAudioV20SampleRateTable[sampling_freq_index]; break; case 3: // MPEG 1 sample_rate = _mpegAudioV10SampleRateTable[sampling_freq_index]; break; } switch (layer) { case 1: // Layer 3 object_type = 34; break; case 2: // Layer 2 object_type = 33; break; case 3: // Layer 1 object_type = 32; break; } const sample = {}; sample.object_type = object_type; sample.sample_rate = sample_rate; sample.channel_count = channel_count; sample.data = data; const audio_sample = { codec: 'mp3', data: sample }; if (this.audio_init_segment_dispatched_ == false) { this.audio_metadata_ = { codec: 'mp3', object_type, sample_rate, channel_count }; this.dispatchAudioInitSegment(audio_sample); } else if (this.detectAudioMetadataChange(audio_sample)) { // flush stashed frames before notify new AudioSpecificConfig this.dispatchAudioMediaSegment(); // notify new AAC AudioSpecificConfig this.dispatchAudioInitSegment(audio_sample); } let mp3_sample = { // unit: data, payload: data, length: data.byteLength, pts: pts / this.timescale_, dts: pts / this.timescale_, type: MEDIA_TYPE.audio }; this.audio_track_.samples.push(mp3_sample); this.audio_track_.length += data.byteLength; } detectAudioMetadataChange(sample) { if (sample.codec !== this.audio_metadata_.codec) { this.player.debug.log(this.TAG_NAME, `Audio: Audio Codecs changed from ` + `${this.audio_metadata_.codec} to ${sample.codec}`); return true; } if (sample.codec === 'aac' && this.audio_metadata_.codec === 'aac') { const frame = sample.data; if (frame.audio_object_type !== this.audio_metadata_.audio_object_type) { this.player.debug.log(this.TAG_NAME, `AAC: AudioObjectType changed from ` + `${this.audio_metadata_.audio_object_type} to ${frame.audio_object_type}`); return true; } if (frame.sampling_freq_index !== this.audio_metadata_.sampling_freq_index) { this.player.debug.log(this.TAG_NAME, `AAC: SamplingFrequencyIndex changed from ` + `${this.audio_metadata_.sampling_freq_index} to ${frame.sampling_freq_index}`); return true; } if (frame.channel_config !== this.audio_metadata_.channel_config) { this.player.debug.log(this.TAG_NAME, `AAC: Channel configuration changed from ` + `${this.audio_metadata_.channel_config} to ${frame.channel_config}`); return true; } } else if (sample.codec === 'ac-3' && this.audio_metadata_.codec === 'ac-3') { const frame = sample.data; if (frame.sampling_frequency !== this.audio_metadata_.sampling_frequency) { this.player.debug.log(this.TAG_NAME, `AC3: Sampling Frequency changed from ` + `${this.audio_metadata_.sampling_frequency} to ${frame.sampling_frequency}`); return true; } if (frame.bit_stream_identification !== this.audio_metadata_.bit_stream_identification) { this.player.debug.log(this.TAG_NAME, `AC3: Bit Stream Identification changed from ` + `${this.audio_metadata_.bit_stream_identification} to ${frame.bit_stream_identification}`); return true; } if (frame.bit_stream_mode !== this.audio_metadata_.bit_stream_mode) { this.player.debug.log(this.TAG_NAME, `AC3: BitStream Mode changed from ` + `${this.audio_metadata_.bit_stream_mode} to ${frame.bit_stream_mode}`); return true; } if (frame.channel_mode !== this.audio_metadata_.channel_mode) { this.player.debug.log(this.TAG_NAME, `AC3: Channel Mode changed from ` + `${this.audio_metadata_.channel_mode} to ${frame.channel_mode}`); return true; } if 
(frame.low_frequency_effects_channel_on !== this.audio_metadata_.low_frequency_effects_channel_on) { this.player.debug.log(this.TAG_NAME, `AC3: Low Frequency Effects Channel On changed from ` + `${this.audio_metadata_.low_frequency_effects_channel_on} to ${frame.low_frequency_effects_channel_on}`); return true; } } else if (sample.codec === 'opus' && this.audio_metadata_.codec === 'opus') { const data = sample.meta; if (data.sample_rate !== this.audio_metadata_.sample_rate) { this.player.debug.log(this.TAG_NAME, `Opus: SamplingFrequencyIndex changed from ` + `${this.audio_metadata_.sample_rate} to ${data.sample_rate}`); return true; } if (data.channel_count !== this.audio_metadata_.channel_count) { this.player.debug.log(this.TAG_NAME, `Opus: Channel count changed from ` + `${this.audio_metadata_.channel_count} to ${data.channel_count}`); return true; } } else if (sample.codec === 'mp3' && this.audio_metadata_.codec === 'mp3') { const data = sample.data; if (data.object_type !== this.audio_metadata_.object_type) { this.player.debug.log(this.TAG_NAME, `MP3: AudioObjectType changed from ` + `${this.audio_metadata_.object_type} to ${data.object_type}`); return true; } if (data.sample_rate !== this.audio_metadata_.sample_rate) { this.player.debug.log(this.TAG_NAME, `MP3: SamplingFrequencyIndex changed from ` + `${this.audio_metadata_.sample_rate} to ${data.sample_rate}`); return true; } if (data.channel_count !== this.audio_metadata_.channel_count) { this.player.debug.log(this.TAG_NAME, `MP3: Channel count changed from ` + `${this.audio_metadata_.channel_count} to ${data.channel_count}`); return true; } } return false; } dispatchAudioInitSegment(sample) { let meta = {}; meta.type = 'audio'; meta.id = this.audio_track_.id; meta.timescale = 1000; meta.duration = this.duration_; let encTypeCode = ''; if (this.audio_metadata_.codec === 'aac') { let aac_frame = sample.codec === 'aac' ? sample.data : null; let audio_specific_config = new AudioSpecificConfig(aac_frame); meta.audioSampleRate = audio_specific_config.sampling_rate; meta.audioSampleRateIndex = audio_specific_config.sampling_index; meta.channelCount = audio_specific_config.channel_count; meta.codec = audio_specific_config.codec_mimetype; meta.originalCodec = audio_specific_config.original_codec_mimetype; meta.config = audio_specific_config.config; meta.refSampleDuration = 1024 / meta.audioSampleRate * meta.timescale; encTypeCode = AUDIO_ENC_CODE.AAC; const aacADTSHeader = aacEncoderConfigurationRecordV2({ profile: this.player._opt.mseDecodeAudio ? 
audio_specific_config.object_type : audio_specific_config.original_object_type, sampleRate: meta.audioSampleRateIndex, channel: meta.channelCount }); console.error('aacADTSHeader', `profile: ${audio_specific_config.object_type}, sampleRate: ${meta.audioSampleRateIndex}, channel: ${meta.channelCount}`); this._doDecodeByTs(aacADTSHeader, MEDIA_TYPE.audio, 0, false, 0); } else if (this.audio_metadata_.codec === 'ac-3') ; else if (this.audio_metadata_.codec === 'ec-3') ; else if (this.audio_metadata_.codec === 'opus') ; else if (this.audio_metadata_.codec === 'mp3') { meta.audioSampleRate = this.audio_metadata_.sample_rate; meta.channelCount = this.audio_metadata_.channel_count; meta.codec = 'mp3'; meta.originalCodec = 'mp3'; meta.config = undefined; encTypeCode = AUDIO_ENC_CODE.MP3; } if (this.audio_init_segment_dispatched_ == false) { this.player.debug.log(this.TAG_NAME, `Generated first AudioSpecificConfig for mimeType: ${meta.codec}`); } // todo:待开发 // this.onTrackMetadata('audio', meta); // console.error('audio', meta); this.audio_init_segment_dispatched_ = true; this.video_metadata_changed_ = false; // notify new MediaInfo let mi = this.media_info_; mi.hasAudio = true; mi.audioCodec = meta.originalCodec; mi.audioSampleRate = meta.audioSampleRate; mi.audioChannelCount = meta.channelCount; if (mi.hasVideo && mi.videoCodec) { mi.mimeType = `video/mp2t; codecs="${mi.videoCodec},${mi.audioCodec}"`; } else { mi.mimeType = `video/mp2t; codecs="${mi.audioCodec}"`; } if (encTypeCode) { this.player.audio.updateAudioInfo({ encTypeCode, channels: meta.channelCount, sampleRate: meta.audioSampleRate }); } // todo 待完成 // console.error('todo:dispatchAudioInitSegment', mi, mi.isComplete()); } dispatchPESPrivateDataDescriptor(pid, stream_type, descriptor) { // todo: 待解决 } parsePESPrivateDataPayload(data, pts, dts, pid, stream_id) { let private_data = new PESPrivateData(); private_data.pid = pid; private_data.stream_id = stream_id; private_data.len = data.byteLength; private_data.data = data; if (pts != undefined) { let pts_ms = Math.floor(pts / this.timescale_); private_data.pts = pts_ms; } else { private_data.nearest_pts = this.getNearestTimestampMilliseconds(); } if (dts != undefined) { let dts_ms = Math.floor(dts / this.timescale_); private_data.dts = dts_ms; } // todo:待解决 // console.error('todo :parsePESPrivateDataPayload', private_data); } parseTimedID3MetadataPayload(data, pts, dts, pid, stream_id) { // todo 待开发 this.player.debug.log(this.TAG_NAME, `Timed ID3 Metadata: pid=${pid}, pts=${pts}, dts=${dts}, stream_id=${stream_id}`); } parseSynchronousKLVMetadataPayload(data, pts, dts, pid, stream_id) { this.player.debug.log(this.TAG_NAME, `Synchronous KLV Metadata: pid=${pid}, pts=${pts}, dts=${dts}, stream_id=${stream_id}`); } parseAsynchronousKLVMetadataPayload(data, pid, stream_id) { // todo: 待开发 this.player.debug.log(this.TAG_NAME, `Asynchronous KLV Metadata: pid=${pid}, stream_id=${stream_id}`); } parseSMPTE2038MetadataPayload(data, pts, dts, pid, stream_id) { // todo: 待开发 this.player.debug.log(this.TAG_NAME, `SMPTE 2038 Metadata: pid=${pid}, pts=${pts}, dts=${dts}, stream_id=${stream_id}`); } getNearestTimestampMilliseconds() { // Prefer using last audio sample pts if audio track exists if (this.audio_last_sample_pts_ != undefined) { return Math.floor(this.audio_last_sample_pts_); } else if (this.last_pcr_ != undefined) { // Fallback to PCR time if audio track doesn't exist const pcr_time_ms = Math.floor(this.last_pcr_ / 300 / this.timescale_); return pcr_time_ms; } return undefined; } // 
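// The _preDoDecode() method below drains the demuxer's pending video and audio sample queues in
// one pass: when both tracks hold data, the two arrays are concatenated and re-ordered by DTS so
// the decoder receives a single interleaved, monotonically increasing stream. A minimal
// standalone sketch of that merge step (illustrative only; interleaveByDts is not part of this
// bundle, and the samples are assumed to carry a numeric dts field):
//
//   function interleaveByDts(videoSamples, audioSamples) {
//     if (audioSamples.length === 0) {
//       return videoSamples.slice();
//     }
//     return videoSamples.concat(audioSamples).sort((a, b) => a.dts - b.dts);
//   }
//
//   // e.g. interleaveByDts([{ dts: 0 }, { dts: 40 }], [{ dts: 21 }]) yields dts order 0, 21, 40
//
// Array.prototype.sort is stable in modern engines, so samples sharing a DTS keep their
// original relative order after the merge.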
_preDoDecode() { const videoTrack = this.video_track_; const audioTrack = this.audio_track_; let allSampleList = videoTrack.samples; if (audioTrack.samples.length > 0) { allSampleList = videoTrack.samples.concat(audioTrack.samples); allSampleList = allSampleList.sort((a, b) => { return a.dts - b.dts; }); } allSampleList.forEach(sample => { const arrayBuffer = new Uint8Array(sample.payload); delete sample.payload; if (sample.type === MEDIA_TYPE.video) { this._doDecodeVideo({ ...sample, payload: arrayBuffer }); } else if (sample.type === MEDIA_TYPE.audio) { this._doDecodeAudio({ ...sample, payload: arrayBuffer }); } }); // clear videoTrack.samples = []; videoTrack.length = 0; audioTrack.samples = []; audioTrack.length = 0; } _doDecodeVideo(sample) { const uint8Array = new Uint8Array(sample.payload); let packet = null; if (sample.isHevc) { // add 5 header packet = hevcEncoderNalePacketNotLength(uint8Array, sample.isIFrame); } else { packet = avcEncoderNalePacketNotLength(uint8Array, sample.isIFrame); } this.player.updateStats({ dts: sample.dts, vbps: packet.byteLength }); if (sample.isIFrame) { this.calcIframeIntervalTimestamp(sample.dts); } let payloadBuffer = this.cryptoPayload(packet, sample.isIFrame); this._doDecodeByTs(payloadBuffer, MEDIA_TYPE.video, sample.dts, sample.isIFrame, sample.cts); } _doDecodeAudio(sample) { const uint8Array = new Uint8Array(sample.payload); this.player.updateStats({ abps: uint8Array.byteLength }); let payloadBuffer = uint8Array; if (isTrue(this.player._opt.m7sCryptoAudio)) { payloadBuffer = this.cryptoPayloadAudio(uint8Array); } this._doDecodeByTs(payloadBuffer, MEDIA_TYPE.audio, sample.dts, false, 0); } } class Demux { constructor(player) { const Loader = Demux.getLoaderFactory(player); return new Loader(player); } static getLoaderFactory(player) { const type = player._opt.demuxType; if (type === DEMUX_TYPE.m7s) { return M7sLoader; } else if (type === DEMUX_TYPE.flv || player.isWebrtcH265()) { return FlvLoader; } else if (type === DEMUX_TYPE.webTransport) { return WebTransportLoader; } else if (type === DEMUX_TYPE.nakedFlow) { return NakedFlowLoader; } else if (type === DEMUX_TYPE.fmp4) { return Fmp4Loader; } else if (type === DEMUX_TYPE.mpeg4) { return Mpeg4Loader; } else if (type === DEMUX_TYPE.ts) { return TsLoaderV2; // return TsLoader; } else { return EmptyLoader; } } } class WebcodecsDecoder extends Emitter { constructor(player) { super(); this.player = player; this.TAG_NAME = 'Webcodecs'; this.hasInit = false; this.isDecodeFirstIIframe = isFalse(player._opt.checkFirstIFrame) ? 
true : false; this.isInitInfo = false; this.prevTimestamp = null; this.decodeDiffTimestamp = null; this.prevDts = null; this.decoder = null; this.isWidthOrHeightChanged = false; this.initDecoder(); player.debug.log(this.TAG_NAME, 'init'); } destroy() { if (this.decoder) { if (isFalse(this.isDecodeStateClosed())) { this.decoder.close(); } this.decoder = null; } this.prevTimestamp = null; this.decodeDiffTimestamp = null; this.prevDts = null; this.hasInit = false; this.isInitInfo = false; this.isDecodeFirstIIframe = false; this.isWidthOrHeightChanged = false; this.off(); this.player.debug.log(this.TAG_NAME, 'destroy'); } initDecoder() { const _this = this; this.decoder = new VideoDecoder({ output(videoFrame) { _this.handleDecode(videoFrame); }, error(error) { _this.handleError(error); } }); } handleDecode(videoFrame) { if (!this.isInitInfo) { this.player.video.updateVideoInfo({ width: videoFrame.codedWidth, height: videoFrame.codedHeight }); this.player.video.initCanvasViewSize(); this.isInitInfo = true; } // console.log(this.TAG_NAME, 'handleDecode duration and timestamp', videoFrame.duration, videoFrame.timestamp) // player if (this.player.isPlayer()) { this.player.updateStats({ dfps: true }); if (!this.player._times.videoStart) { this.player._times.videoStart = now$2(); this.player.handlePlayToRenderTimes(); } this.player.video.render({ videoFrame, ts: videoFrame.timestamp }); this.player.handleRender(); } else if (this.player.isPlayback()) { // playback this.player.updateStats({ dfps: true }); if (isFalse(this.player.playbackPause)) { // is not pause if (this.player.playback.isUseLocalCalculateTime) { this.player.playback.increaseLocalTimestamp(); } if (this.player.playback.isUseFpsRender) { this.player.video.pushData({ videoFrame, ts: videoFrame.timestamp }); } else { this.player.video.render$2({ videoFrame, ts: videoFrame.timestamp }); } } else { // 暂停的时候,如果不需要清除缓存 if (isFalse(this.player.playback.isPlaybackPauseClearCache) && this.player.playback.isCacheBeforeDecodeForFpsRender) { if (this.player.playback.isUseFpsRender) { this.player.video.pushData({ videoFrame, ts: videoFrame.timestamp }); } } } } } handleError(error) { this.player.debug.error(this.TAG_NAME, 'VideoDecoder handleError:', error.code, error); const errorString = error.toString(); if (errorString.indexOf(WCS_ERROR.unsupportedConfiguration) !== -1) { this.player.emitError(EVENTS_ERROR.webcodecsUnsupportedConfigurationError, errorString); } else if (errorString.indexOf(WCS_ERROR.decoderFailure) !== -1) { this.player.emitError(EVENTS_ERROR.webcodecsDecodeError, errorString); } else if (errorString.indexOf(WCS_ERROR.decodingError) !== -1) { this.player.emitError(EVENTS_ERROR.webcodecsDecodeError, errorString); } else if (errorString.indexOf(WCS_ERROR.decoderError) !== -1) { this.player.emitError(EVENTS_ERROR.webcodecsDecodeError, errorString); } else if (errorString.indexOf(WCS_ERROR.hevcDecodingIsNotSupported) !== -1) { this.player.emitError(EVENTS_ERROR.webcodecsH265NotSupport); } } decodeVideo(payload, ts, isIframe, cts) { // this.player.debug.log('Webcodecs decoder', 'decodeVideo', ts, isIframe); const player = this.player; if (!player) { return; } if (this.player.isDestroyedOrClosed()) { this.player.debug.warn(this.TAG_NAME, 'decodeVideo() player is destroyed'); return; } if (!this.hasInit) { if (isIframe && payload[1] === AVC_PACKET_TYPE.sequenceHeader) { const videoCodec = payload[0] & 0x0F; this.player.video.updateVideoInfo({ encTypeCode: videoCodec }); // 如果解码出来的是 if (videoCodec === VIDEO_ENC_CODE.h265 && 
!supportWCSDecodeHevc()) { const browserInfo = getBrowser(); this.player.debug.warn(this.TAG_NAME, 'WebcodecsDecoder not support hevc decode', browserInfo.type, browserInfo.version); this.player.emitError(EVENTS_ERROR.webcodecsH265NotSupport); return; } if (!this.player._times.decodeStart) { this.player._times.decodeStart = now$2(); } let config = null; let videInfo = null; const extraData = payload.slice(5); if (videoCodec === VIDEO_ENC_CODE.h264) { // config = formatAvcVideoDecoderConfigure(extraData); videInfo = parseAVCDecoderConfigurationRecord(extraData); config = { codec: videInfo.codec, description: extraData }; } else if (videoCodec === VIDEO_ENC_CODE.h265) { // config = formatHevcVideoDecoderConfigure(extraData); videInfo = parseHEVCDecoderConfigurationRecord$4(extraData); config = { codec: videInfo.codec, description: extraData }; } if (videInfo && videInfo.codecWidth && videInfo.codecHeight) { config.codedHeight = videInfo.codecHeight; config.codedWidth = videInfo.codecWidth; } if (this.player.recorder && this.player._opt.recordType === FILE_SUFFIX.mp4) { this.player.recorder.initMetaData(payload, videoCodec); } if (!config || config && !config.codec) { this.player.debug.warn(this.TAG_NAME, 'decodeVideo and webcodecs configure is', JSON.stringify(config)); this.player.emitError(EVENTS_ERROR.webcodecsDecodeConfigureError); return; } // console.log('webcodecs decodeVideo config is ', config); this.player.debug.log(this.TAG_NAME, `decoder.configure() and codec is ${config.codec}`); try { this.decoder.configure(config); this.hasInit = true; } catch (e) { this.player.debug.log(this.TAG_NAME, 'configure error', e.code, e); const errorString = e.toString(); if (errorString.indexOf(WCS_ERROR.hevcDecodingIsNotSupported) !== -1) { this.player.emitError(EVENTS_ERROR.webcodecsH265NotSupport); } else { this.player.emitError(EVENTS_ERROR.webcodecsDecodeConfigureError); } } } } else { // fix : Uncaught DOMException: Failed to execute 'decode' on 'VideoDecoder': A key frame is required after configure() or flush(). 
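// The fix note above refers to the WebCodecs rule that, after configure() or flush(), the first
// EncodedVideoChunk handed to VideoDecoder.decode() must be a key frame; that is why decoding is
// gated on isDecodeFirstIIframe below. A minimal, self-contained sketch of the same
// configure -> key -> delta flow (illustrative only; the codec string, description buffer and
// frame byte arrays are assumed placeholders, not values produced by this bundle):
//
//   const decoder = new VideoDecoder({
//     output: (frame) => { frame.close(); },            // render or copy the frame, then release it
//     error: (e) => console.warn('decode error', e)
//   });
//   const config = { codec: 'avc1.64001f', description: avcDecoderConfigurationRecord };
//   VideoDecoder.isConfigSupported(config).then(({ supported }) => {
//     if (!supported) return;
//     decoder.configure(config);
//     decoder.decode(new EncodedVideoChunk({ type: 'key', timestamp: 0, data: keyFrameBytes }));
//     decoder.decode(new EncodedVideoChunk({ type: 'delta', timestamp: 40000, data: deltaFrameBytes }));
//   });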
if (!this.isDecodeFirstIIframe && isIframe) { this.isDecodeFirstIIframe = true; } if (this.isDecodeFirstIIframe) { if (this.isDecodeStateClosed()) { this.player.debug.warn(this.TAG_NAME, 'VideoDecoder isDecodeStateClosed true'); return; } if (isIframe && payload[1] === 0) { const videoCodec = payload[0] & 0x0F; let config = {}; if (videoCodec === VIDEO_ENC_CODE.h264) { let data = payload.slice(5); config = parseAVCDecoderConfigurationRecord(data); } else if (videoCodec === VIDEO_ENC_CODE.h265) { config = parseHEVCDecoderConfigurationRecord$2(payload); } const videoInfo = this.player.video.videoInfo; if (videoInfo && videoInfo.width && videoInfo.height && config && config.codecWidth && config.codecHeight && (config.codecWidth !== videoInfo.width || config.codecHeight !== videoInfo.height)) { this.player.debug.warn(this.TAG_NAME, `decodeVideo: video width or height is changed, old width is ${videoInfo.width}, old height is ${videoInfo.height}, new width is ${config.codecWidth}, new height is ${config.codecHeight}, and emit change event`); this.isWidthOrHeightChanged = true; this.player.emitError(EVENTS_ERROR.wcsWidthOrHeightChange); } } if (this.isWidthOrHeightChanged) { this.player.debug.warn(this.TAG_NAME, `decodeVideo: video width or height is changed, and return`); return; } if (isVideoSequenceHeader(payload)) { this.player.debug.warn(this.TAG_NAME, 'decodeVideo and payload is video sequence header so drop this frame'); return; } if (payload.byteLength < VIDEO_PAYLOAD_MIN_SIZE) { this.player.debug.warn(this.TAG_NAME, `decodeVideo and payload is too small , payload length is ${payload.byteLength}`); return; } let isFirst = false; let nowTime = new Date().getTime(); if (!this.prevTimestamp) { this.prevTimestamp = nowTime; isFirst = true; } const diffTime = nowTime - this.prevTimestamp; this.decodeDiffTimestamp = diffTime; if (diffTime > 500 && !isFirst && this.player.isPlayer()) { this.player.debug.warn(this.TAG_NAME, 'decodeVideo diff time is ', diffTime); } const buffer = payload.slice(5); const chunk = new EncodedVideoChunk({ data: buffer, timestamp: ts, type: isIframe ? 
ENCODED_VIDEO_TYPE.key : ENCODED_VIDEO_TYPE.delta }); this.player.emit(EVENTS.timeUpdate, ts); // if (this.player.recorder && // this.player.recorder.isRecording && // this.player._opt.recordType === FILE_SUFFIX.mp4) { // this.player.recorder.handleAddNaluTrack(buffer, isIframe, ts, cts); // } try { this.decoder.decode(chunk); } catch (e) { this.player.debug.error(this.TAG_NAME, 'VideoDecoder', e); const errorString = e.toString(); if (errorString.indexOf(WCS_ERROR.keyframeIsRequiredError) !== -1) { this.player.emitError(EVENTS_ERROR.webcodecsDecodeError); } else if (errorString.indexOf(WCS_ERROR.canNotDecodeClosedCodec) !== -1) { this.player.emitError(EVENTS_ERROR.webcodecsDecodeError); } } this.prevTimestamp = new Date().getTime(); } else { this.player.debug.log(this.TAG_NAME, 'VideoDecoder first frame is not iFrame'); } } } getDecodeDiffTimes() { return this.decodeDiffTimestamp; } isDecodeStateClosed() { return this.decoder.state === 'closed'; } isDecodeStateConfigured() { return this.decoder.state === 'configured'; } isDecodeStateUnConfigured() { return this.decoder.state === 'unconfigured'; } } const iconsMap = { play: '播放', pause: '暂停', audio: '', mute: '', screenshot: '截图', loading: '', fullscreen: '全屏', fullscreenExit: '退出全屏', record: '录制', recordStop: '停止录制', narrow: '缩小', expand: '放大', ptz: '操作盘', ptzActive: '操作盘激活', zoom: '电子放大', zoomStop: '关闭电子放大', close: '关闭', performance: '性能面板', performanceActive: '性能面板激活', face: '人脸识别', faceActive: '人脸识别激活', object: '物品识别', objectActive: '物品识别激活', occlusion: '遮挡物检查', occlusionActive: '遮挡物检查激活', logSave: '保存日志' }; var icons = Object.keys(iconsMap).reduce((icons, key) => { icons[key] = ` ${iconsMap[key] ? `${iconsMap[key]}` : ''} `; return icons; }, {}); var template = ((player, control) => { if (player._opt.hasControl && player._opt.controlAutoHide) { player.$container.classList.add('jb-pro-controls-show-auto-hide'); } else { player.$container.classList.add('jb-pro-controls-show'); } const options = player._opt; const operateBtns = options.operateBtns; const playbackOperateDom = `
00:00:00
${options.playbackConfig.showPrecisionBtn ? `
${icons.narrow}
${icons.expand}
` : ''}
`; const playbackOperateSimpleDom = `
00:00
`; player.$container.insertAdjacentHTML('beforeend', ` ${options.background ? `
` : ''}
${options.loadingIcon ? `
${icons.loading}
${options.loadingText}
` : ''} ${options.hasControl && operateBtns.play ? `
` : ''} ${options.hasControl && operateBtns.ptz ? `
${options.ptzMoreArrowShow ? `
` : ''}
${options.ptzZoomShow ? `
缩放+
缩放-
` : ''} ${options.ptzApertureShow ? `
光圈+
光圈-
` : ''} ${options.ptzFocusShow ? `
聚焦+
聚焦-
` : ''} ${options.ptzCruiseShow ? `
巡航开
巡航关
` : ''} ${options.ptzFogShow ? `
透雾开
透雾关
` : ''} ${options.ptzWiperShow ? `
雨刷开
雨刷关
` : ''}
` : ''} ${options.hasVideo ? `
${icons.narrow}
电子放大
${icons.expand}
${icons.zoomStop}
00:00:00
${icons.recordStop}
` : ''} ${options.hasControl ? `
${options.showBandwidth ? `
` : ''}
${options.controlHtml}
${options.playType === PLAY_TYPE.playbackTF && options.playbackConfig.showControl && options.playbackConfig.controlType === PLAYBACK_CONTROL_TYPE.normal ? playbackOperateDom : ''} ${options.playType === PLAY_TYPE.playbackTF && options.playbackConfig.showControl && options.playbackConfig.controlType === PLAYBACK_CONTROL_TYPE.simple ? playbackOperateSimpleDom : ''}
${options.playType === PLAY_TYPE.playbackTF && options.playbackConfig.showRateBtn ? `
` : ''} ${operateBtns.close ? `
${icons.close}
` : ''} ${operateBtns.logSave ? `
${icons.logSave}
` : ''} ${operateBtns.performance ? `
${icons.performance}
${icons.performanceActive}
` : ''} ${operateBtns.aiFace ? `
${icons.face}
${icons.faceActive}
` : ''} ${operateBtns.aiObject ? `
${icons.object}
${icons.objectActive}
` : ''} ${operateBtns.aiOcclusion ? `
${icons.occlusion}
${icons.occlusionActive}
` : ''} ${operateBtns.quality ? `
` : ''} ${operateBtns.scale ? `
` : ''} ${operateBtns.audio ? `
${icons.audio} ${icons.mute}
` : ''} ${operateBtns.play ? `
${icons.play}
${icons.pause}
` : ''} ${operateBtns.screenshot ? `
${icons.screenshot}
` : ''} ${operateBtns.record ? `
${icons.record}
${icons.recordStop}
` : ''} ${operateBtns.ptz ? `
${icons.ptz}
${icons.ptzActive}
` : ''} ${operateBtns.zoom ? `
${icons.zoom}
${icons.zoomStop}
` : ''} ${operateBtns.fullscreen ? `
${icons.fullscreen}
${icons.fullscreenExit}
` : ''}
` : ''}
`); Object.defineProperty(control, '$poster', { value: player.$container.querySelector('.jb-pro-poster') }); Object.defineProperty(control, '$loadingBg', { value: player.$container.querySelector('.jb-pro-loading-bg') }); Object.defineProperty(control, '$loadingBgImage', { value: player.$container.querySelector('.jb-pro-loading-bg-image') }); Object.defineProperty(control, '$loading', { value: player.$container.querySelector('.jb-pro-loading') }); Object.defineProperty(control, '$loadingText', { value: player.$container.querySelector('.jb-pro-loading-text') }); Object.defineProperty(control, '$play', { value: player.$container.querySelector('.jb-pro-play') }); Object.defineProperty(control, '$playBig', { value: player.$container.querySelector('.jb-pro-play-big') }); Object.defineProperty(control, '$recording', { value: player.$container.querySelector('.jb-pro-recording') }); Object.defineProperty(control, '$recordingTime', { value: player.$container.querySelector('.jb-pro-recording-time') }); Object.defineProperty(control, '$recordingStop', { value: player.$container.querySelector('.jb-pro-recording-stop') }); Object.defineProperty(control, '$pause', { value: player.$container.querySelector('.jb-pro-pause') }); Object.defineProperty(control, '$controls', { value: player.$container.querySelector('.jb-pro-controls') }); Object.defineProperty(control, '$controlsInner', { value: player.$container.querySelector('.jb-pro-controls-bottom') }); Object.defineProperty(control, '$controlsLeft', { value: player.$container.querySelector('.jb-pro-controls-left') }); Object.defineProperty(control, '$controlsRight', { value: player.$container.querySelector('.jb-pro-controls-right') }); Object.defineProperty(control, '$volume', { value: player.$container.querySelector('.jb-pro-volume') }); Object.defineProperty(control, '$volumePanelWrap', { value: player.$container.querySelector('.jb-pro-volume-panel-wrap') }); Object.defineProperty(control, '$volumePanelText', { value: player.$container.querySelector('.jb-pro-volume-panel-text') }); Object.defineProperty(control, '$volumePanel', { value: player.$container.querySelector('.jb-pro-volume-panel') }); Object.defineProperty(control, '$volumeHandle', { value: player.$container.querySelector('.jb-pro-volume-panel-handle') }); Object.defineProperty(control, '$volumeOn', { value: player.$container.querySelector('.jb-pro-icon-audio') }); Object.defineProperty(control, '$volumeOff', { value: player.$container.querySelector('.jb-pro-icon-mute') }); Object.defineProperty(control, '$fullscreen', { value: player.$container.querySelector('.jb-pro-fullscreen') }); Object.defineProperty(control, '$fullscreenExit', { value: player.$container.querySelector('.jb-pro-fullscreen-exit') }); Object.defineProperty(control, '$record', { value: player.$container.querySelector('.jb-pro-record') }); Object.defineProperty(control, '$recordStop', { value: player.$container.querySelector('.jb-pro-record-stop') }); Object.defineProperty(control, '$screenshot', { value: player.$container.querySelector('.jb-pro-screenshot') }); Object.defineProperty(control, '$speed', { value: player.$container.querySelector('.jb-pro-speed') }); Object.defineProperty(control, '$controlHtml', { value: player.$container.querySelector('.jb-pro-controls-item-html') }); Object.defineProperty(control, '$playbackTime', { value: player.$container.querySelector('.jb-pro-controls-playback-time') }); Object.defineProperty(control, '$playbackTimeInner', { value: 
player.$container.querySelector('.jb-pro-controls-playback-time-inner') }); Object.defineProperty(control, '$playbackTimeScroll', { value: player.$container.querySelector('.jb-pro-controls-playback-time-scroll') }); Object.defineProperty(control, '$playbackTimeList', { value: player.$container.querySelector('.jb-pro-controls-playback-time-list') }); Object.defineProperty(control, '$playbackTimeListOne', { value: player.$container.querySelector('.jb-pro-playback-time-one-wrap') }); Object.defineProperty(control, '$playbackTimeListSecond', { value: player.$container.querySelector('.jb-pro-playback-time-second-wrap') }); Object.defineProperty(control, '$playbackCurrentTime', { value: player.$container.querySelector('.jb-pro-controls-playback-current-time') }); Object.defineProperty(control, '$playbackCurrentTimeText', { value: player.$container.querySelector('.jb-pro-controls-playback-current-time-text') }); Object.defineProperty(control, '$controlsPlaybackBtns', { value: player.$container.querySelector('.jb-pro-controls-playback-btns') }); Object.defineProperty(control, '$playbackNarrow', { value: player.$container.querySelector('.jb-pro-playback-narrow') }); // Object.defineProperty(control, '$playbackExpand', { value: player.$container.querySelector('.jb-pro-playback-expand') }); Object.defineProperty(control, '$ptz', { value: player.$container.querySelector('.jb-pro-ptz') }); Object.defineProperty(control, '$ptzActive', { value: player.$container.querySelector('.jb-pro-ptz-active') }); Object.defineProperty(control, '$ptzControl', { value: player.$container.querySelector('.jb-pro-ptz-controls') }); Object.defineProperty(control, '$ptzBgActive', { value: player.$container.querySelector('.jb-pro-ptz-bg-active') }); Object.defineProperty(control, '$ptzControlCircular', { value: player.$container.querySelector('.jb-pro-ptz-control') }); Object.defineProperty(control, '$ptzArrows', { value: player.$container.querySelectorAll('.jb-pro-ptz-arrow') }); Object.defineProperty(control, '$ptzExpand', { value: player.$container.querySelector('.jb-pro-ptz-expand') }); Object.defineProperty(control, '$ptzNarrow', { value: player.$container.querySelector('.jb-pro-ptz-narrow') }); Object.defineProperty(control, '$ptzApertureFar', { value: player.$container.querySelector('.jb-pro-ptz-aperture-far') }); Object.defineProperty(control, '$ptzApertureNear', { value: player.$container.querySelector('.jb-pro-ptz-aperture-near') }); Object.defineProperty(control, '$ptzFocusFar', { value: player.$container.querySelector('.jb-pro-ptz-focus-far') }); Object.defineProperty(control, '$ptzFocusNear', { value: player.$container.querySelector('.jb-pro-ptz-focus-near') }); Object.defineProperty(control, '$ptzCruisePlay', { value: player.$container.querySelector('.jb-pro-ptz-cruise-play') }); Object.defineProperty(control, '$ptzCruisePause', { value: player.$container.querySelector('.jb-pro-ptz-cruise-pause') }); Object.defineProperty(control, '$ptzFogOpen', { value: player.$container.querySelector('.jb-pro-ptz-fog-open') }); Object.defineProperty(control, '$ptzFogClose', { value: player.$container.querySelector('.jb-pro-ptz-fog-close') }); Object.defineProperty(control, '$ptzWiperOpen', { value: player.$container.querySelector('.jb-pro-ptz-wiper-open') }); Object.defineProperty(control, '$ptzWiperClose', { value: player.$container.querySelector('.jb-pro-ptz-wiper-close') }); Object.defineProperty(control, '$qualityText', { value: player.$container.querySelector('.jb-pro-quality-icon-text') }); 
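// The Object.defineProperty calls in this template cache each control DOM node on the `control`
// instance as a read-only `$`-prefixed reference (value-only descriptors default to
// writable: false, enumerable: false, configurable: false), so later UI code can use control.$xxx
// without re-querying the DOM. A hedged sketch of the same pattern expressed as a selector map
// (illustrative only; defineDomRefs is not a function in this bundle):
//
//   function defineDomRefs(target, root, selectorMap) {
//     Object.keys(selectorMap).forEach((prop) => {
//       Object.defineProperty(target, prop, { value: root.querySelector(selectorMap[prop]) });
//     });
//   }
//
//   // defineDomRefs(control, player.$container, { $close: '.jb-pro-close', $zoom: '.jb-pro-zoom' });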
Object.defineProperty(control, '$qualityMenu', { value: player.$container.querySelector('.jb-pro-quality-menu') }); Object.defineProperty(control, '$qualityMenuList', { value: player.$container.querySelector('.jb-pro-quality-menu-list') }); Object.defineProperty(control, '$scaleText', { value: player.$container.querySelector('.jb-pro-scale-icon-text') }); Object.defineProperty(control, '$scaleMenu', { value: player.$container.querySelector('.jb-pro-scale-menu') }); Object.defineProperty(control, '$scaleMenuList', { value: player.$container.querySelector('.jb-pro-scale-menu-list') }); Object.defineProperty(control, '$zoom', { value: player.$container.querySelector('.jb-pro-zoom') }); Object.defineProperty(control, '$zoomStop', { value: player.$container.querySelector('.jb-pro-zoom-stop') }); Object.defineProperty(control, '$zoomNarrow', { value: player.$container.querySelector('.jb-pro-zoom-narrow') }); Object.defineProperty(control, '$zoomExpand', { value: player.$container.querySelector('.jb-pro-zoom-expand') }); Object.defineProperty(control, '$zoomStop2', { value: player.$container.querySelector('.jb-pro-zoom-stop2') }); Object.defineProperty(control, '$close', { value: player.$container.querySelector('.jb-pro-close') }); Object.defineProperty(control, '$zoomControls', { value: player.$container.querySelector('.jb-pro-zoom-controls') }); Object.defineProperty(control, '$performancePanel', { value: player.$container.querySelector('.jb-pro-performance-panel') }); Object.defineProperty(control, '$performance', { value: player.$container.querySelector('.jb-pro-performance') }); Object.defineProperty(control, '$performanceActive', { value: player.$container.querySelector('.jb-pro-performance-active') }); Object.defineProperty(control, '$faceDetect', { value: player.$container.querySelector('.jb-pro-face') }); Object.defineProperty(control, '$faceDetectActive', { value: player.$container.querySelector('.jb-pro-face-active') }); Object.defineProperty(control, '$objectDetect', { value: player.$container.querySelector('.jb-pro-object') }); Object.defineProperty(control, '$objectDetectActive', { value: player.$container.querySelector('.jb-pro-object-active') }); Object.defineProperty(control, '$occlusionDetect', { value: player.$container.querySelector('.jb-pro-occlusion') }); Object.defineProperty(control, '$occlusionDetectActive', { value: player.$container.querySelector('.jb-pro-occlusion-active') }); Object.defineProperty(control, '$contextmenus', { value: player.$container.querySelector('.jb-pro-contextmenus') }); Object.defineProperty(control, '$speedText', { value: player.$container.querySelector('.jb-pro-speed-icon-text') }); Object.defineProperty(control, '$speedMenu', { value: player.$container.querySelector('.jb-pro-speed-menu') }); Object.defineProperty(control, '$speedMenuList', { value: player.$container.querySelector('.jb-pro-speed-menu-list') }); Object.defineProperty(control, '$logSave', { value: player.$container.querySelector('.jb-pro-logSave') }); Object.defineProperty(control, '$playbackProgress', { value: player.$container.querySelector('.jb-pro-control-progress-simple') }); Object.defineProperty(control, '$playbackProgressTip', { value: player.$container.querySelector('.jb-pro-progress-tip') }); Object.defineProperty(control, '$playbackProgressHover', { value: player.$container.querySelector('.jb-pro-progress-hover') }); Object.defineProperty(control, '$playbackProgressPlayed', { value: player.$container.querySelector('.jb-pro-progress-played') }); 
Object.defineProperty(control, '$playbackProgressIndicator', { value: player.$container.querySelector('.jb-pro-progress-indicator') }); Object.defineProperty(control, '$playbackProgressTime', { value: player.$container.querySelector('.jb-pro-playback-control-time') }); Object.defineProperty(control, '$tipsMessage', { value: player.$container.querySelector('.jb-pro-tips-message') }); Object.defineProperty(control, '$tipsMessageClose', { value: player.$container.querySelector('.jb-pro-tips-message-close') }); Object.defineProperty(control, '$tipsMessageContent', { value: player.$container.querySelector('.jb-pro-tips-message-content') }); }); // 24小时,以一个小时为单位的。 function _isInPlayList(list, time) { let result = false; list.forEach(item => { if (!result) { if (item.startTimestamp <= time && item.endTimestamp > time) { result = true; } } }); return result; } // 一个小时 60个格子 function initHourMinList1() { let playbackList = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : []; let result = []; const playbackItem = playbackList[0] || {}; const oneDay = playbackItem.startTimestamp; for (let i = 0; i < 24 * 60; i++) { const isStart = i % 60 === 0; let hasRecord = false; if (oneDay) { const tempMin = formatMinuteTimestamp(oneDay, i); hasRecord = _isInPlayList(playbackList, tempMin); } result.push({ title: formatMinTimeTips(i), timestamp: i, dataType: 'min', hasRecord, isStart }); } return result; } // 半个小时 60个格式 function initHalfHourMinSecondList1() { let playbackList = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : []; let result = []; const playbackItem = playbackList[0] || {}; const oneDay = playbackItem.startTimestamp; for (let i = 0; i < 24 * 60; i++) { let second = i * 60; let isStart = second % (30 * 60) === 0; let hasRecord = false; if (oneDay) { const tempSecond = formatSecondTimestamp(oneDay, second); hasRecord = _isInPlayList(playbackList, tempSecond); } result.push({ title: formatSecondTimeTips(second), timestamp: second, dataType: 'second', hasRecord, isStart }); let second2 = i * 60 + 30; isStart = second2 % (30 * 60) === 0; if (oneDay) { const tempSecond2 = formatSecondTimestamp(oneDay, second2); hasRecord = _isInPlayList(playbackList, tempSecond2); } result.push({ title: formatSecondTimeTips(second2), timestamp: second2, dataType: 'second', hasRecord, isStart }); } return result; } // 十分钟 60个格子 function initTenMinSecondList1() { let playbackList = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : []; // let result = []; const playbackItem = playbackList[0] || {}; const oneDay = playbackItem.startTimestamp; // 时 for (let i = 0; i < 24 * 6; i++) { // 内层 for (let j = 0; j < 60; j++) { let step = 10 * j + i * 600; let isStart = step % (10 * 60) === 0; let hasRecord = false; if (oneDay) { const tempSecond = formatSecondTimestamp(oneDay, step); hasRecord = _isInPlayList(playbackList, tempSecond); } result.push({ title: formatSecondTimeTips(step), timestamp: step, dataType: 'second', isStart, hasRecord }); } } return result; } // 五分钟 60个格子 function initFiveMinSecondList1() { let playbackList = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : []; // 外层 23个 小时 let result = []; const playbackItem = playbackList[0] || {}; const oneDay = playbackItem.startTimestamp; for (let i = 0; i < 24 * 12; i++) { // 内层 60个格子 for (let j = 0; j < 60; j++) { let step = 5 * j + i * 300; let isStart = step % (5 * 60) === 0; let hasRecord = false; if (oneDay) { const tempSecond = formatSecondTimestamp(oneDay, step); hasRecord = _isInPlayList(playbackList, tempSecond); } result.push({ title: formatSecondTimeTips(step), timestamp: step, dataType: 'second', isStart, hasRecord }); } } return result; } // 24小时 以一个小时间隔 function initHourList2() { let result = []; for (let i = 0; i < 24; i++) { let title = i + ":00"; if (i < 10) { title = "0" + title; } result.push({ title: title, hour: i, min: 0, second: 0 }); } return result; } // 24小时 以半个小时间隔 function initHalfHourList2() { let result = []; for (let i = 0; i < 24; i++) { let title = i + ":00"; let titleHalf = i + ':30'; if (i < 10) { title = "0" + title; titleHalf = '0' + titleHalf; } result.push({ title: title, hour: i, min: 0, second: 0 }); result.push({ title: titleHalf, hour: i, min: 30, second: 0 }); } return result; } // 24小时,10分钟为间隔 function initTenMinList2() { let result = []; for (let i = 0; i < 24; i++) { let title = i + ":00"; if (i < 10) { title = "0" + title; } result.push({ title: title, hour: i, min: 0, second: 0 }); for (let j = 1; j < 6; j++) { let tenMin = '' + j + '0'; result.push({ title: title.replace(':00', ':' + tenMin), hour: i, min: j * 10, second: 0 }); } } return result; } // 24小时,5分钟为间隔 function initFileMinList2() { let result = []; for (let i = 0; i < 24; i++) { let title = i + ":00"; if (i < 10) { title = "0" + title; } result.push({ title: title, hour: i, min: 0, second: 0 }); result.push({ title: title.replace(':00', ':05'), hour: i, min: 5, second: 0 }); for (let j = 1; j < 6; j++) { let tenMin = '' + j + '0'; let fileMin = j + '5'; result.push({ title: title.replace(':00', ':' + tenMin), hour: i, min: j * 10, second: 0 }); result.push({ title: title.replace(':00', ':' + fileMin), hour: i, min: j * 10 + 5, second: 0 }); } } return result; } function renderTimeDay() { let oneList = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : []; let secondList = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : []; let control = arguments.length > 2 ? arguments[2] : undefined; const oneListLength = oneList.length; const secondListLength = secondList.length; const total = Math.max(oneListLength, secondListLength); const once = PLAYBACK_RENDER_ONCE_LENGTH; const loopCount = Math.ceil(total / once); let countRender = 0; let currentIndex = 0; function render() { let fragment = ''; let fragment2 = ''; for (let i = 0; i < once; i++) { const time = oneList[currentIndex]; if (time) { fragment += `
${time.title}
`; } const hour = secondList[currentIndex]; if (hour) { fragment2 += `
${hour.title}
`; } currentIndex += 1; } if (fragment) { control.$playbackTimeListOne.insertAdjacentHTML('beforeend', fragment); } if (fragment2) { control.$playbackTimeListSecond.insertAdjacentHTML('beforeend', fragment2); } countRender += 1; if (countRender < loopCount) { control.rafId = window.requestAnimationFrame(render); } } render(); } // 一个小时一个间隔 function renderHourTimeDay(playbackList, control) { const minuteList = initHourMinList1(playbackList); const hourList = initHourList2(); renderTimeDay(minuteList, hourList, control); } // 半个小时一个间隔 function renderHalfHourTimeDay(playbackList, control) { const minuteList = initHalfHourMinSecondList1(playbackList); const hourList = initHalfHourList2(); renderTimeDay(minuteList, hourList, control); } // 10分钟一个间隔 function renderTenMinTimeDay(playbackList, control) { const minuteList = initTenMinSecondList1(playbackList); const hourList = initTenMinList2(); renderTimeDay(minuteList, hourList, control); } // 5分钟一个间隔 function renderFiveMinTimeDay(playbackList, control) { const minuteList = initFiveMinSecondList1(playbackList); const hourList = initFileMinList2(); renderTimeDay(minuteList, hourList, control); } function secondToTime(second) { const add0 = num => num < 10 ? `0${num}` : String(num); const hour = Math.floor(second / 3600); const min = Math.floor((second - hour * 3600) / 60); const sec = Math.floor(second - hour * 3600 - min * 60); return (hour > 0 ? [hour, min, sec] : [min, sec]).map(add0).join(':'); } function getPosFromEvent(control, duration, event) { const $playbackProgress = control.$playbackProgress; const { left } = $playbackProgress.getBoundingClientRect(); const eventLeft = isMobile() ? event.touches[0].clientX : event.pageX; const width = clamp(eventLeft - left, 0, $playbackProgress.clientWidth); const second = parseInt(width / $playbackProgress.clientWidth * duration, 10); const time = secondToTime(second); const percentage = clamp(width / $playbackProgress.clientWidth, 0, 1); return { second, time, width, percentage }; } function getPercentage(second, duration) { return clamp(second / duration, 0, 1); } function addClass(target, className) { return target.classList.add(className); } function removeClass(target, className) { return target.classList.remove(className); } function hasClass(target, className) { return target.classList.contains(className); } // append function append(parent, child) { if (child instanceof Element) { parent.appendChild(child); } else { parent.insertAdjacentHTML('beforeend', String(child)); } return parent.lastElementChild || parent.lastChild; } function setStyle(element, key, value) { if (element && element.style && isNotEmpty(key)) { element.style[key] = value; } return element; } // include from event function includeFromEvent(event, target) { // js 捕获事件冒泡路径。 return event.composedPath && event.composedPath().indexOf(target) > -1; } function removeElement(element) { let result = false; if (element) { if (element.parentNode) { element.parentNode.removeChild(element); result = true; } } return result; } var observer$1 = ((player, control) => { const { events: { proxy } } = player; const object = document.createElement('object'); object.setAttribute('aria-hidden', 'true'); object.setAttribute('tabindex', -1); object.type = 'text/html'; object.data = 'about:blank'; setStyle$1(object, { display: 'block', position: 'absolute', top: '0', left: '0', height: '100%', width: '100%', overflow: 'hidden', pointerEvents: 'none', zIndex: '-1' }); let playerWidth = player.width; let playerHeight = player.height; const resizeFn 
= throttle(() => { if (player.width !== playerWidth || player.height !== playerHeight) { playerWidth = player.width; playerHeight = player.height; player.emit(EVENTS.resize); screenfullH5Control(); } }, PLAYER_RESIZE_TIME); proxy(object, 'load', () => { proxy(object.contentDocument.defaultView, 'resize', () => { resizeFn(); }); }); player.$container.appendChild(object); player.on(EVENTS.destroy, () => { player.$container.removeChild(object); }); function setVolumeHandle(percentage) { if (percentage === 0) { setStyle$1(control.$volumeOn, 'display', 'none'); setStyle$1(control.$volumeOff, 'display', 'flex'); setStyle$1(control.$volumeHandle, 'top', `${48}px`); } else { if (control.$volumeHandle && control.$volumePanel) { const panelHeight = getStyle(control.$volumePanel, 'height') || 60; const handleHeight = getStyle(control.$volumeHandle, 'height'); const top = panelHeight - (panelHeight - handleHeight) * percentage - handleHeight; setStyle$1(control.$volumeHandle, 'top', `${top}px`); setStyle$1(control.$volumeOn, 'display', 'flex'); setStyle$1(control.$volumeOff, 'display', 'none'); } } control.$volumePanelText && (control.$volumePanelText.innerHTML = parseInt(percentage * 100)); } player.on(EVENTS.volumechange, () => { setVolumeHandle(player.volume); }); player.on(EVENTS.loading, flag => { setStyle$1(control.$loading, 'display', flag ? 'flex' : 'none'); if (isFalse(player._opt.backgroundLoadingShow) && isTrue(flag) || isFalse(flag)) { // hidden setStyle$1(control.$poster, 'display', 'none'); } if (flag) { setStyle$1(control.$playBig, 'display', 'none'); setStyle$1(control.$tipsMessage, 'display', 'none'); } if (!flag) { if (player._opt.extendDomConfig.showAfterLoading && control.$extendDom) { setStyle$1(control.$extendDom, 'display', 'block'); } } // playing if (!flag) { // 除了mse解码外。 if (player.getRenderType() === RENDER_TYPE.canvas && isFalse(player._opt.useMSE)) { handleRemoveLoadingBgImage(); } } }); // 主要是给 mse+ video 服务的。 player.on(EVENTS.removeLoadingBgImage, () => { // need delay to hidden,because video play not immediately show view. handleRemoveLoadingBgImage(); }); const handleRemoveLoadingBgImage = () => { if (control.$loadingBgImage && control.$loadingBg && control.$loadingBgImage.src) { player.debug && player.debug.log('Control', 'remove loading bg image'); control.$loadingBgImage.width = 0; control.$loadingBgImage.height = 0; control.$loadingBgImage.src = ''; setStyle$1(control.$loadingBg, 'display', 'none'); } }; const handleScreenfullChange = e => { // 全屏 if (player.fullscreen) { if (getTarget(e) === player.$container) { screenfullChange(); } } else { // 取消全屏 screenfullChange(); } }; const screenfullChange = fullscreen => { let isFullScreen = isBoolean(fullscreen) ? fullscreen : player.fullscreen; setStyle$1(control.$fullscreenExit, 'display', isFullScreen ? 'flex' : 'none'); setStyle$1(control.$fullscreen, 'display', isFullScreen ? 'none' : 'flex'); // control.autoSize(); }; const showPlaybackTFDom = () => { return player._opt.playType === PLAY_TYPE.playbackTF && player._opt.playbackConfig.showControl; }; const screenfullH5Control = fn => { if (isMobile() && control.$controls && player._opt.useWebFullScreen) { setTimeout(() => { if (player.fullscreen) { const controlHeight = showPlaybackTFDom() ? 
CONTROL_PLAYBACK_HEIGHT : CONTROL_HEIGHT; let translateX = player.height / 2 - player.width + controlHeight / 2; let translateY = player.height / 2 - controlHeight / 2; control.$controls.style.transform = `translateX(${-translateX}px) translateY(-${translateY}px) rotate(-90deg)`; if (control.$zoomControls) { const controlsWidth = 156; const controlsHeight = 30; const controlsTranslateX = player.width / 2 + controlsWidth / 2 - controlsHeight / 2; control.$zoomControls.style.transform = `translateX(${-controlsTranslateX}px) translateY(${player.height / 2}px) rotate(-90deg)`; } if (control.$recording) { const recordingWidth = 101; const recordingHeight = 20; const recordingTranslateX = player.width / 2 + recordingWidth / 2 - recordingHeight / 2; control.$recording.style.transform = `translateX(${-recordingTranslateX}px) translateY(${player.height / 2}px) rotate(-90deg)`; } } else { control.$controls.style.transform = `translateX(0) translateY(0) rotate(0)`; if (control.$zoomControls) { control.$zoomControls.style.transform = `translateX(-50%) translateY(0) rotate(0)`; } if (control.$recording) { control.$recording.style.transform = `translateX(-50%) translateY(0) rotate(0)`; } } fn && fn(); }, 10); } }; try { screenfull.on('change', handleScreenfullChange); player.events.destroys.push(() => { screenfull.off('change', handleScreenfullChange); }); } catch (error) { // } // player.on(EVENTS.webFullscreen, value => { if (isMobile()) { screenfullChange(value); screenfullH5Control(() => { _resizePlaybackTime(); }); } }); player.on(EVENTS.recording, () => { if (player.playing) { setStyle$1(control.$record, 'display', player.recording ? 'none' : 'flex'); setStyle$1(control.$recordStop, 'display', player.recording ? 'flex' : 'none'); if (player._opt.hasControl || player._opt.isShowRecordingUI) { setStyle$1(control.$recording, 'display', player.recording ? 'flex' : 'none'); if (isFalse(player.recording) && control.$recordingTime) { control.$recordingTime.innerHTML = formatTimeTips(0); } } } }); // player.on(EVENTS.recordingTimestamp, timestamp => { // console.log(timestamp); control.$recordingTime && (control.$recordingTime.innerHTML = formatTimeTips(timestamp)); }); player.on(EVENTS.zooming, () => { if (player.playing) { setStyle$1(control.$zoom, 'display', player.zooming ? 'none' : 'flex'); setStyle$1(control.$zoomStop, 'display', player.zooming ? 'flex' : 'none'); if (player._opt.hasControl || player._opt.isShowZoomingUI) { setStyle$1(control.$zoomControls, 'display', player.zooming ? 'flex' : 'none'); } } }); player.on(EVENTS.playing, flag => { handlePlaying(flag); }); const handlePlaying = flag => { // playing if (flag) { setStyle$1(control.$play, 'display', 'none'); setStyle$1(control.$playBig, 'display', 'none'); } else { // not playing if (player.isPlayFailedAndPaused && isFalse(player._opt.playFailedAndPausedShowPlayBtn)) { setStyle$1(control.$play, 'display', 'none'); setStyle$1(control.$playBig, 'display', 'none'); } else { setStyle$1(control.$play, 'display', 'flex'); setStyle$1(control.$playBig, 'display', 'block'); } } // setStyle(control.$play, 'display', flag ? 'none' : 'flex'); // setStyle(control.$playBig, 'display', flag ? 'none' : 'block'); setStyle$1(control.$pause, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$screenshot, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$record, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$qualityMenu, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$volume, 'display', flag ? 
'flex' : 'none'); setStyle$1(control.$ptz, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$zoom, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$scaleMenu, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$faceDetect, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$objectDetect, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$occlusionDetect, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$controlHtml, 'display', flag ? 'flex' : 'none'); if (player.isPlayback()) { setStyle$1(control.$speedMenu, 'display', flag ? 'flex' : 'none'); } screenfullChange(); control.extendBtnList.forEach(item => { if (item.$iconWrap) { setStyle$1(item.$iconWrap, 'display', flag ? 'flex' : 'none'); } // 默认是隐藏的 if (item.$activeIconWrap) { setStyle$1(item.$activeIconWrap, 'display', 'none'); } }); if (player._opt.showPerformance) { setStyle$1(control.$performanceActive, 'display', flag ? 'flex' : 'none'); } else { setStyle$1(control.$performance, 'display', flag ? 'flex' : 'none'); setStyle$1(control.$performanceActive, 'display', 'none'); } // hidden setStyle$1(control.$poster, 'display', 'none'); setStyle$1(control.$ptzActive, 'display', 'none'); setStyle$1(control.$recordStop, 'display', 'none'); setStyle$1(control.$zoomStop, 'display', 'none'); setStyle$1(control.$faceDetectActive, 'display', 'none'); setStyle$1(control.$objectDetectActive, 'display', 'none'); // 不在播放 if (!flag) { control.$speed && (control.$speed.innerHTML = bpsSize('')); // zoom setStyle$1(control.$zoomControls, 'display', 'none'); // record setStyle$1(control.$recording, 'display', 'none'); // ptz if (control.$ptzControl) { control.$ptzControl.classList.remove('jb-pro-ptz-controls-show'); } } _resizePlaybackTime(); if (flag) { _playbackTimeOffset(); } }; // player.on(EVENTS.playbackPause, flag => { handlePlaying(!flag); }); player.on(EVENTS.kBps, rate => { const bps = bpsSize$2(rate); control.kbpsShow = bps; if (player._opt.showBandwidth) { control.$speed && (control.$speed.innerHTML = bps); _resizePlaybackTime(); } }); const _resizePlaybackTime = () => { if (showPlaybackTFDom()) { if (player._opt.playbackConfig.controlType === PLAYBACK_CONTROL_TYPE.normal) { let width = control.controlsInnerRect.width - control.controlsLeftRect.width - control.controlsRightRect.width - control.controlsPlaybackBtnsRect.width; if (isMobile() && player.webFullscreen) { width = control.controlsInnerRect.height - control.controlsLeftRect.height - control.controlsRightRect.height - control.controlsPlaybackBtnsRect.height; } control.$playbackTimeInner.style.width = width + 'px'; } } }; // playback time offset const _playbackTimeOffset = () => { if (!showPlaybackTFDom() || player._opt.playbackConfig.controlType !== PLAYBACK_CONTROL_TYPE.normal) { return; } const leftPx = control.$playbackCurrentTime.style.left; let left = parseInt(leftPx, 10); const innerWidth = control.controlsPlaybackTimeInner.width; if (left - innerWidth / 2 > 0) { left = parseInt(left - innerWidth / 2, 10); } else { left = 0; } control.$playbackTimeInner.scrollLeft = left; }; // show playback operate if (showPlaybackTFDom()) { // 计算偏移量 const _calcCurrentTimeTextOffset = () => { if (showPlaybackTFDom()) { let left = 0; const playingTimestamp = player.playback && player.playback.playingTimestamp; if (playingTimestamp) { const nowDate = new Date(playingTimestamp); const hour = nowDate.getHours(); const min = nowDate.getMinutes(); const second = nowDate.getSeconds(); if (player.playback.is60Min) { left = hour * 60 + min; } else if 
(player.playback.is30Min) { left = (hour * 60 + min) * 2 + parseInt(second / 30, 10); } else if (player.playback.is10Min) { left = (hour * 60 + min) * 6 + parseInt(second / 10, 10); } else if (player.playback.is5Min) { left = (hour * 60 + min) * 12 + parseInt(second / 5, 10); } else if (player.playback.is1Min) { left = (hour * 60 + min) * 60 + parseInt(second, 10); } control.$playbackCurrentTime.style.left = left + 'px'; // console.log('playingTimestamp', playingTimestamp, 'hour is ', hour, 'min is', min, 'second is', second, 'left is ', left) } } }; // toggle playback button const _togglePlaybackButton = precision => { control.$playbackNarrow.classList.remove('disabled'); control.$playbackExpand.classList.remove('disabled'); if (precision === PLAYBACK_CONTROL_TIME_PRECISION.oneHour) { control.$playbackNarrow.classList.add('disabled'); } if (precision === PLAYBACK_CONTROL_TIME_PRECISION.fiveMin) { control.$playbackExpand.classList.add('disabled'); } }; // // if (player._opt.showBandwidth) { // control.$controlsLeft.style.width = '90px' // } // 播放时间 player.on(EVENTS.playbackTime, time => { if (player._opt.playbackConfig.controlType === PLAYBACK_CONTROL_TYPE.normal) { control.$playbackCurrentTimeText && (control.$playbackCurrentTimeText.innerText = parseTime(time, "{h}:{i}:{s}")); _calcCurrentTimeTextOffset(); } else if (player._opt.playbackConfig.controlType === PLAYBACK_CONTROL_TYPE.simple) { // console.log('playbackTime', time); const percentage = getPercentage(time, player.playback.totalDuration); control.$playbackProgressPlayed.style.width = `${percentage * 100}%`; control.$playbackProgressIndicator.style.left = `calc(${percentage * 100}% - ${14 / 2}px)`; control.$playbackProgressTime.innerText = `${secondToTime(time)} / ${secondToTime(player.playback.totalDuration)}`; } }); // 缩放类型 player.on(EVENTS.playbackPrecision, (precision, playbackList) => { if (!showPlaybackTFDom() || player._opt.playbackConfig.controlType !== PLAYBACK_CONTROL_TYPE.normal) { return; } // update class control.$playbackTimeScroll.classList.remove(PLAYBACK_CONTROL_TIME_PRECISION_CLASS.oneHour, PLAYBACK_CONTROL_TIME_PRECISION_CLASS.halfHour, PLAYBACK_CONTROL_TIME_PRECISION_CLASS.fiveMin, PLAYBACK_CONTROL_TIME_PRECISION_CLASS.tenMin); control.$playbackTimeScroll.classList.add(PLAYBACK_CONTROL_TIME_PRECISION_CLASS[precision]); if (control.rafId) { window.cancelAnimationFrame(control.rafId); control.rafId = null; } if (control.changePercisitionInterval) { clearTimeout(control.changePercisitionInterval); control.changePercisitionInterval = null; } // clear inner html control.$playbackTimeListOne.innerHTML = ''; control.$playbackTimeListSecond.innerHTML = ''; control.changePercisitionInterval = setTimeout(() => { control.$playbackTimeListOne.innerHTML = ''; control.$playbackTimeListSecond.innerHTML = ''; switch (precision) { case PLAYBACK_CONTROL_TIME_PRECISION.oneHour: renderHourTimeDay(playbackList, control); break; case PLAYBACK_CONTROL_TIME_PRECISION.halfHour: renderHalfHourTimeDay(playbackList, control); break; case PLAYBACK_CONTROL_TIME_PRECISION.tenMin: renderTenMinTimeDay(playbackList, control); break; case PLAYBACK_CONTROL_TIME_PRECISION.fiveMin: renderFiveMinTimeDay(playbackList, control); break; } _calcCurrentTimeTextOffset(); if (player._opt.playbackConfig.showPrecisionBtn) { _togglePlaybackButton(precision); } _playbackTimeOffset(); }, 16); // if (renderDom) { // control.$playbackTimeList.insertAdjacentHTML( // 'beforeend', // renderDom // ) // // } }); // resize player.on(EVENTS.resize, () => { 
_resizePlaybackTime(); }); // scroll player.on(EVENTS.playbackTimeScroll, () => { _playbackTimeOffset(); }); // _resizePlaybackTime(); } // quality if (player._opt.operateBtns.quality && player._opt.qualityConfig.length > 0) { player.on(EVENTS.streamQualityChange, value => { _changeStreamQuality(value); }); const _changeStreamQuality = value => { control.$qualityText.innerText = value; control.$qualityMenuItems.forEach($qualityMenuItem => { const quality = $qualityMenuItem.dataset.quality; $qualityMenuItem.classList.remove('jb-pro-quality-menu-item-active'); if (quality === value) { $qualityMenuItem.classList.add('jb-pro-quality-menu-item-active'); } }); }; const _renderQuality = () => { const qualityList = player._opt.qualityConfig || []; let qualityDom = ''; qualityList.forEach(item => { qualityDom += `
<div class="jb-pro-quality-menu-item" data-quality="${item}">${item}</div>
`; }); if (qualityDom) { control.$qualityMenuList.insertAdjacentHTML('beforeend', qualityDom); // Object.defineProperty(control, '$qualityMenuItems', { value: player.$container.querySelectorAll('.jb-pro-quality-menu-item') }); // init default quality } }; _renderQuality(); if (player.streamQuality) { _changeStreamQuality(player.streamQuality); } } // scale if (player._opt.operateBtns.scale && player._opt.scaleConfig.length > 0) { player.on(EVENTS.viewResizeChange, value => { _changeViewResize(value); }); const _changeViewResize = value => { const scaleTypeText = player._opt.scaleConfig[value]; control.$scaleText.innerText = scaleTypeText; control.$scaleMenuItems.forEach($scaleMenuItem => { const scale = $scaleMenuItem.dataset.scale; $scaleMenuItem.classList.remove('jb-pro-scale-menu-item-active'); if (toNumber(scale) === toNumber(value)) { $scaleMenuItem.classList.add('jb-pro-scale-menu-item-active'); } }); }; const _renderScale = () => { const scaleList = player._opt.scaleConfig || []; let scaleDom = ''; scaleList.forEach((item, index) => { scaleDom += `
<div class="jb-pro-scale-menu-item" data-scale="${index}">${item}</div>
`; }); if (scaleDom) { control.$scaleMenuList.insertAdjacentHTML('beforeend', scaleDom); // Object.defineProperty(control, '$scaleMenuItems', { value: player.$container.querySelectorAll('.jb-pro-scale-menu-item') }); // init default quality } }; _renderScale(); _changeViewResize(player.scaleType); } // playback rate ui if (player.isPlayback()) { if (player._opt.playbackConfig.showRateBtn && player._opt.playbackConfig.rateConfig.length > 0) { player.on(EVENTS.playbackRateChange, value => { _changePlaybackSpeed(value); }); const _changePlaybackSpeed = value => { const speedConfig = player._opt.playbackConfig.rateConfig; const speedItem = speedConfig.find(item => { return toNumber(item.value) === toNumber(value); }); if (speedItem) { control.$speedText.innerText = speedItem.label; control.$speedMenuItems.forEach($speedMenuItem => { const speed = $speedMenuItem.dataset.speed; $speedMenuItem.classList.remove('jb-pro-speed-menu-item-active'); if (toNumber(speed) === toNumber(value)) { $speedMenuItem.classList.add('jb-pro-speed-menu-item-active'); } }); } }; const _renderPlaybackSpeed = () => { const speedList = player._opt.playbackConfig.rateConfig; let scaleDom = ''; speedList.forEach((item, index) => { scaleDom += `
<div class="jb-pro-speed-menu-item" data-speed="${item.value}">${item.label}</div>
`; }); if (scaleDom) { control.$speedMenuList.insertAdjacentHTML('beforeend', scaleDom); // Object.defineProperty(control, '$speedMenuItems', { value: player.$container.querySelectorAll('.jb-pro-speed-menu-item') }); // init default quality } }; _renderPlaybackSpeed(); const defaultRate = player.playback ? player.playback.playbackRate : 1; _changePlaybackSpeed(defaultRate); } } // player.on(EVENTS.stats, function () { let stats = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; if (player._opt.showPerformance) { setStyle$1(control.$performancePanel, 'display', 'block'); control.$performancePanel.innerHTML = ''; const versionTime = proVersionTime; const performanceMemory = getPerformanceMemory(); const cpuLevel = player.getCpuLevel(); const cpuLevelTitle = isNotEmpty(cpuLevel) && cpuLevel !== -1 ? `${CPU_LEVEL[cpuLevel]}` : ''; const videoInfo = player.video && player.video.videoInfo || {}; const audioInfo = player.audio && player.audio.audioInfo || {}; const times = player._times || {}; const renderType = player.getRenderType(); const canvasRenderTpe = player.getCanvasRenderType(); const decoderType = player.getDecodeType(); const demuxType = player.getDemuxType(); const streamType = player.getStreamType(); const audioEngine = player.getAudioEngineType(); let recordingDuration = player.getRecordingDuration(); let recordingTotalSize = player.getRecordingByteLength(); const isAudioPlaybackRateSpeed = player.isAudioPlaybackRateSpeed(); const videoIframeIntervalTs = player.videoIframeIntervalTs; recordingDuration = formatTimeTips(recordingDuration); recordingTotalSize = formatFileSize(recordingTotalSize); const playType = player.isPlayback() ? '录播' : '直播'; let isDropping = stats.isDropping; // let isWebrtcH264 = player.isWebrtcH264(); const isMSEDecodeAudio = player._opt.useMSE && player._opt.mseDecodeAudio; const kbpsShow = player.control ? player.control.kbpsShow : '0 KB/s'; const videoPlaybackQuality = player.getVideoPlaybackQuality(); const renderDom = `
版本 ${versionTime}
${player._opt.isMulti ? `
UUid ${player._opt.debugUuid}
` : ''} ${player.isInMulti() ? `
窗口下标 ${player._opt.multiIndex}
` : ''} ${performanceMemory ? `
内存大小限制 ${formatFileSize(performanceMemory.jsHeapSizeLimit)}
可使用的内存 ${formatFileSize(performanceMemory.totalJSHeapSize)}
已使用的内存 ${formatFileSize(performanceMemory.usedJSHeapSize)}
` : ''} ${cpuLevelTitle ? `
CPU压力情况 ${cpuLevelTitle}
` : ''} ${performanceMemory && performanceMemory.usedJSHeapSize > performanceMemory.totalJSHeapSize ? `
可能内存泄漏 是
` : ''}
播放模式 ${playType}
${player.isPlayback() ? `
播放倍率 ${player.playback.rate}倍
播放模式 ${player.playback.isUseFpsRender ? '固定FPS' : '动态FPS'}
${player.playback.isUseFpsRender ? `
固定FPS ${player.video.getStreamFps()}
` : ''} ` : ""}
解封装模式 ${DEMUX_TYPE_SHOW[demuxType]}
解码模式 ${decoderType}
渲染组件 ${renderType}
${renderType === RENDER_TYPE.canvas ? `
渲染引擎 ${canvasRenderTpe}
` : ''}
网络请求组件 ${streamType}
视频格式 ${videoInfo.encType || '-'}
视频(宽x高) ${videoInfo.width || '-'}x${videoInfo.height || '-'}
${player.isPlayer() ? `
视频GOP(ms) ${videoIframeIntervalTs || '-'}
` : ''}
音频格式 ${AUDIO_ENC_CODE_SHOW[audioInfo.encType] || '-'}
音频引擎 ${audioEngine || '-'}
音频通道 ${audioInfo.channels || '-'}
音频采样率 ${audioInfo.sampleRate || '-'}
${player.isPlayer() ? `
播放器初始化(ms) ${times.playTimestamp}
开始请求地址(ms) ${times.streamTimestamp}
请求响应(ms) ${times.streamResponseTimestamp}
解封装(ms) ${times.demuxTimestamp}
解码(ms) ${times.decodeTimestamp}
页面开始渲染(ms) ${times.videoTimestamp}
初始化到页面渲染(ms) ${times.allTimestamp}
${player.recording ? `
视频录制时间 ${recordingDuration}
视频录制大小 ${recordingTotalSize}
` : ''} ` : ''}
音频码率(bit) ${stats.abps}
视频码率(bit) ${stats.vbps}
视频帧率(fps) ${stats.fps}
视频峰值帧率(fps) ${stats.maxFps}
解码帧率(fps) ${stats.dfps}
音频缓冲帧 ${stats.audioBuffer}
音频缓冲时长(ms) ${stats.audioBufferDelayTs}
${player.isPlayer() ? `
视频待解码帧 ${stats.demuxBuffer}
` : `
缓存时长(ms) ${stats.playbackCacheDataDuration}
视频待渲染帧 ${stats.playbackVideoBuffer}
视频待解码帧 ${stats.demuxBuffer}
音频待解码帧 ${stats.audioDemuxBuffer}
`}
待解封装数据(byte) ${stats.flvBuffer}
${player._opt.useMSE ? `
MSE缓冲时长(ms) ${stats.mseDelay}
MSE待解码帧 ${stats.msePendingBuffer}
MSE缓存时长(s) ${stats.mseStore}
MSE解码间隔(ms) ${stats.mseDecodeDiffTimes}
MSE解码时间(ms) ${stats.mseTs}
MSE播放模式 ${stats.mseDecodePlaybackRate > 1 ? '加速' : '正常'}
` : ''} ${player._opt.useWCS ? `
WCS解码间隔(ms) ${stats.wcsDecodeDiffTimes}
` : ''} ${player.isOldHls() ? `
HLS缓冲时长(ms) ${stats.hlsDelay}
` : ''} ${player.isUseHls265() ? `
HLS缓冲时长(ms) ${stats.hlsDelay}
HLS待解码帧 ${stats.hlsDemuxLength}
HLS待解码视频帧 ${stats.hlsDemuxVideoLength}
HLS待解码音频帧 ${stats.hlsDemuxAudioLength}
` : ''} ${player.isPlayer() && videoPlaybackQuality ? `
Video已渲染帧 ${videoPlaybackQuality.renderedVideoFrames}
Video已丢弃帧 ${videoPlaybackQuality.droppedVideoFrames}
` : ''} ${player.isPlayer() ? `
网络延迟(ms) ${stats.netBuf}
缓冲时长(ms) ${stats.buf}
最新缓冲时长(ms) ${stats.pushLatestDelay}
` : ''} ${player._opt.useMSE || player.isWebrtcH264() || player.isAliyunRtc() ? `
video显示时间(s) ${stats.videoCurrentTime}
video间隔时间(s) ${stats.videoCurrentTimeDiff}
videoBuffer缓存时间(ms) ${stats.mseVideoBufferDelayTime}
` : ''}
视频显示时间(ms) ${stats.currentPts || stats.ts}
${player._opt.hasAudio && player.isAudioNotMute() && isFalse(isMSEDecodeAudio) ? `
音频显示时间(ms) ${stats.audioTs}
${player._opt.hasVideo ? `
音视频同步时间戳(ms) ${stats.audioSyncVideo}
` : ''}
音频播放模式 ${isAudioPlaybackRateSpeed ? '加速' : '正常'}
` : ''}
视频解码时间(ms) ${stats.dts}
${player.isPlayer() ? `
解码前-解码后延迟(ms) ${stats.delayTs}
总延迟(网络+解码)(ms) ${stats.totalDelayTs}
` : ''} ${player.isPlayer() && stats.isStreamTsMoreThanLocal ? `
是否超过一倍率推流 是
` : ''} ${player.isPlayer() ? `
是否播放流畅 ${stats.videoSmooth}
` : ''} ${player.isPlayer() ? `
是否在丢帧 ${isDropping}
` : ''}
网速 ${kbpsShow}
播放时长(s) ${formatTimeTips(stats.pTs)}
`; control.$performancePanel.insertAdjacentHTML('beforeend', renderDom); } else { control.$performancePanel.innerHTML = ''; setStyle$1(control.$performancePanel, 'display', 'none'); } }); player.on(EVENTS.togglePerformancePanel, flag => { setStyle$1(control.$performance, 'display', flag ? 'none' : 'flex'); setStyle$1(control.$performanceActive, 'display', flag ? 'flex' : 'none'); }); player.on(EVENTS.faceDetectActive, flag => { setStyle$1(control.$faceDetect, 'display', flag ? 'none' : 'flex'); setStyle$1(control.$faceDetectActive, 'display', flag ? 'flex' : 'none'); }); player.on(EVENTS.objectDetectActive, flag => { setStyle$1(control.$objectDetect, 'display', flag ? 'none' : 'flex'); setStyle$1(control.$objectDetectActive, 'display', flag ? 'flex' : 'none'); }); player.on(EVENTS.occlusionDetectActive, flag => { setStyle$1(control.$occlusionDetect, 'display', flag ? 'none' : 'flex'); setStyle$1(control.$occlusionDetectActive, 'display', flag ? 'flex' : 'none'); }); }); var property = ((player, control) => { Object.defineProperty(control, 'controlsRect', { get: () => { return control.$controls.getBoundingClientRect(); } }); Object.defineProperty(control, 'controlsInnerRect', { get: () => { return control.$controlsInner.getBoundingClientRect(); } }); Object.defineProperty(control, 'controlsLeftRect', { get: () => { return control.$controlsLeft.getBoundingClientRect(); } }); Object.defineProperty(control, 'controlsRightRect', { get: () => { return control.$controlsRight.getBoundingClientRect(); } }); Object.defineProperty(control, 'controlsPlaybackTimeInner', { get: () => { return control.$playbackTimeInner && control.$playbackTimeInner.getBoundingClientRect() || {}; } }); Object.defineProperty(control, 'controlsPlaybackBtnsRect', { get: () => { return control.$controlsPlaybackBtns && control.$controlsPlaybackBtns.getBoundingClientRect() || { width: 0 }; } }); }); var events = ((player, control) => { const { events: { proxy }, debug } = player; // const options = player._opt; const operateBtns = options.operateBtns; function volumeChangeFromEvent(event) { const { bottom: panelBottom, height: panelHeight } = control.$volumePanel.getBoundingClientRect(); const { height: handleHeight } = control.$volumeHandle.getBoundingClientRect(); let moveLen = event.y; // if (isMobile() && player.fullscreen) { // moveLen = event.x; // } const percentage = clamp(panelBottom - moveLen - handleHeight / 2, 0, panelHeight - handleHeight / 2) / (panelHeight - handleHeight); return percentage; } // 右键菜单 for pc if (isPc()) { // proxy(window, ['click', 'contextmenu'], event => { if (event.composedPath().indexOf(player.$container) > -1) { control.isFocus = true; } else { control.isFocus = false; } }); } proxy(control.$controls, 'click', e => { e.stopPropagation(); }); if (operateBtns.play) { proxy(control.$pause, 'click', e => { if (options.playType === PLAY_TYPE.playbackTF && options.playbackConfig.uiUsePlaybackPause) { player.playbackPause = true; // player.emit(EVENTS.playbackPauseOrResume, true); } else { if (isFunction$1(operateBtns.pauseFn)) { operateBtns.pauseFn(); } else { player.pauseForControl(); } } }); // 监听 play 方法 proxy(control.$play, 'click', e => { if (options.playType === PLAY_TYPE.playbackTF && player.playbackPause) { player.playbackPause = false; // player.emit(EVENTS.playbackPauseOrResume, false); } else { if (isFunction$1(operateBtns.playFn)) { operateBtns.playFn(); } else { player.playForControl().then(() => { player.resumeAudioAfterPause(); }); } } }); } // 监听 play 方法 proxy(control.$playBig, 
'click', e => { if (options.playType === PLAY_TYPE.playbackTF && player.playbackPause) { player.playbackPause = false; // player.emit(EVENTS.playbackPauseOrResume, false); } else { if (isFunction$1(operateBtns.playFn)) { operateBtns.playFn(); } else { player.playForControl().then(() => { player.resumeAudioAfterPause(); }); } } }); if (operateBtns.screenshot) { proxy(control.$screenshot, 'click', e => { e.stopPropagation(); if (isFunction$1(operateBtns.screenshotFn)) { operateBtns.screenshotFn(); } else { player.video.screenshot(); } }); } if (operateBtns.audio) { // audio panel if (isPc()) { proxy(control.$volume, 'mouseover', () => { control.$volumePanelWrap.classList.add('jb-pro-volume-panel-wrap-show'); }); proxy(control.$volume, 'mouseout', () => { control.$volumePanelWrap.classList.remove('jb-pro-volume-panel-wrap-show'); }); proxy(control.$volumePanel, 'click', event => { event.stopPropagation(); player.volume = volumeChangeFromEvent(event); }); proxy(control.$volumeHandle, 'mousedown', event => { event.stopPropagation(); control.isVolumeDroging = true; }); proxy(control.$volumeHandle, 'mousemove', event => { if (control.isVolumeDroging) { player.volume = volumeChangeFromEvent(event); } }); proxy(document, 'mouseup', () => { if (control.isVolumeDroging) { control.isVolumeDroging = false; } }); } proxy(control.$volumeOn, 'click', e => { e.stopPropagation(); setStyle$1(control.$volumeOn, 'display', 'none'); setStyle$1(control.$volumeOff, 'display', 'block'); const lastVolume = player.volume; player.volume = 0; // inner player._lastVolume = isPc() ? lastVolume : 1; }); proxy(control.$volumeOff, 'click', e => { e.stopPropagation(); setStyle$1(control.$volumeOn, 'display', 'block'); setStyle$1(control.$volumeOff, 'display', 'none'); player.volume = isPc() ? 
player.lastVolume || 0.5 : 1; }); } if (operateBtns.record) { proxy(control.$record, 'click', e => { e.stopPropagation(); if (isFunction$1(operateBtns.recordFn)) { operateBtns.recordFn(); } else { player.recording = true; } }); proxy(control.$recordStop, 'click', e => { e.stopPropagation(); if (isFunction$1(operateBtns.recordStopFn)) { operateBtns.recordStopFn(); } else { player.recording = false; } }); } proxy(control.$recordingStop, 'click', e => { e.stopPropagation(); if (isFunction$1(operateBtns.recordStopFn)) { operateBtns.recordStopFn(); } else { player.recording = false; } }); if (operateBtns.fullscreen) { proxy(control.$fullscreen, 'click', e => { e.stopPropagation(); if (isFunction$1(operateBtns.fullscreenFn)) { operateBtns.fullscreenFn(); } else { player.fullscreen = true; } }); proxy(control.$fullscreenExit, 'click', e => { e.stopPropagation(); if (isFunction$1(operateBtns.fullscreenExitFn)) { operateBtns.fullscreenExitFn(); } else { player.fullscreen = false; } }); } if (operateBtns.ptz) { proxy(control.$ptz, 'click', e => { e.stopPropagation(); setStyle$1(control.$ptzActive, 'display', 'flex'); setStyle$1(control.$ptz, 'display', 'none'); control.$ptzControl.classList.add('jb-pro-ptz-controls-show'); }); proxy(control.$ptzActive, 'click', e => { e.stopPropagation(); setStyle$1(control.$ptz, 'display', 'flex'); setStyle$1(control.$ptzActive, 'display', 'none'); control.$ptzControl.classList.remove('jb-pro-ptz-controls-show'); }); // 绑定在 ptz 按钮上面 才会出现的。 control.$ptzArrows.forEach($ptzArrow => { if (options.ptzClickType === PTZ_ACTIVE_EVENT_TYPE.click) { proxy($ptzArrow, 'click', e => { e.stopPropagation(); const target = e.currentTarget; const dataset = target.dataset; const arrow = dataset.arrow; control.$ptzBgActive.classList.add('jb-pro-ptz-bg-active-show'); control.$ptzBgActive.classList.add(`jb-pro-ptz-bg-active-${arrow}`); control.$ptzControlCircular.classList.add(`jb-pro-ptz-control-${arrow}`); player.emit(EVENTS.ptz, convertToCamelCase(arrow)); setTimeout(() => { control.$ptzBgActive.classList.remove('jb-pro-ptz-bg-active-show'); PTZ_ARROW.forEach(arrow => { control.$ptzBgActive.classList.remove(`jb-pro-ptz-bg-active-${arrow}`); control.$ptzControlCircular.classList.remove(`jb-pro-ptz-control-${arrow}`); }); player.emit(EVENTS.ptz, PTZ_OBJ.stop); }, options.ptzStopEmitDelay * 1000); }); } else if (options.ptzClickType === PTZ_ACTIVE_EVENT_TYPE.mouseDownAndUp) { let isPtzMouseDown = false; proxy($ptzArrow, 'mousedown', e => { e.stopPropagation(); isPtzMouseDown = true; const target = e.currentTarget; const dataset = target.dataset; const arrow = dataset.arrow; control.$ptzBgActive.classList.add('jb-pro-ptz-bg-active-show'); control.$ptzBgActive.classList.add(`jb-pro-ptz-bg-active-${arrow}`); control.$ptzControlCircular.classList.add(`jb-pro-ptz-control-${arrow}`); player.emit(EVENTS.ptz, convertToCamelCase(arrow)); }); const ptzStop = () => { isPtzMouseDown = false; control.$ptzBgActive.classList.remove('jb-pro-ptz-bg-active-show'); PTZ_ARROW.forEach(arrow => { control.$ptzBgActive.classList.remove(`jb-pro-ptz-bg-active-${arrow}`); control.$ptzControlCircular.classList.remove(`jb-pro-ptz-control-${arrow}`); }); player.emit(EVENTS.ptz, PTZ_OBJ.stop); }; proxy($ptzArrow, 'mouseup', e => { e.stopPropagation(); if (isPtzMouseDown) { ptzStop(); } }); proxy(window, 'mouseup', e => { e.stopPropagation(); if (isPtzMouseDown) { ptzStop(); } }); } }); if (options.ptzZoomShow) { if (options.ptzClickType === PTZ_ACTIVE_EVENT_TYPE.click) { proxy(control.$ptzExpand, 'click', e => { 
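// Note (explanatory comment): in PTZ_ACTIVE_EVENT_TYPE.click mode each zoom button emits its
// PTZ command once (PTZ_OBJ.zoomExpand / PTZ_OBJ.zoomNarrow) and then schedules PTZ_OBJ.stop
// after options.ptzStopEmitDelay seconds, so a single click produces a bounded movement.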
e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.zoomExpand); setTimeout(() => { player.emit(EVENTS.ptz, PTZ_OBJ.stop); }, options.ptzStopEmitDelay * 1000); }); proxy(control.$ptzNarrow, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.zoomNarrow); setTimeout(() => { player.emit(EVENTS.ptz, PTZ_OBJ.stop); }, options.ptzStopEmitDelay * 1000); }); } else if (options.ptzClickType === PTZ_ACTIVE_EVENT_TYPE.mouseDownAndUp) { let isPtzExpandMouseDown = false; let isPtzNarrowMouseDown = false; proxy(control.$ptzExpand, 'mousedown', e => { e.stopPropagation(); isPtzExpandMouseDown = true; player.emit(EVENTS.ptz, PTZ_OBJ.zoomExpand); }); proxy(control.$ptzNarrow, 'mousedown', e => { e.stopPropagation(); isPtzNarrowMouseDown = true; player.emit(EVENTS.ptz, PTZ_OBJ.zoomNarrow); }); const ptzStop = () => { isPtzExpandMouseDown = false; isPtzNarrowMouseDown = false; player.emit(EVENTS.ptz, PTZ_OBJ.stop); }; proxy(control.$ptzExpand, 'mouseup', e => { e.stopPropagation(); if (isPtzExpandMouseDown) { ptzStop(); } }); proxy(control.$ptzNarrow, 'mouseup', e => { e.stopPropagation(); if (isPtzNarrowMouseDown) { ptzStop(); } }); proxy(window, 'mouseup', e => { e.stopPropagation(); if (isPtzExpandMouseDown || isPtzNarrowMouseDown) { ptzStop(); } }); } } if (options.ptzApertureShow) { if (options.ptzClickType === PTZ_ACTIVE_EVENT_TYPE.click) { proxy(control.$ptzApertureFar, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.apertureFar); setTimeout(() => { player.emit(EVENTS.ptz, PTZ_OBJ.fiStop); }, options.ptzStopEmitDelay * 1000); }); proxy(control.$ptzApertureNear, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.apertureNear); setTimeout(() => { player.emit(EVENTS.ptz, PTZ_OBJ.fiStop); }, options.ptzStopEmitDelay * 1000); }); } else if (options.ptzClickType === PTZ_ACTIVE_EVENT_TYPE.mouseDownAndUp) { let isPtzApertureFarMouseDown = false; let isPtzApertureNearMouseDown = false; proxy(control.$ptzApertureFar, 'mousedown', e => { e.stopPropagation(); isPtzApertureFarMouseDown = true; player.emit(EVENTS.ptz, PTZ_OBJ.apertureFar); }); proxy(control.$ptzApertureNear, 'mousedown', e => { e.stopPropagation(); isPtzApertureNearMouseDown = true; player.emit(EVENTS.ptz, PTZ_OBJ.apertureNear); }); const ptzStop = () => { isPtzApertureFarMouseDown = false; isPtzApertureNearMouseDown = false; player.emit(EVENTS.ptz, PTZ_OBJ.fiStop); }; proxy(control.$ptzApertureFar, 'mouseup', e => { e.stopPropagation(); if (isPtzApertureFarMouseDown) { ptzStop(); } }); proxy(control.$ptzApertureNear, 'mouseup', e => { e.stopPropagation(); if (isPtzApertureNearMouseDown) { ptzStop(); } }); proxy(window, 'mouseup', e => { e.stopPropagation(); if (isPtzApertureFarMouseDown || isPtzApertureNearMouseDown) { ptzStop(); } }); } } if (options.ptzFocusShow) { if (options.ptzClickType === PTZ_ACTIVE_EVENT_TYPE.click) { proxy(control.$ptzFocusFar, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.focusFar); setTimeout(() => { player.emit(EVENTS.ptz, PTZ_OBJ.fiStop); }, options.ptzStopEmitDelay * 1000); }); proxy(control.$ptzFocusNear, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.focusNear); setTimeout(() => { player.emit(EVENTS.ptz, PTZ_OBJ.fiStop); }, options.ptzStopEmitDelay * 1000); }); } else if (options.ptzClickType === PTZ_ACTIVE_EVENT_TYPE.mouseDownAndUp) { let isPtzFocusFarMouseDown = false; let isPtzFocusNearMouseDown = false; proxy(control.$ptzFocusFar, 'mousedown', e => { e.stopPropagation(); isPtzFocusFarMouseDown = true; 
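// Note (explanatory comment): in PTZ_ACTIVE_EVENT_TYPE.mouseDownAndUp mode the focus command is
// emitted on mousedown and keeps applying until mouseup emits PTZ_OBJ.fiStop; a window-level
// mouseup listener acts as a fallback in case the pointer is released outside the button.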
player.emit(EVENTS.ptz, PTZ_OBJ.focusFar); }); proxy(control.$ptzFocusNear, 'mousedown', e => { e.stopPropagation(); isPtzFocusNearMouseDown = true; player.emit(EVENTS.ptz, PTZ_OBJ.focusNear); }); const ptzStop = () => { isPtzFocusFarMouseDown = false; isPtzFocusNearMouseDown = false; player.emit(EVENTS.ptz, PTZ_OBJ.fiStop); }; proxy(control.$ptzFocusFar, 'mouseup', e => { e.stopPropagation(); if (isPtzFocusFarMouseDown) { ptzStop(); } }); proxy(control.$ptzFocusNear, 'mouseup', e => { e.stopPropagation(); if (isPtzFocusNearMouseDown) { ptzStop(); } }); proxy(window, 'mouseup', e => { e.stopPropagation(); if (isPtzFocusFarMouseDown || isPtzFocusNearMouseDown) { ptzStop(); } }); } } if (options.ptzCruiseShow) { proxy(control.$ptzCruisePlay, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.cruiseStart); }); proxy(control.$ptzCruisePause, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.cruiseStop); }); } if (options.ptzFogShow) { proxy(control.$ptzFogOpen, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.fogOpen); }); proxy(control.$ptzFogClose, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.fogClose); }); } if (options.ptzWiperShow) { proxy(control.$ptzWiperOpen, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.wiperOpen); }); proxy(control.$ptzWiperClose, 'click', e => { e.stopPropagation(); player.emit(EVENTS.ptz, PTZ_OBJ.wiperClose); }); } // 支持拖拽 if (options.ptzSupportDraggable) { control.isPtzControlDroging = false; control.tempPtzPosition = { x: 0, y: 0 }; proxy(control.$ptzControl, isMobile() ? 'touchstart' : 'mousedown', event => { event.stopPropagation(); control.isPtzControlDroging = true; control.$ptzControl.style.cursor = 'grabbing'; const { posX, posY } = getMousePosition(event); control.tempPtzPosition = { x: posX, y: posY }; }); proxy(control.$ptzControl, isMobile() ? 'touchmove' : 'mousemove', event => { if (control.isPtzControlDroging) { event.stopPropagation(); const { posX, posY } = getMousePosition(event); const x = control.tempPtzPosition.x - posX; const y = control.tempPtzPosition.y - posY; // offsetLeft取到的值是数值型的,只能取值,不能赋值。 control.$ptzControl.style.left = `${control.$ptzControl.offsetLeft - x}px`; control.$ptzControl.style.top = `${control.$ptzControl.offsetTop - y}px`; control.tempPtzPosition = { x: posX, y: posY }; } }); function handleMouseUp() { if (control.isPtzControlDroging) { control.isPtzControlDroging = false; control.$ptzControl.style.cursor = 'grab'; control.tempPtzPosition = { x: 0, y: 0 }; } } proxy(control.$ptzControl, isMobile() ? 'touchend' : 'mouseup', event => { event.stopPropagation(); handleMouseUp(); }); proxy(window, isMobile() ? 
'touchend' : 'mouseup', event => { event.stopPropagation(); handleMouseUp(); }); } } if (operateBtns.performance) { proxy(control.$performance, 'click', e => { e.stopPropagation(); player.togglePerformancePanel(true); }); proxy(control.$performanceActive, 'click', e => { e.stopPropagation(); player.togglePerformancePanel(false); }); } if (operateBtns.logSave) { proxy(control.$logSave, 'click', e => { e.stopPropagation(); player.downloadMemoryLog(); }); } if (operateBtns.aiFace) { proxy(control.$faceDetect, 'click', e => { e.stopPropagation(); player.faceDetect(true); }); proxy(control.$faceDetectActive, 'click', e => { e.stopPropagation(); player.faceDetect(false); }); } if (operateBtns.aiObject) { proxy(control.$objectDetect, 'click', e => { e.stopPropagation(); player.objectDetect(true); }); proxy(control.$objectDetectActive, 'click', e => { e.stopPropagation(); player.objectDetect(false); }); } if (operateBtns.aiOcclusion) { proxy(control.$occlusionDetect, 'click', e => { e.stopPropagation(); player.occlusionDetect(true); }); proxy(control.$occlusionDetectActive, 'click', e => { e.stopPropagation(); player.occlusionDetect(false); }); } if (player._opt.hasControl && player._opt.controlAutoHide) { // proxy(player.$container, 'mouseover', () => { if (!player.fullscreen) { setStyle$1(control.$controls, 'display', 'block'); startDelayControlHidden(); } }); proxy(player.$container, 'mousemove', () => { if (player.$container && control.$controls) { if (!player.fullscreen) { if (control.$controls.style.display === 'none') { setStyle$1(control.$controls, 'display', 'block'); startDelayControlHidden(); } } else { if (control.$controls.style.display === 'none') { setStyle$1(control.$controls, 'display', 'block'); startDelayControlHidden(); } } } }); proxy(player.$container, 'mouseout', () => { stopDelayControlHidden(); setStyle$1(control.$controls, 'display', 'none'); }); let delayHiddenTimeout = null; const startDelayControlHidden = () => { stopDelayControlHidden(); delayHiddenTimeout = setTimeout(() => { setStyle$1(control.$controls, 'display', 'none'); }, 5 * 1000); }; const stopDelayControlHidden = () => { if (delayHiddenTimeout) { clearTimeout(delayHiddenTimeout); delayHiddenTimeout = null; } }; } // show playback operate // 录像流监听方法 if (player._opt.playType === PLAY_TYPE.playbackTF) { let playbackControlType = player._opt.playbackConfig.controlType; if (player._opt.playbackConfig.showRateBtn) { proxy(control.$speedMenu, 'mouseover', () => { control.$speedMenuList.classList.add('jb-pro-speed-menu-shown'); }); proxy(control.$speedMenu, 'mouseout', () => { control.$speedMenuList.classList.remove('jb-pro-speed-menu-shown'); }); proxy(control.$speedMenuList, 'click', e => { const target = getTarget(e); if (target.matches('div.jb-pro-speed-menu-item')) { const dataset = target.dataset; player.emit(EVENTS.playbackPreRateChange, dataset.speed); } }); } if (playbackControlType === PLAYBACK_CONTROL_TYPE.normal) { // proxy(control.$playbackNarrow, 'click', e => { e.stopPropagation(); if (player.playback) { player.playback.narrowPrecision(); } }); proxy(control.$playbackExpand, 'click', e => { e.stopPropagation(); if (player.playback) { player.playback.expandPrecision(); } }); // proxy(control.$playbackTimeList, 'click', e => { const target = getTarget(e); if (target.matches('div.jb-pro-playback-time-minute-one')) { if (player.playback) { player.playback.seek(target.dataset); } } }); // 支持滚动轴 if (player._opt.playbackConfig.supportWheel) { // proxy(control.$playbackTimeInner, 'wheel', event => { 
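// Note (explanatory comment): the wheel handler below normalizes scroll input. Legacy
// event.wheelDelta typically reports multiples of ±120 per notch, while Firefox's event.detail
// uses multiples of ±3 with the opposite sign, so both are mapped to roughly ±1 per notch:
// a positive normalized delta expands the timeline precision, a negative one narrows it.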
event.preventDefault(); const delta = event.wheelDelta ? event.wheelDelta / 120 : -(event.detail || 0) / 3; // 向下 if (delta > 0) { if (player.playback) { player.playback.expandPrecision(); } } // 向上 else { if (player.playback) { player.playback.narrowPrecision(); } } }); } } else if (playbackControlType === PLAYBACK_CONTROL_TYPE.simple) { control.isDroging = false; // proxy(control.$playbackProgress, 'click', event => { if (event.target !== control.$playbackProgressIndicator) { let percentage = 0; let second = 0; // 看下是否被旋转了。 if (player.isInWebFullscreen()) { percentage = event.touches[0].clientY / player.height; second = percentage * player.playback.totalDuration; } else { const position = getPosFromEvent(control, player.playback.totalDuration, event); percentage = position.percentage; second = position.second; } if (player.playback) { player.playback.seek({ time: second }); } } }); proxy(control.$playbackProgress, 'mousemove', event => { setStyle$1(control.$playbackProgressTip, 'display', 'block'); const { width, time } = getPosFromEvent(control, player.playback.totalDuration, event); control.$playbackProgressTip.innerHTML = time; const tipWidth = control.$playbackProgressTip.clientWidth; if (width <= tipWidth / 2) { setStyle$1(control.$playbackProgressTip, 'left', 0); } else if (width > control.$playbackProgress.clientWidth - tipWidth / 2) { setStyle$1(control.$playbackProgressTip, 'left', `${control.$playbackProgress - tipWidth}px`); } else { setStyle$1(control.$playbackProgressTip, 'left', `${width - tipWidth / 2}px`); } }); proxy(control.$playbackProgress, 'mouseout', () => { setStyle$1(control.$playbackProgressTip, 'display', 'none'); }); proxy(control.$playbackProgressIndicator, 'mousedown', event => { control.isDroging = true; }); proxy(control.$playbackProgress, 'mousemove', event => { if (control.isDroging) { const { second, percentage } = getPosFromEvent(control, player.playback.totalDuration, event); if (player.playback) { player.playback.seek({ time: second }); } } }); proxy(control.$playbackProgress, 'mouseup', event => { if (control.isDroging) { control.isDroging = false; } }); } } // if (operateBtns.quality) { proxy(control.$qualityMenu, 'mouseover', () => { control.$qualityMenuList.classList.add('jb-pro-quality-menu-shown'); }); proxy(control.$qualityMenu, 'mouseout', () => { control.$qualityMenuList.classList.remove('jb-pro-quality-menu-shown'); }); proxy(control.$qualityMenuList, 'click', e => { const target = getTarget(e); if (target.matches('div.jb-pro-quality-menu-item')) { const dataset = target.dataset; player.streamQuality = dataset.quality; } }); } if (operateBtns.scale) { proxy(control.$scaleMenu, 'mouseover', () => { control.$scaleMenuList.classList.add('jb-pro-scale-menu-shown'); }); proxy(control.$scaleMenu, 'mouseout', () => { control.$scaleMenuList.classList.remove('jb-pro-scale-menu-shown'); }); proxy(control.$scaleMenuList, 'click', e => { const target = getTarget(e); if (target.matches('div.jb-pro-scale-menu-item')) { const dataset = target.dataset; player.setScaleMode(dataset.scale); } }); } if (operateBtns.zoom) { proxy(control.$zoom, 'click', e => { e.stopPropagation(); player.zooming = true; }); proxy(control.$zoomStop, 'click', e => { e.stopPropagation(); player.zooming = false; }); } proxy(control.$zoomExpand, 'click', e => { e.stopPropagation(); if (player.zoom) { player.zoom.expandPrecision(); } }); proxy(control.$zoomNarrow, 'click', e => { e.stopPropagation(); if (player.zoom) { player.zoom.narrowPrecision(); } }); proxy(control.$zoomStop2, 
'click', e => { e.stopPropagation(); player.zooming = false; }); if (operateBtns.close) { proxy(control.$close, 'click', e => { e.stopPropagation(); player.doDestroy(); }); } proxy(control.$tipsMessageClose, 'click', e => { e.stopPropagation(); // clear and hidden control.$tipsMessageContent.innerHTML = ''; setStyle$1(control.$tipsMessage, 'display', 'none'); }); }); function styleInject(css, ref) { if ( ref === void 0 ) ref = {}; var insertAt = ref.insertAt; if (!css || typeof document === 'undefined') { return; } var head = document.head || document.getElementsByTagName('head')[0]; var style = document.createElement('style'); style.type = 'text/css'; if (insertAt === 'top') { if (head.firstChild) { head.insertBefore(style, head.firstChild); } else { head.appendChild(style); } } else { head.appendChild(style); } if (style.styleSheet) { style.styleSheet.cssText = css; } else { style.appendChild(document.createTextNode(css)); } } var css_248z$1 = "@keyframes rotation{0%{-webkit-transform:rotate(0deg)}to{-webkit-transform:rotate(1turn)}}@keyframes magentaPulse{0%{background-color:#630030;-webkit-box-shadow:0 0 9px #333}50%{background-color:#a9014b;-webkit-box-shadow:0 0 18px #a9014b}to{background-color:#630030;-webkit-box-shadow:0 0 9px #333}}.jb-pro-container video::-webkit-media-controls{display:none!important}.jb-pro-container .jb-pro-icon{cursor:pointer;width:16px;height:16px;display:inline-block}.jb-pro-container .jb-pro-ptz-controls{position:absolute;width:156px;height:156px;visibility:hidden;opacity:0;border-radius:78px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAATgAAAE4BAMAAAA9UfJZAAAAMFBMVEUAAABHcEy0tLRZWVmysrKoqKi1tbWvr6+2traBgYG1tbWWlpa1tbW1tbVUVFS1tbVGCHqkAAAAD3RSTlMzAO9U3LSWySp3aZcVRDUDw823AAAJYUlEQVR42u3d32sbVxYH8EPHxgg/lBsa7SBkukmpSbwLI2KbEPpgZ5MQtwmM0wRMmgdhP6RgEuwlSVnYlmGMYaEvMU1KKX4QNq0pocVmm7CYfRBaQguFpbgPKRSC/4V2LGliO+bulWKrkvVrftyZ+WbxeTRG+nDnnnNmRjP3EpMR6tMH18du/0Xj1tGz5+9cf/DUlPKx5PsTkr8s3eZ1cX7ym1zkuI/f1wTFunNt9fP+FIno7/98/tFY+Y8ffBUlLrmkl2Cr96guTv27BMxP5iLCqUvi68+tpqhJKPNXBH3SjACnfimm/7Wmsl3fI/FP75lh457oPH+1Da3M+1T8481QcT0T7UetevR618LDPdH4hTlyHLGH3LoZEk6d4PlvyVW8pfNeMwzcDwa/kCKXoTzk9tfB455o1mXyEIOa+0PrFvcFt+fIU8QM/k6guOQifzNFHkN5l/flgsOJVHibfMR9l2nhBqem+VXyFZ/xghkMTp3il8lnDPKiGQROhs2lzjEuKcVW1uWk4ybk2Eq63pxk3CK/RZLiJO+Ti/vZXw3ZX1E+kon7jv+JJMY/+Q15uIRWIKmRthZk4VTDTsnFKYZtSsItWiskObq1Pjm4f8gqIrUF5W8ycAl+nAKIT/iCf1zSKFAgkW4/7drifrLmgsHF2k87alvhblFAcbJttWuDU/VtCiyyedMXbjGfCg6n6H1+cHE+TQFGFx/3jksa2xRoZO2cZ9xsUJn6e8aOeMX1aGco4Biw1jzilm0KPNJb3nBxvhI8rrtVTlCLK5ptCiEyBS+474POhr2c+NA9Lqm/QaHEiXzONW42yN5Q2ydG3OLU4MvI7+XEdImbCWvgSkN3zB1O1YYptOhoNnRNcDM2hRjGMTc4VZsOE9fVZOioyYyjUKPJrKPGNW44XFxX41rXEPc4vFTdS9iLTnFJ4wyFHAO2U1zcSoWNU7RLDnFTb1DocaLoDJfgc+HjYo3uTjTArW9TBJHdcYJTtdEocJ0NCnE97nGBIon0RQc4YzgaXIfdHhdBHdmrJuNtceubFFFkdtrhVG0lKlx3XUrsxz22KbIwLrbBTQ1Hhxsotsb18FR0OIWvtcT9Z5sijOyfW+KM6ShxXXYrXMJKRYlTtIUWuLubFGlknrXAGaPR4jrt5riERRFH7XGtwc1sRo3LHGuKi/qo7j+uhJOr9flKMBW4QR2uxk1NR4/rKjbGRdpXG/bXKtxrAEdVHNfTDXHLf0TAvbLVCJfU5hBwMSvXABfPE0To4w1wP25i4DLPG+CmRjFwncV6nIpQSF4UE7MOd7hAIJG+VIe7u4GCG3pWh0uPouA6C/txMFOuetIR3JSrmnQEN+WqJh2BVbmaSreLS+JMudKky9Xg4jYBRXq8BndoEwmXOVKDWx5GwnVs1eD0OSRcLF+N67EIKrS1Klx8GwuXHa/C/biBhRt6XoVbnsbCdW1V4bDyoZIRZZwKlg8iI8wKLl5Aw73oEWXcoQ003NCRCm59GA3XsVPBTa2g4bqLFZyWQsMp1h6uJ09woa/t4tCaV6WBEWSy7qYrQSbrbroS2MVNzUUOAXbWSnel0sU+AUbpsl/gEjYizlgo4w5vI+Kyl8o4xEryopYI3N1hRFzHszJueRQR17lVxqXnEHGxQhmHd06yd15CgBcQlcsIYokCJi69IHDxbUycOGki9toGJm7otMC9/ism7tXfBA6zBperMIHW4HIVJsDrwsrVIYE2CNEibIHDbBDlFkFJ0AYhWkSOemxUnLFGqN2r1L8ItXuV+hfFN1FxmXH6wwYqbuivdAgXd
4RQ+36p8xNq3y91flqfRsV17dD6KCquc4eWcXFbtLyCiusu0hQ0bg4VFytSGhdXICOFilNs0nFx+QOcZ5xGsGEd4DzjOC6OH+A847QD3P9jtuJ2CGjcQeP3gYM+2YQ+TYe+wMG+NETGQd+OgL6RA30LDPrm4eu/ouJe/Q37hjX0rX7oH0mgf16C/mEO+idN6B+DoX9Gx34AAfrRDeiHXqAfF0Lt/OUHrVAfUcucRn+4D/qxSOgHSqEfxcV+iBn68W/EV3AqD85Dv3IA/bIG9GsumC8IaSb+q1XYL6VBv84H/SIk9Cuk0C/fQr+2jP3CN/Sr8tCLDEAvzwC9sAX0kiDYi6lAL0MDvYAP9NJH0ItGYS+3Bb1QGVaP2LfEG/TieNDLCmIvyAi9lCX0IqDQy6diLzwLvWQv9GLH0MtER76rRqWxPgdemtwYf9kWdYdeDh97IwHoLRigN6/A3vYDesMU6K1msDfpgd7eiOmjSEf1ZdpSC3ozMuht3LA3wIPeOjDSTRdfKb7M21VCb/QJvUUq9uay0NvyYm9oHFFKdDvaChp6E23s7cehN25nh5G3vE8aZ8LGDdjMIY49zoc9dPpFx7ikHnIh7sjnHOPYTMj36oxjzDlO1UI9Xe9oUICb49iMDTBwzXCqFuKsG2gycM1wYtaFlrCK3mTgmuJU7UzkA9cUx2bDGjpFH2FucUk9pA57onGNa4lj31uhnJzEtA+ZexxLh3KpkykwL7g4D+GUuJuPe8Kx5RCuJtJbzBuuJ/hyMmCtecSx2aBzIqaNMK+4pBHwtU7WznnGiZwI9Oykq1U2tMWxxSD7hKL3MT84VQ/wwGbzpi8c+47fCsp2kt9g/nDsp6AyNqb1Mb+4pBFQKU7bpm8cS/DjQdg+aXT/wTWOzfLL8m2DfITJwLFFS/oZQHf7CecQpxq25GqnGO0nnEMcS2iSq13WWmCycKLaHZebDDeYPBz7mb8tz3aff8Rk4tiivJQd5H1MLo5NyNIN8t6cbJw6ZV2WYys6tTnHCZ2MsRM2k8nHSdG5srnBMTXNr/qzfcYLLmyucEyd8FdR7vNeNzZ3OJZc5G967mTKu7wvx4LDMfYFtz2efMYM/o7LL3OLY080byVlULNusqBx7AeDX3B9aJWH3P6aBY8rpUX+W3e2t3SXqeAZVzq0/JyLmRe7wt0fUs849t8Jzv/u8Ngq/+K8d42FhxODp/P8VQc85VPxjzc9folXHFO/1Lh1rc3BjT0S//SeycLGCd6Sxvm51abDp8xf4dyaNL1/gw+caBhLuvj6O6v36mWn5scEPe+H5hMn4uP3hUEAr63e6y+PYX//qflHY+U/fvCVzw/3ixPD98vSbV4X5ye/yfn+aP+4MvDpg+tjZ4+K8bKOnr1z/cFTU8rH/g92biFxn2S73AAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%;transition:visibility .3s,opacity .3s;right:43px;bottom:135px}.jb-pro-container .jb-pro-ptz-controls.show-vertical{right:43px}.jb-pro-container .jb-pro-ptz-controls.show-vertical .jb-pro-ptz-btns{left:0;top:156px}.jb-pro-container .jb-pro-ptz-controls.show-level{right:163px}.jb-pro-container .jb-pro-ptz-controls.show-level .jb-pro-ptz-btns{min-height:156px;left:144px;top:0;display:flex;flex-direction:column;justify-content:center}.jb-pro-container .jb-pro-ptz-controls.jb-pro-ptz-controls-show{visibility:visible;opacity:1}.jb-pro-container 
.jb-pro-ptz-bg-active{visibility:hidden;opacity:0;width:156px;height:156px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAATgAAAE4CAMAAAD4oR9YAAAAM1BMVEX///////////////////////////9HcEz///////////////////////////////////85yRS0AAAAEXRSTlO5DCgVgZBxAK2fQDRkBR5XTPLKM/gAABnnSURBVHja7F0Jkqs6DGQPO9z/tD8Jq6WWbCCvIjKfzAGmulrW3ooedr6ui+M4TdP++SXPr1l/SdL3aRrHhv7ZyA5qb9xe0L3Am+DrkzeCL/BeX908MezTuPsfOArdgl3KsZuhq99fk/Tx3waum+ByAHua5QbYilkzY1aP728YhrH5InrfBa57OLAtVjpRbYaumex04dq4APeC7vnVSfo/45bXLe33jGscMx3f0A1vyg3t69e2dRL/NeA6wrgdcCvjyPM2U25mXDt9xVD3f/qN0yi3Mm6P20S54vlXtGPS/R3GPSbYOsC4ZAvmJtiaGiL3Zlzx/Ht+Y/KXTJXbqmaqe9za1VYn3N7YpX/OVGev2qduOLIiB7xqOzGuWCiXFVmWtU3368A5lkqeOJI21I5XXaORxVRnxmUTdNnY/4U3riNvHMJts9XRtdXVUttipdzrK/4x7UyY6sK4Gbo+nU21T1zKcd9AGJetlMvyLKvj3zXVfeqQElMljINx3MK4xVQ3xj2Ry7N/6CiMOIfYyVUXWxUyBx7HuZRbcHt9bf/Lb9zsHlzKzabauJaK47iVcC7jJujS33/joKkmxDnM4QiJ4xDjZuT+DXQW3jgxV012qcPuiePhCGfchlv1/P0D6Czmqmuq2gPGkbIS8Q4ZsNU3dGP3Y2+cW1RyKpkrbAnwqhi3iXHFHrU3bFVV5c3vBsCsOALjkXErAW85F3rjFvBm5Kos+TngCOXYG7fA1ojFER7GPUHbmer0tfGPANeROM6pjvDMQSkrsWQ1d564Fbr61964TvSqDa6O0ELmAtvGuc2rrpQrn/aa/qCpYq+6mSpOVhnjWBy38u2JXFl9yL8acg6CV3Ur5yxVZfW4AsRxG+XKssz6n3njVMYR4Eg8sj1yi3tgtroxrpyhG38gc+h8PYddPQ551dVQW5jju2/cG7kXdB946Uy9cbDnQOpxuCCHcq5dHDcht8D2/K67VxPNGtLJd7qDTcgb1zLGbXEcY9z0Fd39GReTzIH1B/2lcxrGTYxjXnXyqxNyef8zpipVlfDsyCCmDkuumhGvWq6W+vyisqxvDJwwOxJQO6fNmjaQcRt0ZdR2dwWOFZViuculNWtgBZjk+DNq1cq45y+Lf5NxE25B3oEyLueFJWqtT+Ciqr8r48jsCAyAG+2Na53MAdQxX16VhHEL4Z7Ilc2dGad28pskaULiEdDlyijjdqa6gBeNtwSu63AnP3V6NUnAG9cu1RHOuL2hVi5qr6+9Za4qV0dCGcfCEfDIufWRFbsZvKy7KeNgW7XHvRqxWfOGLhO6XCrjoiiPb/rGdZ75uGRfj9u3B1sWAEtdLuxVF/Cq9HaMe4A4TptkZYwLqI44rmGDbUe5E8hZcg54zquRbHXu5NN6HKuO7N84YqwbdGV/Q8YF5arUN7CJTNLkgplD5T5xG+OOI2ehAhxv00ocOlpWwhHwljns5uNg6bxCb9wbueSupip6VTnj2jGudSvnOeqrVo6h7vl2nHM2K8AkV1WyfDYDvHUcWF+1VBl3EDlLKdc2dQ6aNbhXw2eAt14Nf+MqFse5rDuS8tucHUmhqaJppYF6B8440h7E8cjhqMTiLtcaAL+ga9jWZb35hpbHceyNI9WRSmHcE7n4jqbqTp2D2nnzgm154mB1hJQy1cwBIpd3twCOrjnEoDsoTZ2jepybb2VKrhpx3zB92Z29Khpl1ZN8MHWzJV1CdYQHJEeqTMbqcfEMW+obLBwES83w7AgvY0YlMNT3N96GcXEHGQdxG2t3CQ5kDkoFeO8esHd4fc193jinAtxvXhUN3Ywz5VAFGM3cENfgVM4x5YICYRumqu5yIVMdJuhaLXMAFWBKOIhbWFBiO44Dg4Usxx/cCrDyxqESMIYuu4lX1fYcGnVaaVmtKXZxHOzW5Bvfqtk3iLAFuVaLew4HluDUzIH1VYMZF+AgLMZxcAYY1uNYBTgrwPDIaqxOQ1p540IchBFTJW9c2ofNAO99Q+sYaugbJ35Vd4sKsPbGqVM33KsWmdpXLeXqufsVNwhH8FyhOh8n74foKb5WVDr0zFky1Rgt+iaJZyJTWErKMynpqsqgVy4q0xv0VTvZq0pJPq5jzn41zzzVET0aCYjmTO05OLaaBEwWqhvSenUkKtVw5P0N96mOpNr2YCPuh4AJCGEAoioDver7628Sx8WAcU2i1eM259AWhZBzuQEJieN07JR6sCHgXoSLnepI7yo9NqgCLE5A6K4hjHGKsVqK49JYEGyBg4XIq2b7FREUx5Xu2LkXOtmz2pJBW5sOqdjJF3sOjHG5fz7OT7n8DtURwjjPtBKtx7VLOa6Q4zh5Wkn8xhvU4xbG9al/s2bAbxwYkJO7XCGME43VIuMc56Dnqi7hpPE4cVopBLnibozTp5X4DHAQ49iwfsCX3IhxYFoJB8C0y7Xr1iwBSZXDulIUjF11G6+qr5aPALlCmTrfyWfQzCEK49xoPMnnjFMaq3QlX9tzQPuq0QFbLeNbMU6cHRn3Sb60HwJ6NXAhKQi61nzpPAbtQaY8jUUgQnRHSodxZSjjoig161W7HePIRGZDJzJHQXiEzE4DpRtSjotCHzkUkhj2qqJi4V54T1S6cWwVp/nhjAP1JQuDhbs4TtFW0spKTgW4UHJVFv4GQpcZA64jkt1xDNS8lDEv9Y3z5KpRGY4biIINMC7e2lxKNFJDqUfQquGZA2yrHoINUM7G6ZX1iVM2RKDUI08dssVUfduDRyI5QDkbFWCnscrWVWV1/RZ2uaRc9UyKL1LO0r7q1qwBm761WsmEUjdge9ApAB9iHHWslq4kMalHTbHQieO0zXKljHmIc4XdOE695yCpsg7eOE7t5B/4Uqv1uDSGKhDYq0r6yQWYj5NnRw5BNxjOHJAKRJB+nJqr4jcuOki4qOzs5qrgulSDJzJHUcyrCNMdOc44ty5nel815KDZ4HTypeoILAEfZJxbCjYkSpVKgZx/IpOfXgnarDkKXWIrHAm7kiTqxw1er+oQzg2Bj32FTcYJ2kpCIx9PK2XMq1ZCz+EM46LYZK4qaCsdmo+TqiOV5FWPITdYLZ2jOqYa/6LN8kJSZT1dxkSTJEYKmZ2/OuI5aIYYh+UzKn3TN9A9mLjL1cl6LexKkqof575xVJWVx3HRefdgUbGQNGs2U2Ul4HYQ60o+HeBzjCtt9VXVqXMpc3DEzh2NzEJSnr7OuG35wUTpPGjqXN3lQgdE0AWRA2sOHls1eO2yh/uqWM1LbUgvQzeatOhR5NZM3
9QIBNyQbviBVajYItbOYa56knCbrRrUj0vxTn5QVQlOnUtdrjKKztuqpbMEJ27WtCG3BzX9uMPfYqsmlW7SPnRfFeSqwTrAp9641VZtXi1PlX1VN44THzlvPe4k45aZLys9h+7cvuqsAqEpBAm6I9HJN24pZ9oIRzp5XzVUsVCZOnd0Ry4zbu52GdMBDn7j+NVyYXaEneW6+sbNrQcjtwdJCTh4WmlrSLcF3axRZ4AvMC6zxjj9annty1X9M8BoefBMQGKudB5UjnMnMltQV1JuD5JK5jnGTUU52zeke+4cfMJ7ygxwSZuDJ6EbzDEuZF+VNLnczCETlPdkdf3Tj5yhAHhNVdV9VXgJGV9JyqWjjRdS1fWRs6vmRctK1KuiLD/b22q2KyvlWJT1HOPeo3IWqyNwlwtrxJNrlwW69ZNr0qKnoBsfNrWVcH+w9gzIFXxDeisr5cxSrzCusFVWkpVukH7coF/0zQRt0a2QGZ1HLrfAuIfDOL/uiLOTz9UMMiTmtXMOVXmxW/P+OqM6wPBKEmLcbuiGMw4lq9cmIPbewa5iIb3LdYxxKHMouYDcSeRqcxd9+dS5VFWCGZd3e9DtR5+21dbYRd/1EAaagUC6IzDLV7YHj4pSKbmDkZRLnmRNcHeQ3azZdvLJ9qCUOVyhXGVmQUSpx9GWAzNW6Y0DmYMzrbTeHjzxxTeI41zktugXtQezAsRxon7caUt9VZZMMQ7pAPMnjnTyB5lxJHPgU+enGTeajONUHWB46wfW41jmAE79nHerNiYyFeXpBu/keycyM8983CWv+sxWLe2rsgpwH7qRxPuqb7pl6IKe61RPYpcbvefQy3e58LSSbwY4B071CuNKk31VtswlTyuJ+6oZuss1h3HVCTEDluZb7Ks6prqrjijlOLTLxRlXUhng85RLo+77por7qp5c1fUN8I1Tbw9eqce9ArkvA6eqxOOr5UQGgrdVC1l5GszHnYSuNsC4OKgeF6au73S5Mt983IU3bvwqcN3jAe9y9VgkvvHPskp7DuI9h7PfYOGNw3EcH7rx91XRPbNPT2TOEbCNNw7uJOGhG+naD8+5PG/cJcI9gYu/CxuYVuqd+6reHbgBWqrvavlV5LLvAqfdc6AFYElcyW1IS0eScqg7ch65/MvABXW5ElWvZXDiOL2Tz3VHzn7Vt4F7iNuDobMjwp7DUh3hfKs+EQCbAs43O9KgG9KDrK6vMe5qIPdd4NS7XFLi4NxzADf0eEPa8aqX1lV35REjb5zac1geuYVwtfjGbTmXsJH0qf7g14Hz9xzE9iD0qigewTv5F71qaeKNwz0HXI+jb5zLODABQWaAP+MbXA2SbxdH9KlzMjw9jvAUMr72o6363hQ4VyQ+lU+vkH70ALVFM2cnP8f1uOojhLPgVZnWI7y9Enjtcr+TT/qDJdj0LW9tqlgFQtUdYQfNQupxnHE3dw5aripmDgOcOt90gHOpOvKRF85EOCJKPSrVEa/y9D9QLDQEXKco3UiqVPtdrjZIIxNuSEeX+qomc9U+3S/WUIUgvD2o7nJ9UnfECnAPWQUiSOu8HYKERz6oO2IDuE7THcHOATIO9xzQhnT1IcblVhjnagRp8QjZVw1gHOFb9ZnSuY1OvutVe3/86w4Be7XOc6GvetsuF47jAq5dElFWn9a5oJF5YUDOBHCdds9BzRw2xrWy1jl946qrC6tfb0i7uWoKZ0eaoJs1e8LR+6pQPy66GMiNhqaV4ljbc/DqjrTC1fL8n/RVa0ummiI1r0a/yyUPT2dcXBR08s9Cl9gVpdKvEvgO00LB7qta585g4eP7jIuDxEXdaSV87TID91WdiUx67uf011lkHBTea/R6HN1XhTdr0AmROw9Pd1h5euurJvxKEh2e9uyrfnYn38q4vnp7cLchXXuVbtxdLoFx1UVtpWnv0uLNGudquTCROXj2HHI6riRK791zJakjIxDKRGatMI5fu8z81y4vzU5bE2yJmYLyFsc1jaythPcc+CW4j2grGVi71GTQgnLV1tFrQduDUj3uWvwb27p2Sefjel/PAXXy+QywoFh4IZKrTF67lC6I1H6NTHEGOBeWB88LoVm6dimoGaxO1a+RiafOpX3V8r7yGayOuZfPgGsOQhy3V4Fgew7wSlJ5hXG1aY1Msj2IK8B0BAK8cYIIxBXG9bZuD6ZUlCqk5zC0x/Yczh9YtSVK9ZA3pAN6Dqq6vnAo9ANx3Ndl0LazXPK0kq/n4J06F+45XMkc7Ajv0Sy/VzZrpAsiLUnyhZ38fQX4vOqIOXX9o9cutRlgfZfrSuZgTVxU9KrKPQdQOgd3CZB7OO9VzcnZ4pYDHddnkoVSrprlAXe5ynN5gxF1/Vi7ocenp7XdcueJyxSt8wtjhRYku2EcB3Hb1LxqHMb9192VJTmuw7DxSsfxdv/Tvn7TSceiAEpylqYnH/lWsWBRXABMVLHQ8B4cjvWUfN1xTYU6wJEqqy7y1ynBEIGt8ycqBxe2BGh3JEt3hO3HtXjm0KFJ/kHIOTDCmEUM2RFMH9ygn8N+WoNmDpcLZEgfFuz2pQPMKof7eHBME0RsxAVX3OFS1Y/Zj77jtGQ34nLRWjVeHtFF/pOIc2EvRdf1m/2UKy11s+8qtXzmAKQMhiNNTDefqiWtFIlkQqEbNJFuO7g6oqr84p8PC710IxN0R0bK5VIFV2svTx9CnCPTxspm1iiGiBKeptmh26tAYOXpp+y3HSGuqUwnONzIZN7bRAdYp4bDZpee5qq5OsAKcdTQISjykUjmM/aq3lRZK3rH6a5SOMmPZw5BQ47yQ05rvm3NVVXhkLCs0YqF2EN60A4iw8Ev1dtcNXvmsGK+qukh/QIRtEHE06cK1fXtKRfLqoTLRadcw8Ev1ZkTXBXzVdkdt5k1PvtQo/24I604520lvq0EFAvxXLXlytPHpjUXcRU4tTuiJvkx4EbugxHojrRJTn4p5jY3gcspucLQKW0lVnNhFQhF5ir+VhsvgZsN/Tji9cOcVybD6wcOuQ5MuXpxirhyt0tTea9NTPKLEVf7CdzMaZdZbpd0Pw6LZ1wiN7PhWGpwk1Uz/BzQDsSEnOB6vXT+cLscntoB3sQZ4qiaV/qOixjSJYgrBNwwu0OcSYKzzbe1ExwYrF6InFdpVp3EG+IIJSlsnWv9uEQ/DrMH497IcOgt4hNxC9vXv2b5ElhU32fuuF78IQ4r3XyreV3NST5V86K16uG6YRF3iIt1gJscxRbgE9oG88E2oa5fdMu14h9x5pZX2I9DuaGn+nGXmOdw6PHrvh+X9OWagGdNbzCkI5uk7NhpwHliSDf2sIY4wQHl6ZysWvoaqcUz4hLyGUrsfF85QIZIvASsh1zZsYsA54qTDyuHSAViM7wH+x6NarDWeRHiFpHT3HE1g5zldsk6wEDOdiiwJejFJeKYvaqtWEhFqShfFSkWHikaPHZH0qJUxEOvnK9a4CE9ibvACVQsbMLKwWTWGP6qMeKOAW6o3AWOXHHZ/TiD55BGXHZW3UScfqqPVdbGMjSLa1VVcRG+KmRI53fOOxGn
n2piedpgSK9GB5h0MssBVzsM3AwdfXPuuFh3ZGLaonBUk3/J9eIfcXDmcC1RLEwotgTvuDzIDY3LwM1z0kMam/1gjcyWjQePq7VscjrE2YuFxNHMLFWB1vlwKDO4rlX1tj7ROidZNc1XzZJPZh/q73O5BPBVo90R5PaTyKqYPVhcdK3iGXHxJB8RRLT14KZYl1FW7RJa5zkf6uw2cJJ8x5kbmcDsx+yO6NglQreInO+Oy9oBNry348qh2M5hFTkH4sysihA3WXNVvAIciQSVtH2d745kdZUYQ9pa1o8QNxzLqO4m+eFcNRk6BLmWiHZ3gK+a+FZH8Y444q9qbuuTmcMLlad7Ee+Iq9gOcNJ7cFqZDjCXOs98AV9m8Y84OFitI7fLK0VcwHPoqZ+Dks+wPtRhEf+IS4kZ1ElqOZrW3CBH+ar2O24U34GT8rmqntWsif24jjFrrCtuEjkD4qC2kpqrjrQ9cpDLZXlIt+I+cEl/VernsB7Zj9PTQZIYKjkX4iCz5go7mYpZY2qdI77qH+M9MtRyGsQZXC5rrzBWnt77OQCNzKB2YIi7ipwDcZXJ5YJS54p32UM/h84WAmZZdZMTBE435LLt3sm2foajbyqtTnKCwCkVCFMlHnKSpkhev4UdYHDHke5IK2cIXKACYbuWm7UqVp7uzDsOI66bzxE4ogKBZg5ZtWqCIR2+4wDiLtl2s84m+VW8ra+YNXZ3BHaA4VwVvn8vjcipEJezr8/UDOgOMDeYYv24ZGXvtzuS0Y9jO8AT3QHmnPzhz9G4OZs5QKIvFgJW40FYqraAWRP4OQSRK4qbM55Dxpeqd4B/uiNTmj1oVg5ZhZbvKZdi1mBHMyIuanPydXdk97GW4U28GZqFSjdL8h2nkgN+x7E7bo+40rj50AHOkTq3NYIQ4tpolRXKi36/Q0rj5m2uWkV81ZS6/hRvK/XRyIHRHIby95v/d1y9WEk1lmW1uVx4XekWua4SOSHiKuZnZu3HYXV9yh6EPIc74NojBuQe9uMqUz+ujp4jI+2dt6w7Qp3ghuw+krNaNctBrwYqaNB7MHgAdyk/h5K+pdfx4GzusoafKujHRR3gDu/HhfpxfxF3lTMGbrZtCdB+XIi4KDegmQPagbh1zsufIY4QV2ltJcVXDTn5EeKKuFyqcmgrOWng9g05pZG5RFvnd8SNtDuiPKRbw9H3/ztumkVOjrg5ra2kEIe7IxBx4e7IA3DjMwf3eMfhh9xovH/73dZ5n9cBHrpFzhs4hbhgB5iqeaH5IGDWtCazZuhnOXHg5jmdVX9mDte0u9TjAQz9HB6Iu4zPHt2v7kgNe+cb3FbqeVa9qBfw39C1jZw9cMBDmr7jwqHDylzL+5Sfw2V9wcn9Iy5qZFoTaZRV9b7+k1nB55Sr2gEOaysFiFO+tCSrhqsj2yzyjyAu3lYCHnojyap85gAdfV9wu3l+x2V0R36INSvWAUbvuMvzydRprWogTmfVO+AI4uB+XF/JvxK4UHekMZRucHdkQhsQvdqPuwGurV95cl+LhUR3hPfjTN5lkFa78bUH97StdEdcOJC277h0bvgO24ty6RkQV5OtG5Pn0GIy19S8/OCeKofojlu47gjVT+4/EjZvdu9grnq1uVxAB1il1beEzRtBJJQICv1VMeKU93a0OzItbzq5k90RgLhl+XGCqy3ERdYrO0fftXnb2T3VqjfE5aigMbf3YANirN54co93XMjJT20rhWteP5Hrr/NbT+7xjoPMctOzJkqq6/Luk3u84+7vuGUBSzeBweoE3yPTm8Hm444LiA5gyMXuuJU56G3NR07uAXHVwwo5fgBTf1Xo2rjVnzq7r6yK1LwSr5HHHTdt9fy5k5/R7XLcJdX7FbeOy2dP7trPweQ57FRZx7r6+Mndq3ldEzrAY938ysl9uiQ1cJNVa2SO16X6tZP/dj8uDqJW2VfM/O/ftf7FmPkK3OMZ3MAu8G3T6ytiTTU7OK8jxMlfuN1idg/X91/9VUZ81WOVo8P+Bw+0DogP6NDPAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-up{transform:rotate(-90deg)}.jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-left{transform:rotate(180deg)}.jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-down{transform:rotate(90deg)}.jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-left-up{transform:rotate(-135deg)}.jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-right-up{transform:rotate(-45deg)}.jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-left-down{transform:rotate(135deg)}.jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-right-down{transform:rotate(45deg)}.jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-show{visibility:visible;opacity:1}.jb-pro-container .jb-pro-ptz-control{position:absolute;left:53px;top:53px;width:50px;height:50px;background:#fff;border-radius:50%;transition:left .3s,top .3s}.jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-left{left:33px}.jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-up{top:33px}.jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-right{left:73px}.jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-down{top:73px}.jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-left-up{top:39px;left:39px}.jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-left-down{left:39px;top:67px}.jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-right-up{top:39px;left:67px}.jb-pro-container 
.jb-pro-ptz-control.jb-pro-ptz-control-right-down{top:67px;left:67px}.jb-pro-container .jb-pro-ptz-icon{position:relative}.jb-pro-container .jb-pro-ptz-icon:hover .icon-title-tips{visibility:visible;opacity:1}.jb-pro-container .jb-pro-ptz-btns{display:block;position:absolute;left:0;top:156px;width:156px;box-sizing:border-box;padding:0 30px}.jb-pro-container .jb-pro-ptz-btns .jb-pro-ptz-btn{display:flex;justify-content:space-between}.jb-pro-container .jb-pro-ptz-expand .jb-pro-ptz-expand-icon{display:inline-block;width:28px;height:28px;cursor:pointer;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAANlBMVEVfX19fX19fX19fX19fX19fX19fX19HcExfX19fX19fX1/////Pz8+oqKjCwsKhoaHn5+eWlpaOqTaDAAAAC3RSTlP/3CaKxwJiAELtp4ri/s4AAACuSURBVCjPfZPREoUgCERXBdPRyv7/Z6/Z1aQp9oWJMyYLiKUrOIpAJBdGCldgbzBkPM/QEoTI3jBEPBRDhwEvChe08Q1Ge0ImvIq4Qj8ljrLdH77CyQPWlCdHC0Q1e9rmmuC+oQN9Q4LwcQg40L6eyqm0uEpXSUqe3fKpkkqL+Y/o+07SrahNEO0T0LBsvOitf4xsLqiNTB32wtqaVKosGLO2mhUrS93+PZ4D99wPqzMJVcbEyA8AAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-expand:hover .jb-pro-ptz-expand-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAAM1BMVEVHcEyZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZn////i4uLZ2dnIyMjExMS8vLy+iXNeAAAACnRSTlMAYomLxwEm9+NCLo6DKwAAALNJREFUKM99k9kWgyAMRIMmEMLm/39tKaVKFJkXl3sYJ4sAXeQ3ZOcYd0+gRYblFBuFLYoS2ot5lpvYn8zJQ65TO2GVNmdCmQq/qczw4gjpejD14BgmhziEIvCjVRlPioftHW6A7xBB1a8CCUMvsuSqEkPM7eZX6h8GrQ67bYpNIbRL6rb4/k2EfVXKsgmqfQrW9qnGq96a28jGQG1ky2HXpVysyYyeDIhWq7le6ua9P36HD6+2GRi8iBZBAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-narrow .jb-pro-ptz-narrow-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAAM1BMVEVHcExfX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX1/9/f2/v7/y8vLUObqxAAAADXRSTlMA3IrE6SZi9wI+y0gNXAn3CgAAAI5JREFUKM+Fk1kOwyAMBQ04bJHT3P+0JVUMNMWv8zvSk1cipfjAKXHwhR7k6KTjYp7dVuWLug1XWB5wz96T/JD2O3Phmv0k5ypL6lVVFIPYpLOka5WKSSFvS0/BloHYlkza5HkMzrvVLo8ZlRr7mtFYWBBsBQ4BjC//GTxcGVw2PpOVHQ6fJj7qS4936OoN2K4e5yE6N1UAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container .jb-pro-ptz-narrow:hover .jb-pro-ptz-narrow-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcBAMAAACAI8KnAAAAJ1BMVEVHcEyZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZn+/v7X19ckk9ihAAAACnRSTlMA9+NCAsuKJsRiPv/2GwAAAJlJREFUGNNjYAAC5gxFoTYDBijw1FoFBIumQHjsUavAYGkBmGu0CgqUwRqlYNyFIO2Fq+BAnIGBJQrBXerAwLkKCUxgYELmKjBYIXMXM2Qhc5cxdCFzVzBoIXMXMYAcsRsMdgEdgs4FKT4DBqdAitGMQrMIzRkojlRB9wKaB9G8z+CMGjgshjCuMCjoWNxRAxYt2KGRYgJiAQAnZcjElaB/xwAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-aperture-far .jb-pro-ptz-aperture-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAASFBMVEVHcExfX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX1////9fX1+kpKRzc3ODg4PFxcW1tbXW1tbk5OT29vaVlZVmZmZ8vCMFAAAADHRSTlMAxGJ5Mssm9+NCiYtiH91SAAABAklEQVQoz41T7Q6DIAyEJQooUL55/zddhVazzZjdHyqXXo8DhWCYTWqltNyN+MZLuxP69UGti/vAsl6c0e4L+tQ2yv1AEbvecMhO5cXdYhk+6aO3WGrNAMwentlMz/ZAKIlNoRsqY2wtFWu9t8wasc0iYVN0LkQfrG1zbxNyrIBcntOQrH1Ukkb60QcxYF1xMA2dh8zWj6ZDsLCsIrL4Ds5Hm9FMbCEROWUB0COaLXEIZJKV7CKybGO7UuxjxY2C/TkMbxboKBQCxgMN6MCJQ6Ch/QjOZg/B13LGx8FDTe3IFvl+Bc9XBi3UWoex68qeL/vxmdyxyvz3NJ8f9dDef36HN7koIK2LjxB0AAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container 
.jb-pro-ptz-aperture-far:hover .jb-pro-ptz-aperture-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAAOVBMVEVHcEyZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZn+/v6cnJzr6+u/v7+xsbGlpaXNzc3b29vqh7uRAAAAC3RSTlMAyRjKA59J/3PzPhe1wxwAAAD2SURBVCjPjZPrssMgCIR1mkYtisD7P+zBCyZpM5mzv5hs0M8VnZvaok9BlXzc3FXbO5z0vtifFC5Kn8OL4UfxwVvuHm61d5Z0b6ZGZZwZpQAUosWsjVZntVS1sH3ZFo1IRVYfGXgx+VGwNkkIVbhq9/jm3cAhaNv1Uk3IA8mNn7D3kbQeWK3TLH2jCthrDFcTMwUWaKiClc9mJtJWhS3SF5BpJqMQW1b3xwnkDahMoHYomkeJRgSENA/MFsKML7fgoCBVbGvM+Cx4JcKWbWHKK/h1ZYS1Jy/nK3u8bB3KhzG5deMxtfv3aO7/Heq+9ms8h9fxHP4AHzAWU9zlWNgAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-aperture-near .jb-pro-ptz-aperture-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAAQlBMVEVHcExfX19fX19fX19fX19fX19fX19fX19fX19fX19fX1////9fX1+FhYWbm5vz8/Nzc3OwsLDi4uLDw8PW1tZmZmYgm6a+AAAAC3RSTlMAYmOLx4kn9+NCIVJiPGAAAAD+SURBVCjPjVPttoMgDMOJAqOU8uH7v+qKFN2c597lF5LTJg1VqQG3aGuM1bNTV0wWDtjpg3pq+IB+npyzcIE9ejsDXzDCrjccs+tOariF3n2OLyw5xko0vh9MDjNb9Q0hp2GK3cixlIApe4/JD9appR8SFxWAUFLg6n63iB1irnY1Jv0mlrok7nUdcZRa1YeshxBA9iijChlxI6iZEaBgSEL2tkRcymPGGJpqlbZ6uDg0WR/F0DwuMpxDkYwiIXA8hO2uMJdGCCK6teB8RQoY8xGfevQjxYQt25qoRwDT25MRBjZ7GtP/P/afa3LHmrflXa+ruf661Hvv+et3eAF6Fh3v+sSUGgAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container .jb-pro-ptz-aperture-near:hover .jb-pro-ptz-aperture-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAAM1BMVEWZmZmZmZmZmZmZmZmZmZmZmZmZmZlHcEyZmZmZmZn///+qqqq9vb3z8/PMzMzo6Oja2tpXGg+mAAAACnRSTlP/JomLxwJiAONCr+rW2wAAAOtJREFUKM99U9sWhCAInEpLBS///7WLEWy7p9O8qEzCMBIOQ15DAlLYsoegS9yFMKQ93skl4Adh+ZI54Q8pG5nxgKzkgkcsk4zhmQxRyN1OPHqtncjOu5AuppcJ6s1EHTA1YzC3Wgq3YmzGqpsmlwZAo7F8oLEVKoeE6+TbSxK0JJ/3FLOwFnUxzXuoltYDDMLoAlmYXLAWIrkqbdZKs+q4KBfkNV1uwGaBim9TdLWS3R7iGRvCNTPB7JvGlc5EXK8cKbrxooint73RzXh7Msl6Oj/uT/b62O9j8sj6gMXX0Xwf6jP3Zr9DtNAHTYMMXrXSK0YAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-focus-far .jb-pro-ptz-focus-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAAPFBMVEVHcExfX19fX19fX19fX19fX19fX19fX19fX19fX19fX1////92dnbs7OyFhYWjo6Pe3t7Ly8uxsbG8vLyG+Q0EAAAAC3RSTlMAiWJjx9wm/0Lti7mfpe0AAADaSURBVCjPfVMJDoQgDERFC/bg8P9/XUSO6CqTqA0TptNDpSrMpC2A1btRT8wrNKzzjdo03KC3zhkLD9imbeAFhd3sG2kvZQ2v0NknfGBJZKkhBM9MxOxDKBV1N4iHi0TRHYjN01Qi7/kK2PtyNDU7DAEJgDAAN0u1jsQEFEkcVVmrqjeXrkWRmC67eqbgG7bJyvkQSQkvUvec7szpek6t9ubWJSK/uJVSm+APzHKCh++DWWuH4plQKNYOpfappcjy2VvJn9744cjGwx6uyXjBxqs5Xuqsvf/9Dj8rLhRg+bQ5VAAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container .jb-pro-ptz-focus-far:hover .jb-pro-ptz-focus-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAAOVBMVEWZmZmZmZmZmZmZmZmZmZmZmZmZmZlHcEyZmZmZmZmZmZn///+xsbGoqKjt7e309PTExMTQ0NDe3t774OlGAAAAC3RSTlP/itxixwImAELtp8B2gZgAAADmSURBVCjPjZMLjsUgCEVpq60G8Lf/xQ62gvNeOmZuUiWeKHC1cKnC5iJAdFuwJXgmf+xg2g//G54OPuTOCUOEL8WgMMCLwgPP+Abj2aF38CrnBR7whw6Bo4fWUk7MMrQ2OrpAq0GspTLLgKg1wTailNITZA0EaTkZGjIAY5NwlATah5CGRMJYj50tFtlWiapsLvAPRdtL/WOmET7QzZyl5ywzp7NWsjBJ1odsragJqeJ9HGFNZoLaJw71hMTm0O7NeDE1Z6YsU5rGL69sedmXXz0ToW8PzA/oV09T8OJR32fb7+B17Qe3WwtC9PVbHAAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-focus-near 
.jb-pro-ptz-focus-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAARVBMVEVHcExfX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX1/////t7e2dnZ3W1tbGxsa3t7eDg4Oqqqri4uKTk5NImu/5AAAADXRSTlMAYieJ3MvE/0Lti4oh87zNagAAAOtJREFUKM+NU1sOwyAMY30FtoWS8Lj/UZe2gWpVh2aJH1wcO0mNqbDj4gDc8rLmiscEDdPji3rP8IX5fXLWwQWuaVu4gbKDuyPdsJMz3GLefcIPbJ6PDCEAFDlUAJiORM3NigQFAXAFlqOeRhWJyFFIHxNGvRrN0mp470U++3axGM2RAmXcXqKnkDSN0a9WIk5Sa01MpDXBQAdVtrA8lBhFnnKpsmoo5VBrhszV0KuJ5N2tP92O50iQjpzcctravoihdoi0Q1NrfN56m0VWzFBoje+OrD/s7pr0F0yUr6s5/LvUu/bz+B2ep+IHdMIV2SUZfCsAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container .jb-pro-ptz-focus-near:hover .jb-pro-ptz-focus-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAMAAABF0y+mAAAAQlBMVEVHcEyZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZn////c3Nz09PTp6enR0dHFxcW7u7uwsLAUKT0cAAAADXRSTlMA3IrE6WIm9wI+y0gNQZpqdwAAAOdJREFUKM99U9GShCAMQ8BF3Cmlpfz/r15dAe88l8zwQiQkoRrTEa3zIXhno7lhWxcYWNbtN/fa4Q/218VFDzf4of0O8A/h3TQfOGU/ytsOj9gPVyt8warkmYEQQAgABYDxTKROz88koS6AVIB1fRCNbSI1cVUy15Jq27LGjTtyzipPeWw40/IXQkrHyZSRmqw3LaQgctFNKYzYyGACfEXossLMojFEj7J0WfdwJ3dD9uY2X25tL0Hj45mTR87Y66u9IQFsDS1bL57o7JbUDNIofvpk08eej8kTe3Hz0ZwP9UFfv8OgfgBUByCEUZhYtAAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-cruise-play .jb-pro-ptz-focus-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAcCAYAAACdz7SqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAJWSURBVHgBtZe/b9pAFMefbSYWMjCjdOtmYEYqXVC3umMlBFRiT7qxkexITcdKSDUSYna2iqX+E/DG6LCQgcETUiR+5Pv8I3Kc+McF5ysZ3x2++9x79/zuLFEG9Xq9s/1+r8my/AnVJq4z/2LZfB0Oh1tFUQxd1+208aQ0GAa7kCTpMgRJk4kJXk+nU5NEoe12W8PM/wrAotIx2Z+w3MkE7XQ6Q3S4otNlY5zPUZdL7wiMBUsRIK/fDeWvhQ92XS0HrQia83cCsqoIyMug8gQ9Ho/DpF7FYpEajQa9VTBoyIZxWeEfv6IndSqVSjQYDKhcLtNqtaLtdkuigmGSZVn/XKiqqr9wqyZ1YEtbrRZVKhX3zvX1ei0Eh7Ufa7Xan8C9VRIUg9lyQZfzO19VOOugkBpAgaXRtnq97oLZ5ZvNJm0YQkBZ8m63E7YyKl5ntrrf77vlJCF/qzLlKLZ4NBqRpmmuF2LBlLOyBFYBchDKlIeWyyWNx+PUtS2Qtx+eJA6i2WzmQtMEA+8KnA+73a6N+jkJil1pGAbN5/PMfRBIZsGn3+LFvSABMYiBgpnJgZEeFHQD4EzQrOsWI4N/nrY2uPg/eeefV8WvAKfALOsWJ3jzA++rcqjhB25OXAd244nA62AjV4LGxWLhIBk/oPiF8pc9mUy+BZVnyQEzueEZUb5yjyvhBiX6BCw2YTGvdZNyAkYPZsprT/rgO/K2vDcdQQH7jes7gPcv/kvqyCcKbEVX6PxVAG76QWPGPZAIDcEZqGECTQyokpe9wp8VfNqzyA2L9M+KRzm19l1i6ZQBAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container .jb-pro-ptz-cruise-play:hover 
.jb-pro-ptz-focus-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAdCAYAAABWk2cPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAJ5SURBVHgBtZe7buJAFIYPjsVFAok3wBUlYcUDLNvQISUSDVVCA0IUYZ8gmzcgBeIiJLwdFau8QToaQKygovI20CJBARKX/Y/jICPAF3B+yRpjZvzNOWfOzLGLLKrZbAaXy+WdKIq32+1WcrlcQe0vZb1e/8XvQT6ff7fyLpdZh0qlEr+5uXne7XZxMpciCIKMSfzGBBSyCwVMAqxpEXYEhzdeAJYtQ2u12h2aJq4gXadSLpf7aQptNBoPm81GJofEsc5ms9/0zwT9j3K5HHUSyEJ4ovV6vXkSyjHEyvxDXyCAHxGy4hEUq+4JjWQ02O/30xV65rTbQ9lKNEWjEYFAgFKpFMViMbpQnOfFPRSKWxnldrtVaDqdpnA4THalefMDinx8sDOYrY7H45RIJOy6PMibjch3F24AJEmSeo3HY+p2u7RYLEzHwNqogOUcpSvFrk4mk5Zcjry9FZCX1+46qj5dzvEOhUKGfQVyWAyPRCLqojsnEYtohpiSE5rP59TpdEhRFMN+Itw7Q3DpGq1WKxqNRurF90YC75+gnXszulBsVbvdpl6vZwpkwasDNWVw9r3BWlu5Op1OVdBkMrEzTD11RO1exmUJytb0+30aDodkV7DyjT2rBpNrG8zg3WwQW9ZqtS4CatASt/sVhAC/GA3glWk1bqcEo+TPwm0P5QeYySt9jRS9UQe54vF4fqEZkMNCdXivrw4PoJlMZoad5IeDYE7F+0KhcPC+syVotVotIQ5PdLm4CD8CGkJZfAIhzlw3SWRdM+T9q9frLbHnTnUwrfBZfPCiecQG8v3MBHj/HvAm4/P55HMwW1C9tG8a/RmsGH1CnNJ/17UakVMOx7kAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-cruise-pause .jb-pro-ptz-focus-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAcCAYAAACdz7SqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAHySURBVHgBxZc7bsJAEIbHdprQJAU1ckln3FM4J4jTIkSMRB9uQDhBlBPECIk2yQ1MSeeIxg0KZQoKKpSCR/4xtsXTNmA7v2ThHe/ut7M7O7sIFEOGYdzO5/OSJEn3KGp4ZDy33ucxP6vVqo86H71ez47qT4iCLZfLJ0EQmhuQKNlo89rtdk06FVqtVjV49kZrr86RicG2TdMcx4LWarUWGjzT5Rqjn7tdsJAi8Ch4C4op1TGl75S8GKwCPOWC6FsRNDKAL5SOZER30HcAhbFFEUFTLBapXC5v2fL5/J7tiOCXofHLlVfikRhRrSqVChUKBXIchyaTiWtrNBruYLjM9jB5jlmup4vFQqMYyuVydKE03vsuVBTFR8pIcFD317REGQlRXBJ5PSl+iktCskgZC57eZA5l/Q/Uy4lTykjYq1++p5EHb1LC9rRdKA7dT8pO64yERG/SmVM8m81OqW7yckr8Ztv2r6Io1whnLawF59fhcLiVY0ejkWsfDAYUJfRfB+snOE85J2KRvym9RGF2Op06vwRbhg9YHgmlIz7E235B2vwC1x1VVdl7jZLT3nVF2q0BsJUg+ODFTDpU0wP3PfC5a2wB+BD7CuqLgws/TQQYn7cyxYfxfdc6ViEUujMAHUlER4cKHfhbATvPjBUG8/UH1xXJDxHoYGQAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container .jb-pro-ptz-cruise-pause:hover .jb-pro-ptz-focus-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAcCAYAAACdz7SqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAH/SURBVHgBxVY7bsJAEJ01CAkJKUhUULmiDSnpoKYINyBuQIgi3AA4QZQKIQq4QaChhhuQCxAXSERUSDSIb944axSS+CPA5kn27o7X+2Z2dz6CXKDT6URXq1UqGAw+Hg6HDB4V4qj8rAsh9N1uN9rv971KpTJ2Wk84kW2322csVv1BYr+gEGMo8Foul7uWc6w+NJvNjKIoHXRVOgMg74K8AXLdFWm73a7BujpdDh3rZH8TCw8JLYlPSFutVh7NG10feigUetA0bcEDxZTiDFU0L+QN1M1mc1z7SBoIBGrkcGkSiQQlk8kTWSQS+SP7D3CzJ76c3A/yi61kodOP6XSaYrEYzWYzWi6XhiybzVI8HjfGLLeDNGxoWpohF8C50CXgwMK+r0gNCuQTENnyitQgRT4BQSOlyFvrKsRdCapC/uPuFqR0G1IZExfkH94NSzkHkk+Ap4wNUiTqPvmH74gUDoe7dOYWr9dr13M5sfNxBnjQ7/dXuVwuDGHG7ieOr9Pp9CTGzudzQz6ZTMgJ2FFtMBh8HvMpx0Ro/UEeBQq2slgsatw/uoxMsBp5A64WG+bgxE9LpVIPpUWDrgv7csUE4nEdlWCNPCC0JJXEF5Wg8MchHs11CWpCVvZVkBfckksyrneHVnMEuQRXitiqPBS4lwpEJYmORkc7Qju0IzPxBZ2t+3mW/JtqAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-fog-open 
.jb-pro-ptz-focus-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAdCAYAAABWk2cPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAKgSURBVHgBtZe/jtpAEMYH28WJioIa+URDF8MTkAalO6cH4YIiFbk8QXS8QO6oUlCAkGjDtTRHJBoa4nQ0KBZdJApoEAV/Mp9jkAF7bXPcJ5ndtdf725nZWdYxiijDMBKr1SqhKIqKNpdms9mcRxkjFqZTsVjMy7J8x1WdL9WjC6D97Xb73G63m0HjxULAvnI1T+FlMfxBBPeEwoW73Q6we7pcViwWe8+utwKhDFQZ+IOrGr1eAH9ksOkLdSz8Rd5xu1RzBmfdFkvupwz8dmUgBENeYNAZtFQqGVwY9DZSnTVi6+Decrn8h0JYWSgUKJfLHdrL5ZKm0ykNBgOazWbCd9nNt3CzgoZjpUohlEqlKJPJHN3DJHRdp16vR51Ox/ddTiVkw70NlSTp7rRDMpmkWq1G4/HYvmANSpHghXg8To1Gw/M5W1oGVEZD07TvXNy4O8Btw+GQFouFbV06nabRaGRbhbaf8AzvTiYTr8c32Wz2p8KrSuMgJ7x6IEa4AIuivau9tNlsNGm9XqsBY9gxhOvCCi728wa7OCHhRzQAZl2pVGyXRZEoBAoFCDGs1+v2QooiUfpIHE+LBEL+VatVezVfDcp/whYJhAWBVHFvCEFCfz8op6cpORuxJRokajz98tSRae+97OJnUS/RanQLkwNQ4FrzsA2yyU0Gf/br2e12A90LlwYAkS5Pdrm/wRs+/rh1ChAshuV7wTqAQoTAarVat6gorll8YWvzXBXmbdTUcY3/sK/L+4ppmnPeF/9SCGsvAXIsH8+gDthkMFyepyuJgS0GHh3w5NNOzO0zeMHVD/R64BMDP53d93vBORW+0GVnpvmpS0NBXXDDSacwR9K5kxaPok+NUJ8VDlzlQucJvKNj63G2/U3/E78fZqx/rk0w4ggu8jUAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container .jb-pro-ptz-fog-open:hover .jb-pro-ptz-focus-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAdCAYAAABWk2cPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAK2SURBVHgBtVa9jtpAEB58/EqArqCBBqehorgTVDQxPVJOiD5QAEJUeYLjniBcgRACKeQJQsQDBBoqkIigookraC1BwT+ZcQBBsNd7Ovgk2LV3vd/M7PyZgBPlclnc7XZPFovFv91uRZPJdL9fktfr9W987udyuRbPWSajDUgm3d3dPSOhBMaQUaAXu93eSKVSCryVtFQqPaBWXznJNMlR8zo3aa1W+7zZbIo4vYd3QBCEQjqdfjEkrVarzyhlAa4EvOt6JpNJ6ZJWKpUnHH7A9VHMZrNfLkjJO9Ecv3Aqwg2A1osevFs4vCQP5SF0Op3g9XrPflar1egzut9vCNVHzPS3j8EkcCAcDkMgELh4PxqNoNvtwmw20/tUXC6XSRyLqqZ7LS80CgaDqiY8IEESiQSIoqi7B53qE40qqVYsolTgdrshEokAeh9IkgRGIDPTPhJYC8SD4eg3k52RQNQi7XQ66txms+kepEUcjUah2Wxqrq9Wq6h5Pp8/4CUzD1osFuqPFwfnIsE1IBKbyDqANIzFYhCPx+EtYPmCGQxAppJlGQaDAVwLaFlBgRtAx7T/SNGNZWCg1WqpoeP3+4EX0+kUJpOJNiEqKWA49HGusA6gwPd4PMCLXq+nu4bpsK+6LVV9YMDlcqnkPKD7JyF1oFD+PSSHV9ZB4/EYQqEQGGE4HEK73dZdR+Ua6nh4gWXtD3AkfCL3+XzHZ4pfyrekIQnHApr2A2oqn4YM1TvDWkr3xbozPVAxJ0KaH1MRFllSvQG3gYztz7FtOct/mLqorZDhulD2BVzWJKW2kTZckVjBnviM8IKUQBtQ40ck/w7vAIUhnvGYz+cvwpHZbGNHkcQMwtXGnICs9YrCF/Q2GHb4e3IJBxLgo44ACsY6afbT4XDUWd09N+kpqOhTDT55Jf9/Z0b4C/UJLQCcLGi1AAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-fog-close .jb-pro-ptz-focus-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAcCAYAAACdz7SqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAIjSURBVHgBtZe9UsJAEMc3l1Q0WvAA2NEp1MyInVbGnoE8AVrSqW+gT2CY4QGwpTEFNcaOEqksKKiYcYYP/xsSJoRwx8f5n7nRu9u73+7lbu8waAc5jnM6m81sIcQlqhcoOZTTsHvAZT6fv5um2XZdd6Caz1DBMNm9YRgPMYhKHhx8brVaHu0LrVQqNjx/2wOWlAtnn9MiT4VWq9VHDHii4zXAPFdJsPGPwK1gIwHk7/dC+sXgAsBjroioFZsmpznCuHKLxeIxqqwirdVqvGmcbaOy2SyVSqW1tuFwGJTRaES7CEGd8TJbXOEo4YkjG8BQ27ZT+7rdLrXbbSUcx4+P3oOIVQ4Wr0Cj0aBMJiO1Q6Q1PvsirFzSkeKVqNfrKjM+8xeCybRMbUcrn88HRSasqs2R5kijisWitB+reiKm0+mhaS5VvMwqCdIs1WYKoJZljUmj+NwqobS8D7Wp1+tJ+5EPvkWYD33SIE4S/X5faoOHgCdC+jsdKc5GnJUUGiNIzwrpbjwh76tOpxMAJ5OJyjTwKp7wP/CnvM06LeFzdPwNd4AFihL+Chom/U86/HmiAr4CGOR4M2r0fX9cKBR+8e816deg2WzeRJW15ABPXvgxRXoVPFfiDWbSAhF7iJiXvUyagMmHmZlmGYK/aXn7HPSNAWui3AH4s9EnG8ibiy94DL6l3W8jL3zvetsMpNCEAw4cKGPCc9r8WeGj/YuBMlikP+yn3EGZYjlWAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container .jb-pro-ptz-fog-close:hover 
.jb-pro-ptz-focus-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAcCAYAAACdz7SqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAI5SURBVHgBxZZLjtpAEIbLjUFih8QFnBUrJCIOEHOCROIACStALBhOMMwJMlkgBCyYnCDMCUJWrJCQYB+zBwkJFiBe+QvZyEz86IEezSc1dj/w76pyV5dGEnS73cR6vf4SiUQ+HY9HE0MJu5GmaSNcFrvd7hn3vXK5bIU9TwsTw8Oqh8PhzhGRoIf1tSBxX9F2u12FVfVXiL3ksVgs1khWtNVqfcfljm7HgtW5l1Zrbyh4Fo7FYh8LhcLCGRDu2U6nc69YkDG22+0v98DZ0mazaQgh/tLbUUOMHy9E8eH8treDJ8lkkgzDuBibz+c0m81otVqRBAu4+QO7WecerDSDBB3RbDbrOTcej2kwGFAIvNc5dPVTTLHpv9INpNNpyufzBEsC1yF81dOVf8KslCHIEy4SjUYjIxDLDDoGKYAtDrNW13VT7Pf7azOOJ6lUKnAeySLB7jVIIezmMAS9A/igxIIUwns3VBQ+HpFCLMsKnGcjhX0CKLF2MpnQcrkMXIPzuX+KKax9phthtw6Hw7BlVqVSGel25wntqqy02WxOFnLj+xB6/HNTwmdXTqdTGTHmfKA7lhKSxAOCbPr9g90n82X6AcGfTgVxUTngtKlD+J4UAw/2S6VSzulfJAe8SZ3fiNRiQbTgHvgvI0H4Gxb9IAVwTexVmHmmQbiCD1suHy26En7xaDSa86p/A4ttrpuIT3ohpLcTxw/tAWJ9vzUaSWCLm3DXZzS+z7hELFzYjX/i8fiTu9T04x9LgQk+PbvDKQAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-wiper-open .jb-pro-ptz-focus-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAYAAAByDd+UAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAALxSURBVHgBtVYrrBpBFJ0FFAqBqCIrUXRBk5QaUleewBBIlwSDoW1CUlzz0CSlaJJHA8FgqMBg4CWIKkpRuEfQCDAYAvSeYYfsLgss6fYkk5ndmXvP/c1HYjaQTqdjbrf7PQ0VarLWgBW1Cdput3tutVrdW7qka5OZTEZ1uVwfaBhj9jDf7/ePzWazwe4hVFVVPhwOT3cQnRFLkvS20WjMzRMuCzKFyH7/AxkAg18oQp/ME24LsgENfcwBkJfvQqHQejqd/jr905HJGpnMHIYW3iHGp5AS2df/QabpfiKHfCdCVCN1qh1hv9/PIpEIi0ajLBgMskAgcDZfKBTMYjJVL8+nh7MeS/8ioFiQbDYbtlgseA/lXq+Xt9lsxrrdLkulUmw8Hp/poLB+JC+rkpa7FysiKEokEpyo3+/zBiIrr2EU1kKmWCxariMvsy46IRJWZFBSLpf5GApgvZUSYLlcstFoxMfwHnKQN4Mi+cYdDoe/IGpmslKpxJV0Oh223W7ZLcTjcb6uVqtxLxFayJtkfSga2Sycy+X4YnhlBzAQhGI9euTRqnhAaNjkKA5Unl0yALlD0SC0Au12m/fIvx5nRxsEYV2lUrHMgxmiYMwGwmiE1qzDrSiKSv0r8QMxF2Wdz+fZer3mhXAJCP9gMOCGAiBJJpM8h6jqXq9nWO+hLfGH9ohiVoTFIEbxwFqECMoQInwLy4V3mEMe0SB7YWtMJJzoVK7fLnkgKg65BVBM8Bj5gjHwDORoGNfrdUMu9SDnfl7d+HqAEArFCYNQYoxtAODfJSIBimTWpV2Swxt8PLwiRAgjGrwXuEVGmBNXw6MxP5KXMWYTCKs4U+2COH6g59tCu6uG7A7AYxteCcypVU+EmgVZbcJxkO4HcmplIEQuMcGOTz8nyT6T7on4Npw0mMBzgDnkqUZWNfyzWujQM/FB79lVQh2xeudbZ0VE36mvipzdRagjjtFtnSBlr9nxuS9uGCidk1HPdFp1xcvsGv4CcbeEIeSIw9MAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container .jb-pro-ptz-wiper-open:hover 
.jb-pro-ptz-focus-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAYAAAByDd+UAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAM0SURBVHgBtVa/SyNREJ7dBLVQELU4bdwrtFJOUCxsXEG0Ue4OK6tLKhEb/QsO/4LzChVtvGvE0sPOxtiYJuFyJBBICtOlSSAQCEnIj5tvyC676ybZYPLB4y373ptv5puZt6uQB1xeXup+v/9zs9lc4qHxK621VFAUJYbRaDSeDw4O7rvZUroQBXw+3zcm0ckbMkx8enh4+It6IWQijYlueiByI95g4oxzQXW+OD8/X1JV9e87yACNbbxeXV0dOxcUJxnn6okfx6l/OOHcnr0hhIzsFcg06jNa8obwbErKOfs+CDIhUdUbxrhJiGrknAW8HB4bGyNN02h+fp6mp6dpcnLStj46OkpbW1vOY1q5XJZ8iqTX19dPnYpkZmZGSObm5qharVI+n5cZ5ENDQzKy2SxFIhFaW1ujTCZDqVTKaabA+z4qrdy9uhHB0PLyskSTSCRkVCoV16gRLfYODw/T7e2tOOQE5zKocmRf3Mggzd7eHvEtQnd3dxSNRl3JgGKxaEaE6HEO553gOllXuQ3W3ch2d3fFyMvLS1siKxYXF0XWh4cHOYfzUMgKpE1t3Y026LouhxCVF8DBhYUFcz9m5HF7e9u5VUOV2pp8dnaWpqamPJMBKysrEh2kNRAOh2VG/q14c7XhILzb3993zYMT2IOCcTo4MTEhkqKgrPCx1gGePxgv6vW6EKJYNjc3qVQqSSG0A+RPJpPiKACS1dVVaQ9UdSwWs+33c6n+47ZYchqKx+NCvLOzI80NiWAMEkFyeM75lx5FdFhD4SCXIEJluxRbTLm4uDjmcv3RLgIYgrfILZBOpymXy0m+UImIDLLCATyHQiFbLq1gB/90bHwrcNPAILwGAaREcz8+Pso6CNsRGZDGx0eS8xXqRgh5DYkgIwZuFQPdyGACfwJ+PHGhnHKUOnkEetS4U72Co/uNWdoC3yovUVqBiD1EZW4fGRk5MwkBjjKIBRoAarXa12AwWLARIpdY4McC9RcnR0dHZjPabhosMOkG9S9S2/8MMLDfRKhljawjoYU4wNXby79OgavxJwrEyFlPhBZinasYH+pPPOMaNL4wMJphJZ553Bt/Zp3wHwTYnvHjbDCuAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-wiper-close .jb-pro-ptz-focus-icon{display:inline-block;width:28px;height:28px;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAcCAYAAACdz7SqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAANOSURBVHgBtVc9TCJBFB52r7jYyCXWnhcbO1aru8RErjGXWBxX2BijGG1svNhJo56VhQU2ViQsgdDQYEFDc5rQ0BDoaMgRGhoKaMhdCHDve+6QZVhgMfglm9mdn/e9v3kz6xEusL+/79c0bcvj8fjpc8V6gCY9VXqK3W73MZFIpN3I80wj03X9il79wh2q/X7fJAXvTdNszkQaDAZXaHF0BrIRcvLKERE/OQ3qYwh/06shXg8vRPl8PlEqlZ4nktoIV8QcgBxwItbfitBOvL6+XioWi2XZp8kXIryaN6FNdpSM8spvthRWUmPaJ25vb4vV1VVRqVSGBCwtLYm1tTUeW1hY4KfVak2hFe+J+B+5+QkfnL2Hh4fI1KAq/OLiQlxeXorl5WWxsbEhNjc3RbvdFrVajVvMkcTlclmk02nRaDTGETfJ1Z+wlTxWLP84zQoEAkwEodlslh+QqZDWY36hUGByp3m9Xu88Ho+HNaokfidCCAIh2kgkMlYQAOtyuZy4vb1lBW9ubnidCioa37klkw0nQrgWgpLJJMfXDUAOBbEO66GAAkOS+tSRk5MTXgjr4FIAMXULrIObz87O1CHv8fHxR03thXAkDhbahezt7TlpzkA8sc7uUngIQIhUgNRr70AWQsu7u7uBEPTBdaqboQTcCIUgXL4DUBzjTrHVDcP4BmVlR6fTYVLg9PSU9yC2CIghEG7HHIC2GluJOGYyGR6DYiDCGEKD/iErNS2sU238QnH9rGqDopDP5zm+EIJ3aL6zs8NEUAKCQSiVwxp4BFaHQqGRwkJoRqPRkEZ7tCjGAAJQHED28PDAVkirsX1kUcAc7FEXWc5c7+iQTltnpyMgHNYgUSSZBL5BhqTBHkVMAZnxKsijMbQ6Vf+/FFe/mFLs6/X6IJZ20t3dXXb34uIihwFuTqVSI3Mt0nPia3LtpVLot461V0FWLiiBZwzMWCx2xOSy5+DgIEya/BRvA1xfvlKxr+JjUBwola/Fy81u7iDCX5JwiJSPHNJm3sQWoTnUp06a57XFIrxW+0dug8guutM8EvEHpxPIJRDDH6qFA2UmrSSrg0SO5HJLjhDdUxue+bLtQG7QqR8ggVvC4beCFHumRMSxVJxEJvEfnFm91YrgD/sAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%;cursor:pointer}.jb-pro-container .jb-pro-ptz-wiper-close:hover 
.jb-pro-ptz-focus-icon{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAcCAYAAACdz7SqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAANwSURBVHgBtZa/SyNBFMffboJaKAS10SZ7hXqFegGbCyjmUPTAH3eHlZWmULG6u/Kqi/9BLEQRIdoKigf2l1Q2ChGtBLmtFAQloPgL1HvfRyZuJhuzBvOFzezOj/eZee/NZAzyoMXFxYjf7+99enqK8GNxlZVtyhiGYXOZ5vo/MzMzW17sGaVgPp/vN2DkTbZpmqs8wfloNJqh10AZZjEs8QpYAfzx8TE6Ozub9AQFkGf7l55dWLbYTmxqamruRehbAl8CG5UEOvTNmWSmekHCVAgIJVgB9eHHD1bJSTPp7NXe3k68Hejg4CBvdF1dHTU0NFBVVRVdXl7S/f09nZ+fl2BS4Pb29geXMXyIe5eXlxM6FMaHh4dpY2ODGhsbybIsamlpyUFQog/geE5PT2l3d5eurq6KgTPc7x22kpGN5T+3Xl1dXdTa2ipGDw8P5bm7uyvoB3hTU5P0t22b9vb2ZFIu+smxjSOmEbfW2tpaAcJgMpkUQ25ACG4+Ojqi7e1tqq6uprGxMRmvi8P1BaXJLyE34MjIiBja2dmhzs5O8iLAMUGMw3h4yCkOobDYs+YHfXAkEpGBWJ1KJMTUqzAObh4cHNSbAisrK0FTrw0Gg5I4GKiEBAmHwwUzV2pubpZJOV0KD0EIkS6TlxxwViALMcvx8fGcEdQhKzs6OvIGYxLIcEwIxuFSvEP19fXSjpzQ5RsdHf3M5XtV8fDwIFDs0f7+frq+vpYtAjAMwu3oA/X09MgqEcf9/X1pQ/wBQhuyPZ1O56/SNOO+oaGhMAM+6rM5Ozuj4+NjiS+M4B2ZGQqFBHRyckLd3d2USqWor6+Pbm5uZAySqa2tjdbX1+VbU2Z6evoX3JumIoIBHA6ATUxMyCl1cXEhQOxD1ON9c3NT9qjufl2K5a+pqdliA4liHWEc7kOiwMXOvYpvwJA02KOILwS3FoGuoTTlWDKMJJUQ4qwfDmoyAwMDkkiYIBIOsS2iJH7k7MW1JPu3VpbUyYWVw91u4oWtcjyjOSi0tLQU54bvVBnh+vKJry82PnKHAydFDI1UATFwTgHzoIgtZvPW4Cxw1VlX0YtZFhjT6wvOXrgBK+ZnjcqXimHMrbHUZXuSV43kCpE3IUTzvPfjr75s61pYWAgx/Ctndy9/WlxaCsKPzZs+xc8Ww9IvwZT+A8hTw5fcMmXrAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-ptz-arrow{cursor:pointer;position:absolute;width:0;height:0}.jb-pro-container .jb-pro-ptz-arrow-up{left:71px;top:15px;border:7px solid transparent;border-bottom:10px solid #fff}.jb-pro-container .jb-pro-ptz-arrow-right{top:71px;right:15px;border:7px solid transparent;border-left:10px solid #fff}.jb-pro-container .jb-pro-ptz-arrow-left{left:15px;top:71px;border:7px solid transparent;border-right:10px solid #fff}.jb-pro-container .jb-pro-ptz-arrow-down{left:71px;bottom:15px;border:7px solid transparent;border-top:10px solid #fff}.jb-pro-container .jb-pro-ptz-arrow-left-up{transform:rotate(45deg);left:32px;top:33px;border:7px solid transparent;border-right:10px solid #fff}.jb-pro-container .jb-pro-ptz-arrow-right-up{transform:rotate(-45deg);right:32px;top:33px;border:7px solid transparent;border-left:10px solid #fff}.jb-pro-container .jb-pro-ptz-arrow-left-down{transform:rotate(45deg);left:32px;bottom:33px;border:7px solid transparent;border-top:10px solid #fff}.jb-pro-container .jb-pro-ptz-arrow-right-down{transform:rotate(-45deg);right:32px;bottom:33px;border:7px solid transparent;border-top:10px solid #fff}.jb-pro-container .jb-pro-loading-bg{display:none}.jb-pro-container .jb-pro-loading-bg,.jb-pro-container .jb-pro-poster{position:absolute;z-index:10;left:0;top:0;right:0;bottom:0;height:100%;width:100%;background-position:50%;background-repeat:no-repeat;background-size:contain;pointer-events:none}.jb-pro-container .jb-pro-play-big{position:absolute;display:none;height:100%;width:100%;z-index:1;background:rgba(0,0,0,.4)}.jb-pro-container 
.jb-pro-play-big:after{cursor:pointer;content:\"\";position:absolute;left:50%;top:50%;transform:translate(-50%,-50%);display:block;width:48px;height:48px;background-image:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAIVBMVEVHcEzMzMzMzMzNzc3MzMzPz8/Nzc3MzMzMzMzMzMzMzMzLVn6fAAAACnRSTlMA+duduRUwSGSD8toSsAAAAI9JREFUOMvV07ENgzAQhWG3lLSp6MwCViYIRSpG8AiM4FWMaPymDBKESMF/cQ0S136F353vnLuo3gp1kOYEoCXW4LFKIZAnqAXYICeASoAdzgG+cApwgF4EfwF+oDkCqIwA6gnyAKA8AaizQhsBAjzuqUHofInGIQbjRxXjMrTJuHDestR4Bng4eGrN0929PqNfzC6h06weAAAAAElFTkSuQmCC\");background-repeat:no-repeat;background-position:50%}.jb-pro-container .jb-pro-play-big:hover:after{background-image:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAJFBMVEVHcEz///////////////////////////////////////////8Uel1nAAAAC3RSTlMA+rbVFUecgC7rYhEEz+4AAACbSURBVDjL1dMhDsJAEIXhdeiGpKYWQVKDWc8ReojFc4ReAlPFFQazad7lIGkb0jK/WEPSsZ+YN5mZEP5UrZIP0vgE0Kv3oPlIJlAk8AJM4ASYwAkww2+ABfQg0ImgugPYsfNBVl99kA0AsjOArAPQpRTGHiBB8whxM0y+3skXNltsvMYriIWrpWPA80mFJ5qL3gAfB1/tcAt7rzdiV+HEgs1oBgAAAABJRU5ErkJggg==\")}.jb-pro-container .jb-pro-recording{display:none;position:absolute;box-sizing:border-box;left:50%;top:0;padding:0 3px;transform:translateX(-50%);justify-content:space-around;align-items:center;width:101px;height:20px;background:#000;opacity:1;border-radius:0 0 8px 8px;z-index:1}.jb-pro-container .jb-pro-recording .jb-pro-recording-red-point{width:8px;height:8px;background:#ff1f1f;border-radius:50%;animation:magentaPulse 1s linear infinite}.jb-pro-container .jb-pro-recording .jb-pro-recording-time{font-size:14px;font-weight:500;color:#ddd}.jb-pro-container .jb-pro-recording .jb-pro-recording-stop{height:100%}.jb-pro-container .jb-pro-recording .jb-pro-icon-recordStop{width:16px;height:16px;cursor:pointer}.jb-pro-container .jb-pro-zoom-controls{display:none;position:absolute;box-sizing:border-box;left:50%;top:0;padding:0 3px;transform:translateX(-50%);justify-content:space-around;align-items:center;width:156px;height:30px;background:#000;opacity:1;border-radius:0 0 8px 8px;z-index:1}.jb-pro-container .jb-pro-zoom-controls .jb-pro-icon{vertical-align:top}.jb-pro-container .jb-pro-zoom-controls .jb-pro-zoom-narrow{width:16px;height:16px;cursor:pointer}.jb-pro-container .jb-pro-zoom-controls .jb-pro-zoom-tips{font-size:14px;font-weight:500;color:#ddd}.jb-pro-container .jb-pro-zoom-controls .jb-pro-zoom-expand,.jb-pro-container .jb-pro-zoom-controls .jb-pro-zoom-stop2{width:16px;height:16px;cursor:pointer}.jb-pro-container .jb-pro-loading{display:none;flex-direction:column;justify-content:center;align-items:center;position:absolute;z-index:20;left:0;top:0;right:0;bottom:0;width:100%;height:100%;pointer-events:none}.jb-pro-container .jb-pro-loading-text{line-height:20px;font-size:13px;color:#fff;margin-top:10px}.jb-pro-container .jb-pro-controls{background-color:#161616;box-sizing:border-box;display:flex;flex-direction:column;justify-content:flex-end;position:absolute;z-index:40;left:0;right:0;bottom:0;height:38px;width:100%;padding-left:13px;padding-right:13px;font-size:14px;color:#fff;opacity:0;visibility:hidden;transition:all .2s ease-in-out;-webkit-user-select:none;-moz-user-select:none;user-select:none;transition:width .5s ease-in}.jb-pro-container .jb-pro-controls .jb-pro-controls-item{position:relative;display:flex;justify-content:center;padding:0 8px}.jb-pro-container .jb-pro-controls .jb-pro-controls-item:hover .icon-title-tips{visibility:visible;opacity:1}.jb-pro-container .jb-pro-controls 
.jb-pro-controls-item.jb-pro-face,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-face-active,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-fullscreen,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-fullscreen-exit,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-icon-audio,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-microphone-close,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-object,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-object-active,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-occlusion,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-occlusion-active,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-pause,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-performance,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-performance-active,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-play,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-ptz,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-ptz-active,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-quality-menu,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-record,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-record-stop,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-scale-menu,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-screenshot,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-speed-menu,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-template-menu,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-volume,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-zoom,.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-zoom-stop{display:none}.jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-speed{padding:0}.jb-pro-container .jb-pro-controls .jb-pro-controls-item-html{position:relative;display:none;justify-content:center}.jb-pro-container .jb-pro-controls .jb-pro-playback-control-time{position:relative;justify-content:center;padding:0 8px}.jb-pro-container .jb-pro-controls .jb-pro-icon-audio,.jb-pro-container .jb-pro-controls .jb-pro-icon-mute{z-index:1}.jb-pro-container .jb-pro-controls .jb-pro-controls-bottom{display:flex;justify-content:space-between;height:100%}.jb-pro-container .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-left,.jb-pro-container .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-right{display:flex;align-items:center}.jb-pro-container.jb-pro-controls-show .jb-pro-controls{opacity:1;visibility:visible}.jb-pro-container.jb-pro-controls-show-auto-hide .jb-pro-controls{opacity:.8;visibility:visible;display:none}.jb-pro-container.jb-pro-hide-cursor *{cursor:none!important}.jb-pro-container 
.jb-pro-icon-loading{width:50px;height:50px;background-image:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADwAAAA8BAMAAADI0sRBAAAAIVBMVEVHcEx4eHh4eHh4eHh4eHh3d3d4eHh4eHh4eHh4eHh4eHiqaCaRAAAACnRSTlMAikwX3CxpwZ7zIGi5xgAAAZ9JREFUOMt9lb9Lw0AUx2Njqm4iGEqmEnBxslKUdhJctFMVcW5wKZ0CLpJJQRw6iVqQbg3FtPdX+l5+XHJ3796bQj557773fe8ujtOI+6jz5p84dHjTkYD4+RhT9CIRZWRPJr1bChnbY532GhT4oUpbI6HEuqvgIH9544dh9J4/rpr0Ms8oV3zMNT7X1MXSmaznzfHjM4n72moe5n8ryYrW9rKRvgf0S93JA7yKa9lbzUg3keJb8OVCtwkrFmoO4MnsAuj5rGqnZg+GZXUXkl9NjEui9n3YA9XgpMgakLXz6ujMTIgrCkPVv0Jil8KgKQN/wRN69hLcb1vrbR2nQkxwiZTGQ5Teb7TO8PUaS8Q03sE+zkjP8qbjzgJtEhRbV4gnlkOFeM7hDYNdxPbiYFvKSHN6L2NmY5WzMYPtplZdTxncRvn2sI+DHIoug22jWMaA12Y7BrXzrG8BX32XPMDKWVzw1bdMOnH1KNqNi8toqn7JGumZnStXLi0e4tcP6R3I635Nc/mzsMxl9aux9b78UVmn2pve8u6eR50j9c0/ywzyVl5+z84AAAAASUVORK5CYII=\");background-position:50%;background-repeat:no-repeat;background-size:100% 100%;animation-name:rotation;animation-duration:1s;animation-timing-function:linear;animation-iteration-count:infinite}.jb-pro-container .jb-pro-icon-screenshot{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAJFBMVEVHcEzMzMzMzMzMzMzMzMzNzc3MzMzMzMzNzc3Nzc3MzMzMzMwv5ahDAAAAC3RSTlMAW+8S2UGVwyyZnOTRhEMAAAEfSURBVDjLzZKxbsJADIYdBQpsSCyVMlXAdEuQYGKpWFk6dWHJgsQ7sMDSpUv3PgITAQT0f7ly57ucQ45KXRAZot+/89k+X4ju9KzL4XOhlm3pR0enYrWViSRPXQIQSASkRSkIJEFRimYCuQAHSW89IOv6SH5TCsuAj68Ab1wDzqkAzqoC7AUAPtgsABgkBBgkCJiNHehGok//KRVsHqd+3Dj1/vukt3AH/Jj05s5/AmyZhFVWXDls44iVvfQWkCvgxU6g9ZdJfCLvjJbYaT3GvjOY4mQSG3SJGjhr/Y1Xohp+TGKqqzexZ/1GVGdNCitt6R8zVvb9d+JmKdl8o5sPWbtxT6zFuJcDQtk92MNmYiXHquYlZlVt1j4P6cd7fgHFW7Nhqu29TwAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-screenshot:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAALVBMVEVHcEz////////////////////////////////////////////////////////NXt0CAAAADnRSTlMAWe+X2TINXUYZvctoYyuS2NgAAAEVSURBVDjLzZKhDsJADIZLCAsEg8WgEGCWGSwCgZxB4MgEAonB8wQEXoBH4BEWXgPJgEBG0mdgR3u77raRYAgnlq+9+/t3TQF+dKbZcJXSYSnzlViT457lRScKmBqILSFA3GoO8S4E+Ex5JiSJ4CbVdyOQdZNomX9D4dl+ko3NC8vFFmhPy8FIsi0ZlwLBW/LY5BxYYreUSgoFAEmhB5Rc9OCbUoXmTmDadQKTn4y6A/XTaoSKdb6KyGU6RJ7eHgpb3ABinAoil303xB6vQnRahNhXvMdre+fzOgxVrokX4jHAnBh8PALU8Eq8BqgTg/vePF8tpuPy9/NFaalSc273RizarYqfkswjifNMQ/TyTGMv4v87L+ks5gqDbc9OAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-play{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAIVBMVEVHcEzMzMzMzMzNzc3MzMzPz8/Nzc3MzMzMzMzMzMzMzMzLVn6fAAAACnRSTlMA+duduRUwSGSD8toSsAAAAI9JREFUOMvV07ENgzAQhWG3lLSp6MwCViYIRSpG8AiM4FWMaPymDBKESMF/cQ0S136F353vnLuo3gp1kOYEoCXW4LFKIZAnqAXYICeASoAdzgG+cApwgF4EfwF+oDkCqIwA6gnyAKA8AaizQhsBAjzuqUHofInGIQbjRxXjMrTJuHDestR4Bng4eGrN0929PqNfzC6h06weAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-play:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAJFBMVEVHcEz///////////////////////////////////////////8Uel1nAAAAC3RSTlMA+rbVFUecgC7rYhEEz+4AAACbSURBVDjL1dMhDsJAEIXhdeiGpKYWQVKDWc8ReojFc4ReAlPFFQazad7lIGkb0jK/WEPSsZ+YN5mZEP5UrZIP0vgE0Kv3oPlIJlAk8AJM4ASYwAkww2+ABfQg0ImgugPYsfNBVl99kA0AsjOArAPQpRTGHiBB8whxM0y+3skXNltsvMYriIWrpWPA80mFJ5qL3gAfB1/tcAt7rzdiV+HEgs1oBgAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-pause{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAFVBMVEVHcEzMzMzHx8fMzMzMzMzOzs7MzMz4IlKVAAAABnRSTlMA6xIU1hVqIuOVAAAAMUlEQVQ4y2NgGFYgLS3RAEQziQFZoxKjEqMSaBJpEAkgIw1ZQlBQRAEs4QhkDeIMDgAWx1gMHyIL4wAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-pause:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAFVBMVEVHcEz///////////////////////+3ygHaAAAABnRSTlMA7OgSFBXMmAA4AAAAM0lEQVQ4y2NgGFYgLS3JAUQzi6WlJY5KjEqMSqBJpEEkgIw0ZAklJSUDsISikpLQIM7gAJjhWp6XcaOxAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-record{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAHlBMVEVHcEzGxsbMzMzMzMzLy8vMzMzMzMzNzc3MzMzMzMxEZ/aQAAAACXRSTlMADPKKKeBxlV6neZ4mAAAArUlEQVQ4y2NgGDZgJhpowCURAJeYpIQENJEkCgWRgCeShAGyjfgl2EqwS7BFToZJiLg4ICTEImdOh0pwRM6cDJcIjpw5E6aDFeh8B5gECBCUYAkKCoMbNXNSOlyCgUEQ4apJJmxIEkjOVWFgxi4RgEsikGQJnEYp4pLA6VxUDyJLIAUJcRLIwY7qXKSIQvOHWCQODzKIleBPPjgTHM4kijNR48oGkajiYUMykwMAAfmZhUjBISQAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-record:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAJ1BMVEVHcEz///////////////////////////////////////////////8NopmKAAAADHRSTlMA8gyMLeRw1x2DBpWGN2QDAAAAt0lEQVQ4y2NgGDbgDBpIwCVRAJc4KIgEZM4EwCU2KSEBHySJCcg29iBJGCBLgHUs24FdYlnNYZiESksDXKJALebMaagES8yZw3CJypgzZ2A6WIHOd4BJgABMgh2XBEtpaBjcqDMHs+ESDFyLEK46aMGGkEB2rgQDJ3aJAFwSUSRL4DQKp+VHcTkXxYMoEkhBQpwEcrCjSCBHFJo/1GIO408MOJMPzgSHM4niTNS4skENqnjYkMzkAEgzyFpeX6L3AAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-recordStop{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAAOVBMVEVHcEz///////////////////////////////////////////////////////////////////////99PJZNAAAAEnRSTlMAMPndsnZj1CSYPcmL6wsdoU80pOJLAAABu0lEQVRIx5VV26KEIAhMA++p9f8fezatFDPr8LSrjMxwiWlqzRuMYKW0ENH46c3WuGyVLXEduuO83UyK59fl1jX5EEUXD9DOaSh43XEP5zUIdZ4pAecTofVnWSt3ip4rx7N61vjnY1D30CpH4QQR8vsP+RA5Rs3KpjfMU/pMim/LgbvH7DF2F8sU0owqapKLqgnhuGWwImUagn2zhUX6WQQoYkXG9WxSAJd700/ygsCpAoliaDsPiG48GM1X5Ft/06sfp8DrDE+3DpekWjxM6366fgEcnklC+AIIWYQmPEeAaUmjFOnhCLDfxZRH+w1gU5b/DYjfNcyJ0p7dxX8B+FwxQVtvAGB5ig0d5gFA5KbzS91hI8CenvlHflfN/XvzJQnxbBEko1gbvVnPii+FadSVRUEaYylQfJtpLB+aRG4LY/80yKdUbCraM0lozGR4ewZ0Wtnj1iC7hjWKNnjYmR62W15cLlL3+2pyMR09jccyuyUrHKsvthc5xsY1iWJ0Xk3t+2XP7AnWwrAQmBH6asXubmL1Z5Lz6o992jWiu9lnMSiQsK27FS9NxhCumZgB2fTBPFsFolhZr5B/D3o9sJAI6skAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-recordStop:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAMFBMVEVHcEwimP0imPwimP8imPwimP0imPsimP0imfsimPwimP0imPwimf8imPwimPwimPxLmg1rAAAAD3RSTlMA15sisGUvdz3szYwMT8H+hDJEAAABx0lEQVQ4y3WUO08CQRCADxBQchBiMDE24KswFlw0Wtig/oHzUWglJBZWio3GDjUxlhhrEzT+ABMrO2iptLLVRluDggI+xtmdXW73LkxxNzvfzuzszNwZhhDzdOxqaPGgYrgkOgFczh90ezAJQmpL2v6OHYnqk2aWxOTtAnu/O/Y5XNZXmTZjo3ot7aESwLdFeqAM0MoJkMf9ltwVQJ9PcQN0UFIJogvdJgLQVFMsAlxyBaPmVBDCE8W7qd+2SDsx0q4OwhSrCG134W54jDfKLjDNxaL8/9AAMM/solptRnoALBbwEPWYrOEzLnrZsTGoMW+fBHG2SiLPUNI6KMOH8QS/XsCMBYQekIEv7NGZF/Rht2yqmA4i3UG9O0iTqgMfhirDhRdU8XJZqqEO8tDAqje8IIt1r+I5HmBjfD9AxQ1MgJQRpc6GJRALHOAS1WRlhMs4VaSFzwIWzCUF3op71kdNsNs/FDCuA58YqCQl7IhN3WbDnlLtfjnuON515WM17c7w41QPOuBIzDT5wqi0T4ESGV3gjtTjkuPATwHoX9+cPRlmmvJ57YAir2qKy459QL/UhrS/uAu3xf8KiX3DI+b22t6jc9F/qfaum9E1pJ4AAAAASUVORK5CYII=\") no-repeat 
50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-fullscreen{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAALVBMVEVHcEzMzMzOzs7Pz8/MzMzMzMzNzc3MzMzMzMzMzMzMzMzMzMzNzc3MzMzMzMxdHBitAAAADnRSTlMA8AwGYt0aUcg+til5kgmZywUAAAGWSURBVDjL1VM9SwNBEB12IcGkMagxNktQSZFCYimIJAqKpZ3Vco3YBYs0NkKQgBg4ECtLFSFVCGgQrERBsLayPE0Ip7n5De73BdKLTrE3ezPvze6+GYDfsAQKy/Rz0i/OZJjY9XREuohTKulS+ZFGiADfyZXU5uHktI44VH5apDx554aXwJqloiIwiEsStAjCAsRNF1jCmGqa4Vc+BvS4JkjwzyPE6xiwzsxxeTbZxqjkANSWZFkoIj5bQBl4aBDfkPDNpeRRialB+SRAFz8UU1sAaEUjSCDoJ7iukZJ1V+c01bFczM1pWaa+a0Rp7MHn4V8Z1R9vLLCv9WjKdVFfk77JP+bZdz35YAfKXx6KhKp93abUYVbrj49g9aAYSuFCLbPUwzdCsYEWTloXgw1oGwQbENeuKwxzXhxwAADRMFd+zzRc6AAASY6RH8VjUHaXTrlOpDgCUP3gelc01e2d+f16cWbnQ46BGCRNVsWAWQJVw2xGfUXVv2k1OsLfazXqblzS99u1FwKFvBJioXBY2+r82U75Ab7O0ypVV0wKAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-fullscreen:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAANlBMVEVHcEz///////////////////////////////////////////////////////////////////+GUsxbAAAAEXRSTlMA3clDHfdfDQbsVG00u4Cqkr04dRsAAAGaSURBVEjH7VXLdsQgCPUBAROdjP//s1UTDUxsz3TRTc+wC94bHsLVmP9hkE8LIdvgupt2tFhcpy2CMZyVxNePEoqDp4hwEGz5PTqWoZNzLrmD9riltBEYzzpZQ8bweXy56Qy6Tivsp5JQYtawuMH3eJlSxIJtdSSa4xer4lMB89pK23mKrwjZVpsRzLq35vEE3+r26h6w5EKppcp3fP2lIpTPWJvoXoobeNI1sM3haHvx56tu2sdElJ54GbnPQ7RZ1sCpl8qPEMXBNQ8vN82jNbzLCPzGSDOZj/2Bqd19R1rELIEbDFrUJfKYdlALbDuDMko/hz7t8DqtCfr8h1Vt4rn7eh/6Ph37ch20aW8McsfGCOXzcr+GOlQG1rJ2HSHUDO/4Mu01qVAqTCpCJfgZ3phTS2pKm5aZOMUbs7Q6nk6L8UzRh6W78jH+gD/VRxHokPuNgUGTaPPR+zDR1mrlvcGgwkAVacSbeoN4Z0rb5/6XLrW/2GTLk2NhXHRKzrqAt9cD4rr4ddvae0NAYgOICdZyvPj4UYRf2BdfbB8iWvnTUwAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-fullscreenExit{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAG1BMVEVHcEzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMxi4XDRAAAACHRSTlMAf3WqJmy+7azWNnMAAADNSURBVDjLxZQ9E4IwDIaLfMzCOTDSOz07OjLir5CR0dmJFVHIzzZtnMzbOzcyZHnuaXpJWmO2ji4GhpAzzZeQzw8FKCj1pMHss9OgpwsGOS0YGOcVUMPsfBVkmJoVCBJW0FFB6SFIaMKAlRGDnEiDkuM00lP3SmL6H5QSh+iIr9ZWVWntUV9Z4qWbHAWrhcUYNLC4Wwm3xb1r2mOQYoVn2EKFAVb81KHiQQq6L3vSUoMBUmSzgCKbVeiL3eTp3Odf8H1sxRAZZNZt/Vt8AHcPQbiQQVF+AAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-fullscreenExit:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAJ1BMVEVHcEz///////////////////////////////////////////////8NopmKAAAADHRSTlMAe4Fzh7MZbKPtN8gpX19xAAAA3klEQVQ4y8XUIQ7CQBAF0CWhKQHDGtQKQFcURUgqEARBED0Aoqp3IKnoATAEW8MNOACmhQYKcyi6uyjmr+aLqXj56bSbVoh/J3HBxcw0ZfA2c3FiQLGeQcWh1lOVDDJTAeCbCgAR6QqCDj0xCEU5Bl2BIALKMwhdKjGILRUYfCIOEynlvABANk64M5jabJxHfJY2I76yzYu/ZCc0s1WbNYAQ3jxqwMoGHoqWGHpUQajEgcYYcKWFtjLE4FGJQewoxODRDQOqKPPcHl9sfzSXa/0L349tEDsOsp/8+2/xAY+BZBY9KhM5AAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-audio{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAIVBMVEVHcEzKysrMzMzMzMzMzMzMzMzMzMzMzMzMzMzNzc3MzMz8gU00AAAACnRSTlMAL0jMZKt96hGJsSKv1QAAAJ9JREFUOMvN078KQFAYBfCbJEw2q6SUyWxTnsJT2JTJ6D38Gb6n5PpT6BwbuaNfTnz3fEp9fZwAP7czD0MsGCzBYJcEYsGwBEHQQRB0EII1CMAWdAOzyJUvJ4jyDVyZVHKGpj9guEI2IuhaDFadYnCkIm9I+kPgn8t+kI7kOsT72HcwQnJR9Gofy8DrwwtHK8pLzdeALg5fNb6cdJ1fOjOGYrl5CLFcggAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-audio:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAJ1BMVEVHcEz///////////////////////////////////////////////8NopmKAAAADHRSTlMAL0TQf2frEaZYt8E+idL4AAAAnUlEQVQ4y2NgoDdgUcAuzr4Hh4TaGQHsBp3BLsG+BoeE2hnsEkCDsEqADMIqATIImwTYICwSEIPQJFhnBjKInUGSUA2CSPCcOcIwB1ki5xRM4iiDDrKEzQlsEuUF2CVY0jbgkDhjMLQkEkj0IM4gQQ1EHMHOOXMiakRpTiQQtXgTA+7kgzvB4UyiuBM17myAM+Pgzmq4MyfO7EwjAAAEf+BAxqI/agAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-mute{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAMFBMVEVHcEzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMztXryaAAAAD3RSTlMAHd4P7ztyiy1OnKhdx7uY2gyAAAAB8ElEQVQ4y72Uu0vDQBzHE45C3SJFaSGjUEiRpJRCBifXokkpSv0DfOBrEKxUUSoFH9ClFAUfU7c4OARRCs5WjThIsJuDlLo4ldvPe8SYxGyCt1zgw+dev+8vHPcvYybvfCR2OgUv2CoLdI62bUn3gq5UJxOomZofXPUNrIChJRklfcDSiKK2bYQ6PiCaREk0EQoYPFYa3F73F+DGsBJLmAjBAMBKakG15KAxqIhd2KuK5reRccBDNbqGJhfSlnNc/kBh4OY+vaoR5QrRC0afKgxMrKdXNKI8yxRENpyd+i0VA6wMm5IOBmORCwfAFjGwolofevbuPrvpGhRgpVaqiJflrLuUQQFRToS9l894EMDe7q6yL7/G5wMATc5mQCiAvQo3HAbwLtyYHQZgTxhohgGUa/CP12EA16W2/Ou4ZEj1dNG9oPMkZGiwJWQ8j+huPoJy9Z/XHTfUFSbAURveCtE5px5vs7hQDKS25FSDn2KR5Up1/lRmIHne7BvKd82LMRJbSjr5M40lmaYZ5/PRpiCpizjJyk+CwOGOzACLpSdz02QxkiuRhd8dNOwkV7zl2YX1mcySKJrQiPkXY9klSt6rkJPRwIlmzrcWSGyzHuTb11V/N0y/S6SdwdCR4m/47OIN7XOQCfwJwHGp8IcfyRdBLEZK4Uxp6wAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-mute:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAAP1BMVEVHcEz///////////////////////////////////////////////////////////////////////////////9KjZoYAAAAFHRSTlMAbk2o8o0P5TsGnVojFi/XHcV9urLaq0oAAAIQSURBVEjH3ZZZkqswDEXxLM8T+1/rkw0BuwM0f131+IvDyZVk6SrL8p88QCHp8UAnWmoWoG8AwQRN4/vOiqwUuwWqiob646MMhEW+KnMLMIXffgiZoODBusb7kCzjByF7OOszkOyp4amJ7fWVPwCN6BpyWRypvANPCvi7u4aTXe13YI+KkYA5bMRTSMlLnSyGwisSH40B0FJOrKVB6iAw2U7sGgPgQ/DTPQjitCOZT4Q6gWCtGwGFbSM34ENUNSpAKTACmC31rnRgJwjDdkFAJpfkAsZcAJvCSVQB2FaWWKdB/ATMAHwIUYL21lRBZRDiC5AnsBHBUqxEyaraFMoz0AnvHNa6xDWS34FGgNf6PYAEZoCt+BroUWFVbeb5HbCqSvB2cToyca+Adjlycdawt0C/HeyScl1W/w3gsU3aAd7JF8AuAN6isr7NAogXAFeq59Ec7ifAm49hF8wCMWfVNXAeCAkjEGuBr6R5ZGJzBrl4gGniBME+w8aMMyCsNZtfaemnmYaAQ6I3Rz0fnDhHJxc93Vrr7sHWjP3XRtTba2L3EkdFVaPC6YmXxKI9DGF1YPLdiwfD2r37cL5nDVw+uKzU5K27hsXCXPtyC2uyyl0D0p2TY7VaIvFYWU0Dm8TdOXmrVp52HG4lU4K/N38PpS/Kw70dbmb/sC0WCcQwNNzzjrzUj7veUyIIyL/4m/EP8V829O8zh5EAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-ptz{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAAM1BMVEVHcEzMzMzMzMzMzMzMzMzMzMzNzc3MzMzNzc3MzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMxDm1AzAAAAEXRSTlMAHqw+E8It/gjxc03jhGDTlgAjE2kAAAI6SURBVEjHpVbZkoQgDBwg3Of/f+12gtfMbK1Yy4NampCju4Ov1y8ruNyyo9fyCibVlNW6vUu29GpWQwTVdPTernqwffHex0WP4Nro3k+PrMKCvew/PcZ95aoNsY9yua+cGxTZsnf2K7ch4CAdGnWwYx/5LgKZCgxGdrhH3BeqNmnwviFXPZbQJmcM25HJ5n7/Pczr2QqksMJfO7BJOHLKLWFdMiL1QXYRwFagMklb4GB1zS7s8Le3fqH7Q28tVLkKdIzeaOJB2OKtY8KfTQCUa4++dNsLbrqpTR7xguHGt8hfg8P+seuaEkNdBjahzOD7g1cHP21SL2LtMNBKMdTRJjffTX1wVgrsEb1ITJU6QknwYJAcQky2cE02NbzPp70KyKggtpu9xM7Ik+CBIqY+8FrYyRUkrs+MEm1TG6tG9LZxNDNFGDue63TYGgIHqUUcuNhvhyZtn5LnlDq3Juxa4noCuX0scErOpNMDCrVcHPCS5rFGuQR9KZpxnB6CQ0ZO+AIBcTPk3UdbTx1L7jLEEHsMW7ZYm8NlIFyxn8+xdL7MURYEhzd5k2N2TabxIAORwC2r00QQfNSgrnqTJfh7cDmDu9ggtV0EZFr7kGu4KAxtNDln406TLwF9iZCIwn9EfjPuncRHYm5x3NfK3cFAS5kWxljSnTFlPHBAUFg5HsCCVOd9cdwXq+027un2QJlMjqISJutCDceRtTTuT4+1Q3HWHR8cu0fh9sGvADTYH9iLah/9nPz5+/MDJnQfoIVoAnQAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-ptz:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAAQlBMVEVHcEz////////////////////////////////////////////////////////////////////////////////////1bZCPAAAAFXRSTlMAeVoYDS1pBiPv4/g6lkzO2qSxvoixscijAAACi0lEQVRIx51W2ZLcIAwcLnPf+P9/NS2wp5xkq0ziF3ZRC3R0i/l8fvi04oxx89n+RBi1sqB38TKw7lw7lNzDa3U0e57bHhJ4d567HlqGC08eLAj9Gv8XD4/OwtsdhlfCW2s9LfWtuNrMhG1uLWP17Xh14LF4m8dx1OJ9jkm85YCYch5JzDVy81on3MFYkpTNYGmHHloYI+aqlNhlh5ZSvmH1AyBUwPcMX+u/clW3XYvA2YgRGXyZIZX6PXv0l3GlV9Jp9FKcK7mytJIg8xEeBdaBtV4P8tAixexnp73tY7ZBkrk9WEL8t7ZUDqtIA3jXa8sODSeUVqw5aztT+g4QeNAgM0PGTA0+EmfVeUvU0CEW4hX+lt/zseE7jJKDSmi0EUKBhr7EAMKPvNjOA9Fz4Yk/CFgcGRSdkYOHCLRxQfzKRN5SB/b5Ot8SzzQiKqcb6ipFdCfi1JI84OBdxzarhJ8RkpxHOQszl7bpn0GpmnXq6R4OfTkwwlyUWzeQg7rSLNhNUzEUEqJF3DMHeeXgqRIaYwHimLKipK8hMZOYVfJ5oKtIFSgXk5z4KcNx3GOIPGYfPigh9YGHBNQKVKbZhxMnqq+OHfWBHARZfWm1dnfSXfJuXInplse8wy39zkisdQ5kuLY+Bpc5hwGiH8Mx1rE24DFaLqXkfrNVkjk+8JPTPF0bUw+R9MCVuA8ks/xDyF/9orMqpBQeiob5RbIbmv7/TxvoX1Oqa914TsYg7QUWx8Y7R89JQY8mY6Dat1G5GARuMOgYHY9cbL1vNvc++fk67olLk4nrQSl7457OPqdeKn+vEwm4LPzW87DyXvqIyei9znHS+cZz9SxV+Qf8nNbjn37NkDx4+PmCXzHOLUMtjgmUAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-ptzActive{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAANlBMVEVHcEwgmv8imPwimPwhmP8imP8imPwimPwmmf8imPwimPwhmP0imPwimPwimPwimPsimP0imPoLqfILAAAAEnRSTlMADprCJBnU/gXwh3LiUKxAXjJ/ELXKAAACbklEQVRIx5VW2bbjIAwLa1jM9v8/O7Iht+09MwPlpS2VwLZkJ9f1lxVG1joPdZ0u1ZIxurhv8DH6dMpQI5t439GAEc7xYPgTRnAtV7plReRhd5kDP8+fd+wzt70yPhIJzWT7/6DCJCAYLReZPMIuJC4pClSktHqbNpJO1aTmmAme3cvtWs9NSfq52CPhnJNjlbVHwgmWSWGHeQHcKFjj7fygwidf2fa4mbVOuladcnsoDn9/CKKAWf6HEtpANiJvUp9Fgr0SvocPP/sqjOCK9ktpyCAo4Kt/d4n4/6baseNKAt5XrauPN1A4ZLAd30RUI4kNfELVx+yd3lrXYIg1Bp/B/dGs5N8mPsJoVyiwkofQSlm+K9aiwmoQMDoI4wefoK/rhkESrSuV+BR2i6HZH9juLzxCtBmn6rFmB66gNJQwBOSxncT/N3FAlxAoLQfJDy2+6rMLI3aznl9fhOcGSdZzoNhex2K3TXJEVe3M4Zb6QhHuJc5BWakYIqqietcPw6FK+EHofUHRqtLEQ5X8dKVENXXgEpLOpcjwkFI30YFVHE8fyx2ShZJ/ydTKlZwZ4BCOv/af9uMi0DILe4kieynScgPbhai+DRDckTT6N6zJpI3HMghM3BMg+q9pHmwrbR0Q0D5wc0JHPy0E5Ur7Nf3Rk68LnR0NHfO28/T5P4cNr+uLFR7eEdiNxvMRbd5Onlqsis4iIVdme4di3akWOz+3T0aUnFWHa8QZrO51NO69MeLPafrNuBenxel/3d21j8k/jyxf+75OQabFxOtuT94G3Lpjzb0zBn2Dl0ep/waPidQ/p/XB60Dp7e8F/QO7WSJg4zEzdwAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-ptzActive:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAANlBMVEVHcExru/9ru/9qu/9ru/9ru/9ru/9ru/9svP9ruv9ru/9ru/9ru/9ru/9ru/9ru/9ru/9ru/+3uxEqAAAAEnRSTlMA5q0cR/PC/ggSKtN5OIhaa5izIOiAAAACV0lEQVRIx5VW2ZLjMAiMToNu/f/PboNsJ5PaKWv8kKRkWkDTQF6v/zyUfO+JX9sP1dKKd/v2acQcm992kbo1R4il7iHIif0BRNtCUOozH4cikAdt2YfjRLTnzN15/8I85wFCI2xDzlnyMLO7R0aFoTzbFGB+jIkYLkyePvmW8d2fs2ZfZuv8It9s6zvV5uSr2LmKb9oVh37QvpjYOcf8fcO3Cb1jGqOU0SvTTd6X2KUBLgKdLzaiDtE2n+isTh8/+AX7E6/Vh/NNSyfVm10RXIe1nxXRapmsDQB7SMnkmE0IxgqvUn0jteEf1T2CHXgr0gjZllGk1HoJL7lD7fRpD20CgJcwQ6EdSh3lLK0zVfuqz2WvPlPJh7HqnCQ4MysxoAYQE0vHub/toRuqzRy5pEUvbg62MzlJQn1MHBe1x7WSAddpNDSN1c9wRMhKfi2jrICwAIsQvHkD7Al41bOrBNC1Y7QdmSg15LDokGETJB/imxYJKfmFMDqI3FjUkDS3NSEDjBaxknRYSb+4nj4kC43W2OKrkqGB8jV2Fq03YiWbhHQTpUV1mMHXWYf8HlNX7ZUQRC6iwBAIK0r0ILr1UxqKsPattAkhwSIiMl76bT/Ft/TbTy2jHQoM7Cz9isHVMb6GILl0H4DGivbw7xNc8rAtiJn3O/qvD6JT/wgs7Y370ZSdWmbZWEKiG1lXImldW7Q1jGMbZW9tUVKZmGjjGvf+EXAqzWiX5OfFSHVp/zjbfGPc3whdcbTF67V2N1e79qYMru2/AnzVYFsdWFt/+nOypv8vQvoHou4gOtSrG5EAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-performance{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAnUExURUdwTM7OzszMzM3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3NzW8hQDoAAAAMdFJOUwC/gO8ra6tUQNmVEKtAA1gAAAE4SURBVDjLzZMhT8RAEIWnpS3tXcWdQ6wgOJIKfFdUICv6AyruB5w4EuSJCwRXgUBWkGArLsEi6JVuaZkfxawguSWz/p7azLdtdt97C3ASagp+PpeXPDjHVx7McMWDCBc8qPCZB4m44YFIeOBh+ckCf9z+sCCYqi8WvA/5Nwu26cfA+JRBs4h7WmX/tr9AXcQHAPdgzEPZh5idTQBOZ3zyiModISKQGxZ7dd85E0SKfMGjM7vXeI/5AE5LvrwpcK9IOrQYB5BlCsFIvjzgrkaSjsBZbmC9XsGsAxfn8gKXpL9sSlnQFBwFTTuaKWXgI5Ar+sfHTrW6DUDO+2jEEtCNQ9wL6gNujMvvdFS3eugxGT51fBXxTvF1wLLngbT0BGphKalAy0MQ3Z4Ha2V7UoMFJKkFVAWcvH4B0OJfd9YsTl0AAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-performance:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAzUExURUdwTP///////////////////////////////////////////////////////////////8/RimEAAAAQdFJOUwD2qdfrViyAwW2XORBGtSCsD6SyAAABN0lEQVRIx+2VzQ7DIAyDYfxToH7/p10CbQ+T2s33Rb1MijM3/qDG/EurolD9zSNRggBU0hFelMABmenfADhGkJDhGUFGgiX6I1AAQlDgZK/td8ELtQORiTmIq42I2bYdCD8NjzPmYQxO+rbn2S5qzEUF6YjkgcJude56X3s0SoL7LRAeylyaVPiFq0Jyx230mpeXmXVOHssRbhackhCq25Sny++xcBW1n6Snqw6BTgrqRP6kzfDcgmRLGonDWSf2OecwB491dOrEVZpNsxiyiHzWJ0Z+BbBwzaoWVb49HrqVfSkxHen7WNznLhzl8xpQlT3E9oHUtYaufB+7NY9g9ctbjFfj/h1DwXUrzDltMn1Ql9M814EQWPLmEBg4R7JSzpFQxTmSBMkLvJKOhFHfKEEnv3L/+qw3DuMPzAFH9pIAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-performanceActive{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAtUExURUdwTBGW2xKV2hKW3BKW2xGW3BKW2xGW3BGW2xKW2xKV3RGW2xGW2xGW3BKW2+P2IvgAAAAOdFJOUwC/gCvzq1RAleURc9VmrA9rXAAAAT9JREFUOMvNkzFLw1AUhU+SxtTahnZyC+IsIeAenDuE/oJQcS/FTYTi6hKcHYpDZ/EXBME95BeURpM0Me39Db4Mgk/u23umcL8kvHfOucBBKEr5eW95xoMjeuBBlyY86NOQBwt654HrPPHAca/YuUHrDQsG9eybBWa1+GLBqvS3LJiNX0vGJw/R0N6LJ+/f6x9I3uwG0D+luRU0FnmdCtBy6ZM7KvQdjgXwJYuNZVNrFfqF8IX+nFm/oBvyS2iZ8OWxgH4u1IZmU4kgfoa5E77c0jwhoTYCbTRFGE7QrXFCveCURkK/2cRBCp2gFYiyWk7Jw4AgXOmQZMAqa9uAeCP+JsViihtb9OKIPtBUuvy8jeqyHRpMhvc5X0W6Lvg60HrPg0DREySOoqQJKRbByRWbExaqlSoVwB0rwCLFwesHquttxhcsa64AAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-performanceActive:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAnUExURUdwTH3G63zE6n3G7H3F633G7H3F633G633F7H3F7HvG7H3G7H3F6xOzy+EAAAAMdFJOUwC/hKbvK0BT2XURZct0z00AAAE3SURBVDjLzZMhU8NAEIX3LiSUNqI1qBMMChFRgYxAd07E4CI6HQwzEUXgKjC4CH5ABAKJyA+oaHqBJO3+KPaYYSZh9nyfyuyXu7l77x3ASSgu+flYX/HgHOc8kC4wwikPMnzhQaTueKAiHvi43bFg0m4OLPBM9sWCjyb9ZsHm8NkwPiUQT8MjfSX/fn+F/C3syJf9YB7oLsDkjIaiHix5QCNbGBmAFPve+3lXi/0viLB3ZnmDz5g2ICry5daAvCbZhSE2oIsFeC35ssJljiQbgZitIY7nIGu4wLG+xBnpL5tClxQhCANR1Q5TSmCCQK7YjftOVbYNUOxot0EsHl0uwHdFfcD14PJLG9W9HfpMho81X0V8MnwdcHvkgS4cBcqVo6QKHQ9B1a6XY1zAcSiIFg6QlXDy+gEd714RcAqEowAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-face{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAeUExURUdwTL+/v76+vsDAwL+/v7+/v7+/v8DAwL+/v7+/v96T4QgAAAAJdFJOUwBAgSCbs9hZap+hQJoAAADvSURBVDjLY2AYRkAJCAygbEZBQUEBKJtpJhDAOJpA9kSYxESgKphuIaBuhASqyYzESjC2hDRilQBZ64BFgm2qMLPYNCwSkQpAwlMBWYIRzJsKYrKCxZSRncKSDFYzBSNY2MD2MqRhSFgqgClPAwwJBxwSbAk4jGKZDCKZp2JIsEOcOx0ztjQDYL5ENwsYJKrTsMSvGSgepzVg2pE5qSikPRPT8sopBuDod0ATZ54JsbZyBrJgAVAp1Ax2oPXMykjxYQaLA2CMoERUZQDMrgBUCc8CKIdzAvHJhyQJpLQLSu0TCaV2lPwBSu0KQy+LAwBuJj5UbruNggAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-face:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAnUExURUdwTP///////////////////////////////////////////////w2imYoAAAAMdFJOUwBAgBvfrFmcw4wrbtGliFoAAAD2SURBVDjLY2AYRkAJCAygbEZBQUEBKJvpDBDAODpA9kGYxEGgKphuIaBuhASqyYzESjBu79yIVWIP0NoCLBIsR4WZJY5gkYiZACRqFJAlGMG8EyAmK1hMGdkp7M4gkus4RrCwBIApFwwJmwlgqsYAQ6IAIrEAwygHHEaxngJbfhRDgg0sxHoMM7Z0AmC+RAPsJ4S5NE5gid8VoHg8sgHTDh/PzM60HkzLc44bgKMf3RKuMwoQ+dNIgswJDAycUDPYjhgwMCsjxYcJLA6AMYISUTkBMLsaUCVqEqAcngPEJx+SJJDSLii1HySU2lHyByi1Kwy9LA4AqflRBKNSA88AAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-faceActive{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAhUExURUdwTBCT2xKV2hGY2hKW2xGW2xGW2xGW2xKX3BGX3BKW23povfoAAAAKdFJOUwBAgRybs9hZLG6hrV9tAAAA80lEQVQ4y2NgGEZACQgMoGxGQUFBASibaRUQwDhaQPZCmMRCoCqYbiGgboQEqsmMxEowTgmZiFViFtBaBywSbEuFmcWWYZGIUgASXgrIEoxg3lIQkxUspozsFJZkEMmxBCNY2MD2MqRhSFgpgCkvAwwJBxwSbAk4jGJZAbZ8KYYEO8S5yzFjSysA5kt0s5YKc6guwxK/baB4XDYB045Vi4pCyrMwLa9aYgCOfgc0cY5VEGurViIJMhcAlULNYF9mwMCsjBQfZrA4AMYISkRVBcDsCkCV8CqAcrgWEJ98SJJASrug1L6QUGpHyR+g1K4w9LI4ALk0RHtSETFcAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-faceActive:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAhUExURUdwTHzD63zE6n7H7H3F7H3F633G63zF7H3G7H3F7H3F68TinBIAAAAKdFJOUwBAgR+Z31ipwW4yAjb9AAAA+UlEQVQ4y2NgGEZACQgMoGxGQUFBASibaRUQwDhaQPZCmMRCoCqYbiGgboQEqsmMxEowTmmZiFViFtBaBywSrMuFmSWWYpGoUgASXgrIEowgHtsKEJMDLKaM7BSWYLCaJRjBwloAploxJKwUwJSXAYYE2KUMVRgSrAFgKhTDKPbFIJJ5OYYEG1iIfRlmbGkVwHyJBlhWCDNrrMASvxageFw6AdOOqKCklrQuTMuzlhiAo98BTZx5FcTarJXIgglApVBr2ZYaMDArI8WHKSwOgDGCElFZBTC7GlAlvBKgHK4FxCcfkiSQ0i4otS8klNpR8gcotSsMvSwOAIs+RIlIrewIAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-object{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAAA51JREFUaEPtmT1oFEEUx9/bnRALGxE70btkZjcR7QMiRisRbCzEQvxCLGyEhCDRBC/RoNaWFgYLFUSwUBAUDKKFFqIQ3OzNJpfCyspS8HaejNyG87J7t7N3l03gttydefP/vbcz8+YNwhZ/cIvrhx5A3hFMjEAQBDwMwzOmAi3LWhBCLDTr5/t+ycSubdtLnPOncX1iAfQAlmXdNBmkru2RVgBSSspg+5kQ4lRjv1iAIAgqRFTIMIju0i2AWNvrAKSUowDwLqP4tACm9rWmbABCiFxXKimlhu0BtPFHtdc19wj4vn/Wtu3XnPOfWVByA5BSngCAuwCwryb8Hud8EhGNltANB/B9/5BlWXcA4GCMx/8g4jjn/H7aaGwYgOd5BxhjcwCgPd/0QcRVAJhM2l3rO3cdQEq5GxFvEdH5VsLX7aKIH8MwLLmu+zapb9cAFhcXt/f3988AwJip8Jj2L2zbLg0MDHxr/NY1gCAIpohIJ2V2BwC0iUd9fX2lQqFQ6cQvtJbICSGOxHhlVOdKjuPMl8vlqzrxI6IdWUAQ8Um1Wp0ZGhryiYghYrUtgDQionxJT0al1IwGkVJeBAANvieNDQB4joglzvmilHIXAIwrpX67rmuUamfKcxoTvnoQ3/dP27Z9g4j2J4C81KBCiC9LS0suY2yMiC7rttoZuQBEQhsiopfT6wAwUvv+hjE2XSwWP0kpRxBxgohO1kPmDhAHEgTBUaVU1XGc98vLy8eVUhoqbpPLPwIx6/wqEV2ovX8AALzZ/Ng0EWgQGa1iLQ8xPYCUyx8YHjt7EWjm2I7sAy0it7ERqNWEDkeiklIJg8pFWwC1ZC6SM9NYc1oXgcaiVlxVYmVlZW8YhlMAcCnFvOkEQPqqRBqASLTneTsZY3pjapZSpwF4Y1nW7ODg4Ie202kTgGiwSqWyrVqtXgOA6Zj0OhGAiF4R0azrup87dqDJAlA/eLlcnkDESQCI0us4gOdEdNtxnK+tfkHjA027AJEgKeUVANBROVd7p3fixwAwJ4T43kp4nR2zylynAOoE/JuAiPiDcx6kFb5pAEwF5zKJ2xXZrH9uc6BTUD0AAGh639Xo6bjUI+G/NglS+p3Y87wCY+y/2ozJSGkuRDLekQFjbEexWPxVryfpjuxhllKhNtwtAESc55xHx9M1hsR0WlcOlFLHTLyv26Ypi5hesyql5oeHh3VBeN2T6TxgCtXN9j2Abno3je0tH4G/KbtRT7VUKs8AAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-object:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAAAwdJREFUaEPtmTuIFTEUhv8fLSxsFrET3UIQRHtBRNdKBBsLsRBfiIWNoIisD1xfqLWlhYuFCiJYKAgKLqKFFqIgoiBoYWVlKbjwy4HMJXs3M5Nk7tzZCzfdvZPk/F9OHicnxIgXjrh+jAG69mCpByStB3AgQ+AcybmqdpJmEvv9SvJhqE0QwBm4lGikqD4VAaCMvh+R3NffrgzgB4DJDCPWpC2AYN+LACTtAPAqU3wsQGr/pikPgGSnO5Ukgx0DNJhRzZp27gFJBwE8J/k7B6UzAEl7ANwEsNEJvwVgmmTSFjp0AEnbANwAsDUw4v8AnCZ5O9YbQwOQtBnAdQA28nXlp/NG8HT1G7cOIGkNgKsADtepDnx/C2CG5Muytq0BSFoJ4DKAUxnC+5s8cSCf+j+0CXDBjAJYNgAA6+Keg7AwpldyAXqBHMmpwKjYyThJclbSSQBWfyIT5IF5kuQ3SctJzjcCiBHhxUu2GM24gRx1IGtj+gDw2I34Z0mrbXcC8JdkUqidFecEAj4fZD+A8wA2lYA8NVCSHyRtcGvouKtrg9EJQKHVB7Ht9ByALe7jCwAXSb6TZP+dAbC3D7JzgBDITgDzJF9L2u2gQoectV0yAD7IEffjDgC7plaVJQdgYotdLOYSMwaI2gETr51jD1SN6qDOgSobw/WAywltLxRVhBIxi7LxInaxUCHHFvmCpFkorWInoR8LheqsA2DB3LGIRdPIAznBXC1AIVrSKncwVYXUMQB2Sl8h+SYQOCanVaIBPJAVAM5aqBAIr6sAnjnh78s82aoHQkYlWYwz7YXXIQCLRK+R/Fg3BYcO4HnlhPPKIfefTYX7dm8m+aVOuNdP+1OoSow79KzKL5LfY4UvGYBUwZ0s4qYiazzY7RRqCtfZIm4qfGBrAEDle1e/0FDoUTKvUxiT3gfsaWlBbibFUsyDiKSkBK9nf4LkH19P2RvZ3cxUIVoEmCVZXE97DFXPrJY52JUy+lY3Ji2S8cxq4i3jsahk3QdSodqsPwZoc3Rj+h55D/wH5CHfQHNA9EUAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-objectActive{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAAA9JJREFUaEPtmU+IE1ccx7+/N7O0BwUn24UeRKOd7LalvS+I6HoSwYsH8SCtLf1jEqWoyLJVMfEP6nlNIgi6eFChCB4UChaU0h7aQ1GQrnFSdw9e/JNs9LTFzPvJZI27m8xk5k02m13I3sK+33vfz/u9N+/7fo+wzP9ometHF6DTGfTMQF+mYNqwd6sKlFLcK++P3WsWF8nkUyr9stAeTcXN624xrgDVAUgcVxmk1lbaGPIFyFocou9fSonYzvo4d4BsYQLgaIhB0EYA174bAFaNWpuFhrthxDsxQQCMrKXUPwGbvfr2BSglYh39UjmwXYCwy2kh4jqegd5M/itdaL8+i5vPwwB1DMDIWNuJcBbA545wAs4V4+YIiJQ+oYsOYGTzGwniDIANLjP+hkCHiglzNGg2Fg3AGB3/Ugj9NBO2+4ujSSaMeJ2uc+PbDhC5aK1GhU6CeY+/8IZz9E+CnSomBn7zim0bQF/m4QqbPkgDOKgufH4EAzdZaKny3vUP6vtqG0BvtnCUwY4p01oFcOIZuCJ7elKvvo9OtLyESMN7IzeViA3VC3TshiY4Wkz2j0Uyj3+aMX5shAIhuia4kn6Z+DSPu6xjiCotAQQRMeuXaJJYpqsgOetbcBV8TZA+ANxgotRU3Hz48SWr7800DjHL6VJyQMlqh/I5jYZvFsTI5XcRa0cA/sLDv98iieMv98X++Sj7aEBCPwjwD9W2LNMdAqhJnQMyc5D9DGBwRhzukK4fK/647q9I1hoE6DDAO+ZBdh6gEaQ3V9hi27JS3tf/u3Hhv20kpQPldsgthQw0fOcnpc3fVK2EhosEmE33x9LJwKxM54Lj/Ap0SeoCBPz+qVw7uxnwmdQFOge8R1n0DDg1ISaxqSbJy0oE2pTvqhStbOK5FQy2ka6vOTVkoL6o5VaVWHX+yVoh7KMAvvPbNq1mQNmNBgGoiV55fry3R+jOweRpqQMBMO4wxImp5Cd/tGynVQBqg0UvT3z4eroyDMaxenvdHIBvE/GJYnzgb69MtjUDboMa2ceHCTQCoGqvPQBuSOZT5WT/fb8luOgANUFGxkoQYVja+Lq2iQm4ygKnS3tj//oJf9+PamUuzBJqJsY59Jz/9wh6+iJpFoIKXzIAqoI7solbFdksvmN7YKGgugAMNH3vqp9pN+vhtq5VMqT2PpAbjwrW59VmVAYL8iASCfdGBvm/bpQPrCvP1eP+RpYrXA5XKgTaBkA0Voqb1eupL4DToFo5YLlVZfadtkHKIqrPrFLIsXL8s0k3LaHuA6pQ7WzfBWjn7Abpe9ln4C11Qo9Pmb2aMgAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-objectActive:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAAA8hJREFUaEPtmU1oU1kUx//nvthx4cJqX3RgGLsQBNG1tq8Z7axkwM0sxIX4hbhwIygibfPMeyapunY5iymzGAURZjEDAwoVm8SPhSiIKAjtQtAmiooLiyb3yEtNm76+5L37kjQp5C2Te879/+65H+eeS1jlH61y/egCtDuCNSOQyH3cWirNHVIXKO6kYvqdenbxqVlLxS8J8Txp6Ne9bDwBnA5IiIRKJ5W2LDHsB2BmC6zqmxk3UkP6AbedJ4CZfTsNcL9qJ077VgHU8r0MID5V2EsCk2HEBwbIFdT8M/aGBkgaelt3KtOB7QKEnU9NsGt7BMyp2cOa4P8tY3M+DE/bAC7cK+xnxmUwtjvCCXzFHtRHiEhpC11xADOTjwG4BCLDY8S/MvhsyoheDRqNFQMwc292QmppEPb7i6MZgEdqna7V9i0HSNx/95MscRLgo/7CXS0IWS6VrFRs8+1ati0DSEzm18kesgGcURa+3OAfSGklY5ueuP9qGYCZzceZySKC1gQAgPkvXtNjpXatn258CmlYSOSSg/qwW+B8usH9SSM6Ec/lTxM7iR/3hgEh4Jpk2Kkh/UVikiP2MBUbAggiYjFfchajtB0QM/fuOFg64D8H8QHQTQ3CsowNT8cffdLnPn8+KyXPpWKb1FLtYJ0tbbU84asCyRYOEmiMwTtq+P63JJAYH9AfxTOFbUTlNXRyPlmTdpsAKlIXQZyDDJJGGbx7/l++BSHM5EDfg7HMm91CRM6B+fdqyA4AWA5iZd/++hWymDaid+OZ/G8EGgXB65DrhAi4Jw3NsORj5VRC4A8AW+tN2Q6KwKJM54b2HcD3EtMFCLojqVw7uxHwGdVQ992OjkC5JqSJPRXw2qlEsMpFo1OonMx9/7gE211z8iirLC1qeVUlxrLvtwgU4wBO+K2bpgCoVSX8ASqiRx682hgp/jBaL6UOBsC3pMTFdCyaaUI6HRyg0llicnptqWfdeTBMd3pdH4D/kyQupgf7HtaKZIj7gDpAdefxzOw5IjECoJxeewEQ6CZBpmwj+thvCq44QEWQmZ09BYjzLHFk4SQm/C2KIm3/svGZn/AFP+qVucYi4BbmbLnOb5rGr+zB6MugwjsGQFVwWxZxoyLr2bdtDTQLqgsAQt33LvdIe6UenvNaJUQqJ3Hi3ut+KSNLajMqfQV5EAnzRuZoEF8ivfZw74dqPbXeyP4MVSoE0DoAmkgafeXrqS+A08CpHBDTPpXRd9oGKYuoPrNqETlhD/w446Ul1H1AFaqV7bsArRzdIL5XfQS+AaeCtE+rbksUAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-occlusion{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAACCVJREFUaEO9Wn2MXFUVP+e96X5oa9UoYpUszrzzZlqLKDYhgAFaaKU2+M2W9Q8TNUQSQEsiRUjFVaHGNmoBMTUmaPwD2iIhqKR8WEBFGkLFUjbtzDtvpjYrVhpCXAtm1867x5zxvnXm9c28mR13b7LpdO65557fveeez0GYp1Gr1d4VRdF7lL3rui/n8/lX5mMr/H8wDcPwIgC4QkQuAwAVWv9eA4BJy/8sAHg7ALysf4i4DwAe9Tzvj/3uP2cA5XL5CsdxNiLi5QBwUkQeQcRHrNB/JaKZZuGYeRAA3gsAZ4nIBkTcAABLROS3xpjdpVLp0bmA6RlApVI5FxE3OY4zGkXRdkTc6/v+c3PZPAiC80Vkveu6Nxlj9ojIjmKx+GIvvLoGcODAgTctXbp0CwBsEpGHEXEbEf25l83a0TLzh0RkMyJ+AgB2TE1N3b5q1ap/dcO7KwC6AQDsRMQhAPiu53m7umHeK00YhlcDwC0iMg0A13ZzQJkAlKmI3A8ADyDi9Z7nnehVsF7owzA8Q0R+BABXIeJY1mF1BMDM3wSAcRHZ5Pv+nb0I0i9tEARfRcQdxphvFYvF8Xb82gJg5gsA4FkiyrylfoQVEScIgttyudxLhULhwSQvZhYAuJCI9qftkyrckSNHzs7lckdFZJ3v+0/0I2CntRMTEwNDQ0P3i8inAeCUPmLP8/Y2rwmCYC0iPl6v19+3fPnyvyT5nQZAmQ4ODv5aRF70fX/zfAk/OTk5PD09rcbg4/EeiPgPEfkUET2dALENEc+dmZm5cuXKlf9unjsNADNvBYD1AHAJEf1zPgCUy+UljuPsQsSPpfD/m95Is29h5rcAwO8AYC8R3doWgDopx3H2I+IXs17/XIEdOnTobcPDw3ry6zrwCKMo+kypVDoU01hreK8x5oJmZ9dyA0EQ/AwAhnzfH5urgJ3WVSqVd7iuu8vGTB23SLM+QRCoOZ/2ff8Ls2oXf9DYJpfLPSgiH+nGgfQKcGJi4szBwUE9+Uuy1orIQREZKxaL5WZadaiI+Ey9XtfbacROszfAzL8wxtQ62dysjdvNM7MGcSq8Rq1ZY/+pU6fGVqxYcSyNsFKpjDuOkyeizycBHBeRjb7v/z5rh17ma7XaSBRFuwHg/C7WPYWIV3fy9kEQXIyIu4no3bMAqtXqZcaYnUREXWzSNQkzFwBAhf9wF4v2RlG0sVQqncyiZWZ2HOfaQqGwr6FCYRhuE5E3E9F1WYuT83qluVzu74VCYWfzXLlcLuqDBYAPZvEUkYeIaBQR61m0Os/M9yDiG57nbW4AYOZnAeDWpAPJYsbMNwLAD5TOdd2L8/n8Hyy/FfbkV2bxaKgB4s89z5u1LFlrmPlSANhKRBfGAPTBrCGiatbieN7GSpoaDut3IvLa4sWLi8uWLXvVbvBUFi+NchFR00/V6+16ollr7AGpaj5JRCMxAL06VaGWNLATM2ZW4dc00yDinZ7nbdLvrLXQaDZ1iMi9vu9/ySb/BwHgTAC4kYh2ZIGw6ekbRJRDy+AlIjoja2E8X6vVilEUtdhoOzdBROdY4fcYY0Ydx0kDcQ8RXd90m+qVH9P/qwPTf9WcK5+YJmnemfmE67rnIDOfBwA/JaJuLEWDHzNrAPZwGmCNGnO53HYAWA4Aoykgvk9EX0uuDcPwBhG5KwahAjOzqqHqewNYMwhm/hMAXDMnAEEQaP76vTY39v4oiiZd1/0VALyzGYSI3OH7vubVqYOZ9fE3NEENin1LDdqkgZkFMBcVqlarG4wxv0mR4nUiWqLfHz58eGTRokXfdl33tnw+n+pVu1XZNLpZFbIq0fKIm6yIOhX10K8g4t1E9IDSa84wMDBwHBG1WNU8NIObDReY+RYA0EqDhhJqbQ4SkRYI+hotj9gCaDGj1Wp1lTHm+ZRdvk5EDdVh5h9qiaWZBhHXe57XCLKYWVXoygSPk0SksX1fw3r4FjPa4shsVqanP5DcyRhzebFY3Hf06NG31ut1fWQNT4uIWzzPu0M/d3gjTxPR6r6k/+/htDqyMAy/Y4zRPOCmmLn1zprYJ8eriLja87yJSqXyScdxHtKSi4YCVvgxRLyvjZCpFqhXQEEQbHccZ9rzvG/EsdBFIqKmVK1AYzQJl8Z/NH4PzHxV/NkCuAsRb0hb1C4x7xUAMx9GxGu0ONycD0yKyGcTuegLAJD26K4joh+nbczMe7QolZwTkZt939/Wq7BJeq2nIuIviUgr3v9LaDSdFJFjKR7vtMcYOxXVRWPMpY7jCBE1PGiz84k3F5G7fd//Sr/CW80YR8SROK2cvYFOKSUzr0PED2hGZYw5ISLHrafUx9QI2uICmFY1RORsx3FmEPE5EXne8zz1mn2Pjiml1d+ekvrmqHO+K3hWvvZJvb2ensoqCwmgq7KK1eGttnuyNqsSvVAAtGINAE9oF6hjYUsB2EaG6vWx2La3U96FAmAt28jU1NTqZOMjtbhrGxovZJXVFwJAXGYHgPPS6lVtS+dxY6PT41wIAFpe79To6Fj7j9PCdmX2TvF6vzYzLqtrgyX2MWk8M5sXcaNDRLQjeft8Vaxj4bQSLSJbEFHjsraNjZg+E4AS2obHTzRb0kxsvirXVm1vBoAT9Xr9y2kNjeQtdAVAF9kQW5PseW+zzszMjCcbGe1UsmsAMYOURveTc62nap1TRNYsSKM7eQIaO7mu+zkAWAsArwPA45oXdPNTAxutaillsTqoKIruW7CfGqRdpRaHReSj2lvo8scezyDiY1qc7dda9axC3W64UD+3+Q/fZENVhTDr2gAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-occlusion:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAABphJREFUaEO9WmnIVVUUXSuiuahopOLLIqTBBgtCC01Ty8JmS/sRVESBNgiVJVZaaag0WBlGYNEPrSyiQdTKskEjpNKSCBqlzAqJSAsbYMV67vt13/3uffe+93pvw+Mb7rn77HXOPntY5xEdEkkHAjgk1G8k+VMnpuL/oVTSaQDOBnBmGG3DfwHwXeg/DMC+ADbGZwWAZSRXtTt/ywAk2eDLAIwAsAXAkvjY6O9J/pk2TtLOAA4FYDDnxmdPAG8AeJbkslbANA1A0gkAbgJwKYA5AJaS/KCVySWdCmA0gFsAPAfgIZLrmtFVGYCk3QBMDeNfAjCb5MfNTFY0VtJJAG4FcL5BALiX5B9VdFcCEBPMB7ALgPtIPlNFebNjJI0DcDuAbQCuq7JApQBC6SIAiwFMJPlzs4Y1M17SAQAeBTAWwPiyxWoIQNJdAKbZbUjObcaQdsdKujHcaTpJ25ArhQAkDQKwmmTpLrVjrKQdANwJ4FOSL2R1SRKAwSTfz5sn1zhJhwP4BsAokq+3Y2CjdyXtBMDueRGAv32ISS7NhN+RAF4D0I/kt1l9fQCE0lcArCPpyNARkbQrAAeD81IT/ArgQpIrMyBmA3D4HkPyr/SzPAAzIzYPJflbJ6yX5ARm48/J0f+DdySdWyTtBeDtyDlTCgFEkrKvXVV2+lsFJmmfMH5UAx1fAriY5CfJmIiGCwAMSie7uh2Q9KRjPcnxrRpY4vP7hfGumcqkT/SR5POyjeSVycu9AKK2cRQ4vUoCKZs9J5ocFMYPrfDu2sgBn2fOgjP2e7E7tdopDeBpAF83irkVJs4dIslFnH3eVWuZ2IWdwDbkDZTknHAEySuyADa5uiT5TtkMzTyX1ONqE4ALtzJ5C8C4Rtle0pCoXg/uBSDJPjmf5FFlMzTzXNKRYfzJFd5z/PcCujRvKJK+iFppRc2FJDnO7k5yQtnLOb7tLf2RpIu9XpHUP9zmxAo6X3R5TvKfCmNt7zwAvztPJQBWA5iSTSBlyiRNAvBAjBtC8t1YkGNi5Y8r0xHPn0pHlrJ3JJ0BYCbJwQkAH5jhJL8qezl5HrWSW0NnVItbyP4kN8cE9ucycVh0+2m/nlM184drvkmyJwHgrbML1bWBjWaXZOOHZ8bMJeluzdts13I1WyQLSF4dzb/DpsPsJJJuaBpKtKd2oR0ZClwJug6vJOHfdTE6XlxPckAY7xbRbWceiHkkJ6Z201l5efw93T8dzkNP7d/Z8C7JfckAAxgI4AmSVSJFTZkkF2BuK/OkX/TKRweALIj7Sd6cfVHS9QAeTkAEALuh/d1Sl5klfQjgmlYBuEqdVQDg2KBTXgawfwbEDJLuq3NFkg9/zRMcUOIsJTuQrVB7AZiAataFTIu8mmPFVpKuNL1LTmB3u1kpyqqV/LUY7HYXisnqDnEqijipOEObVXuEpPtiG+dGxP83WZUWd3C95YIkN+hmGlxKONqsJel6pi2pO8RhUF0YlXQKgDU5s9xGsuY6kh4MiiU9bHRCUEmyC43J6NhC0rV9W5IXRusSWaywV98rnZURJFdI2huAD1mSaaeSnBHgis7ISpLD2rJ+++L1SWT3RB9ghqwmkgzKjX1WNgMYRnK9pAsAuAxYTNLRxu+5l1hYYGRuBGoWkCQzgu4L7kjOgP3WodRRIAGQGJen33VLch7GJr8HAIdCh8Q8yW3MWwDwWS2EkqvS/YBJ2UsyvehHAPIO3QSSj+VNLMkJzKRUViaTdNHYlgSf+jxJk8R1DY3byQ05GS/vMNaSSvii/VEkaxlUUjr5JMY6gt3QluX/eYZLlJ6k+KvUUkpyqj8+OirH300pALWiLSHAJJnVMK/kusqs9RqSTjptS3C0+S1lrF5TTX266uw0gxf2FTf1McDkUWVapZsAKtEqAcIu4FJhZBkT3S0AwVib4lxCspjYCgC+yLBf+0DXYnuRdBGAI5trK+efuouPInLXodMhtCGt3g0AKZp9YB5f1Yhe923JokaHs0sATK8XXnSUXXAkbWEuzd6oXm83ZkpKaPVpSY7J01l6eZFcdESX5cu3jjDWiXHBRLvpcV1WeLGRjC8FEAfbienx6JZmdZC5tttOBuBkeW3ehUZ2FyoBCBAurWv3ZdEPd/Ka1W5Td5FR5JKVAaS2OHvRbX6mJT41eE5TM52/6M6uQNDxlzvhAdga91gusat81cDVquurPQA4QS3s2lcN8rYyyOGzfLdQ8cseLsiWu7NrN1o17UJVJ+zW123+BfogD+TkdLQFAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-occlusionActive{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAACItJREFUaEO9Wn+MVNUV/s59+wOqyM7bFaxAsDBvFqnWak0MYhBQUEqUVssi/aNJawg7bxaKScFqqKyt0gip4sK8XUKqjX/IL43RlqBSxLZWYqwWdWNh3oIlFClkebMUaXfZefc0983MMjv7Zt7MLrvvn93MPec753v33HPuPfcRhukZt+3YeCndCQpeCO3kmWVTTg+HKbocoKHWjpnEuBfMd4GgnJ7ADAcCJzx8iUlE0AGcBOMkiPYz4c1kNPzXodofNAE9fvheQCyBoLvBOA/wHunSHq7CiXM9+BdWGj39nGuxq8dWYyJdxCSh8UKAFoIwBpL/CMidTmzam4MhUzaBmpYjNwmNVoFEA9jdSEx7zzZFPhiM8dotiduYeAFIWw2Wu6TLm7pW1n9SDlbJBK7d+rev/c8du5aAVQC/Lok2dEWNv5djrJBsTat9s2BeA9AiBjaN1s499eXyW/9bCnZJBNIG0AbQKCb8OhkN7ygFvFyZUGvHQ8R4DOBuSWgs5QUFEkiD8nYAuyuJmk5Hw2fKdawc+fGtHeN6mbcAWMxES4NeVlECequ9DoxmMK9yYpHny3FkqLJ6PPFTEG0CyyedWH1zIbyCBPQ2ewYk3ndMI3CWhuTsOhb6uMQTEBWfOdGpr+Zj6ZbNELjdaTQO+tnxda6m9R/XCa74AoLnO42RfUNysJjyuvYqffyo7QA/AKCXQYuSZnhvrorelpgHSW9LSn2jK3r9P/PhBhLY1V6ld1b/HuBPHDOyZricn/jsidEXRnXvIOD+SzaoS7r8/a4Vxrv9SFiJDQDd5NT13IeGGy7mjg0gELLs9QQsQAp3OiuN/wwHgbrfHh4ju8UOEH3XB/9LkvxAbm3RW+yrUIE/MbA3aRqPFyTgFakKcZCJfhK0+gdLbKz1aUjDaJWG5xfCYKADcB9MmtM+zcpksuELMiVn5Ba7fjOgxxMvgjDKMSNLB+tgMb0xW4/UVUpth7dnCnp8so9uJbaD0e3EIj/OqvcR8PY2VPGqJL6jlAISZD9//Op4+zWSqncwcGewLh8il5eeXVF/OFc2XVDpPXDqwezeqY9AyLJfIpbHiuXcYMP+EnqLPREVUGEzMxCDcFC6vUu7mqYf95PV40eamcSUpGn8SI1fmgHLPiUFL+lqjPw50EgZAjVbjk0Wwt0J4LYgNQYOVBE9VKza17QlZglJOx3T+Hofgdq2o3dJKduSpmEEGSlnfOxWe6pwsZOA75Sgt1dUu0s6H552Pkg2ZNm2EKLxbOPU/d4M6FbHBma+IhkzYkHK+ePpKa34d9Kc2pY7VmcdrpfQVNh8OxCT+TVnutGAOZQKlAUQittxIrrgmOE1aQJx+30p8Xh+AQkC01vtR8B4VskxtFlJc8pfPLw2ezokVNjcEIThjRP9zomG+zJLkE7NZnu2EFjvxIzbMzNgH3c1zD233DgapJwd17fYM0DYD8Lo9G/s9GpX1p9ffm2nZ0DDgUAstcsVNAGMWQBtVG80UAeACk3NxTuOaUzOEkg5KVwx4BhYBE237P0A5vYTIXreiYZXpWf1SDNIrCsIwfyCE4s8rA7/qV73EIBrADzimMamQBItdrVegQuOaVSQAui96H6WjBnjAhUzAnXWsXoJt1+Ozgy1O6Zxo+e8JnbBlQ1+JBiIJ02jqW824/Z8EN5KT6R8Uv1R6dzDyTz56T0Ut89UVmk3Ut1W+xZXYlsyapSSKTy4Osu+XwKv+xFWu0aBio1gXA+BhgEkGL9xYsbP8nVrrY4VDG7JklAOhyz7AAGzc3/L6oVa7Y80gWWDIhCKJ9YQ0TO+MybwTVHpnnB7tDcIuDqXBDM/nYxF1haaabX4ZS+8SFAJRa2lrGx+gukjMJgQCllHFxLkH3wc+coxjTHq95otn08mUflLltoTXU1TfKtqqSHrJ9cXQt6Cs+x+izgni6iicgrg0wBtdkxjtwfmHUSqTgGkmlW5jzrB9W0Xai37MQYWAZioml0ADjmmcfNQHPd0cxdxhkC/NFrXdvRWKeWH+YYY+HnSNLzQ0S37OXgtlpyHaYETC3sNqlrLfoOB+/IwzjumcdVQCQxMo/mFzHvD1ertV+UbIynvPttUv7/muS9qRHVK5Xqv0hLT2rOx8NPq/0JrhIF3k6YxZ6gEfApZx68Aqc4Bq7PgqjqDMMPHWCcTzUlGw+211pHvMcRrquXimEZDxvmlRPSyr5MFMlC5hHQrsREQ3Y4Z/oVXyNLNWd7mmMb0LFiOc374Ddn1oFv24r61obCsRAuBVvgpFTqYl0/A/pyJlqnm8KXtdKt9glz+Qe5ZtNayP2ZgwKJjRiwZMyw/w7pl71JNqQHrh/nRZCyyoVxn8+W9fqpGrzhRY5IXupdCJvEiwMfzK57vYswc99KxKGdDCHaihldB+xWfDDiDNyfNyMqhOu8lD6860+TssbKkI6WuSj3oW0yYSZBnwHxKEc3dtGUbYF5Xg/k6CNHDTB9o4A87zfBHl8P5okfKNLvyDvV+BC6Ho4Uwih7qlVK5bZWRJFBSWyUTw+sJtLCSMC+oEz1SBNIda+xj8J6ijS1FQF1kdLtjVYE6ns3thaZ0pAhkMtvkUdq5OfkXHwWau96FxsdBbfWRIJBts0vCLX79qoKt8+zFRrH2+ogQsGwudtFR/IIjeyws0GYvtl8fajbKttVBaM7WGD/MwMuL7EUHwBuRoqeGq2PdV1C9TjSvBWh1sYuNrHwgAS+9pi88tgIYx0TPDFfnOhO2jwI4Iym13O9CI38WSiLgKe1qrwp1VjePxDVrsq6nOf8io1BIlk4gg5B/0S01emew/VSvz+ny3BG56M5/A6odz6T9kIB5DHwFxtsssbuUTw1IYDEI8wm4koF9xO7LI/apgd9UquYwS74HzHeU+LHHeyToLdWcHWq2KjuESjU4Up/b/B9u0kgtWdqPkwAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-occlusionActive:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAACJpJREFUaEO9WntsW9UZ/33HaZJCX7S5TmhalQ1NaAzGYJUgvk7pe5QKusGa0v0xaZvQkKBbK60BWl/b8XWKaLWtUJg6IcG0P+hrCMHWlcdK2OLrFPFYGdU0ac+q9BHfFPoC2sQ+33SufVPXvfa1kyaWolg+3+t3zznfd87vu4Qx+nQfONc8yF+0KvP1NPHoxjsm9Y+FK7oSRo2+fh0I3AUpFwHUyoxWIv6EiI4o+8w8m5mmE+EowEchxH4g95rZ1myN1v+IAUR6M3eRwCqAFgM4C8ZeZuyFmHDk9OmpH2+7my4UB7fmj9wwderpWZBDs4mwHOoPmAzwn1hiV7I9+NpIwNQMwOjtvwWC1gLUwVJuIfA+s73lnZE4N3pP3M6gZSTEeoB3Q/JWs735w1psVQ0g/t6xq+T5uogkrCXQKyDebIaCf63FWTlZI525FUydDF4hGFtFYzYZnzvz82psVwVAOSCI7QA3MuMJU9d2VmO8VhnDsh8gwuMAnWfIh6p5QL4AlFEAO5ixp45yj8T1lkytgdUiH7dOBLMceIYIKwGs9ntYFQFELDtGQJyJ1yZDwadqCWS0spF05qfEtJWl7Eq2N8fL2SsLwLBOtgEybeqa7yyNJthYjEVucSYqRN1HCX3GS6W2DMtmQIRMfUaflx/P4GJ9x6+Tsu6/xGJpIjzjzdEEWEk3tvtQvZzVsgPM9zEwJJhXJMLBfcU60dTJJUzyDSGyX+pqu/Z/pfYuAxA7xPXytP17YvowEdY6xyr4dekjEydxo0oG9170QadY8neS7drbl4KwNzPxLWKqdk/XTTRYPHYZgGgqs0kSLQsExJ1dd8w4MxYAOlP25AaCCv5uD/vHIHP3FdeW2IGTU3I5+WfBvC8RDm4oC0AVKRbUR6Af+u3+kQJ7rPfwNXVi4k4CLS1vg/+Vy/H9m+Y1/82VUdmQwc+T5LbiYnfJDBhW5gWAGk1dWz3SACvpxd8705QbHNwJ5kV+9r2yj2HZOwA+b+rBH7j6wwDyZxt6CcThagqIXwCl47GeTIusJ7Vs7vTTZfDBHGVXPxGa+Y9i2ULFTrHk+92z00UAlv1bSPmfSjnXz3G58c6ej2c1TGjYCYLub4P6JAZXd+szD3vJRnr74xDiy0ld+74aHwZgpOzjknhVtx78i7+T6iU2Wp/OEZzdBcLtVWj1BJB7oFK132hl5gmmXWZYu3YYQDQ9sIiZt5u69pUqnFQtErH6rycWKvhv+ioR9l2QWLU5rJ31kzUs+59E9FAi1LTfmYFo2t7MzFebevBhP+XScTWlVCdOmG3a9uKxSMq+QRB2MvANX5vML4shraNrAWV9ZQEYVuZZIvosEdI6HQCGlUmzpA2lBcTPWLTPXscSv3DkmOeZ4WCv+ho7cPLGXC63i0A3+dnIj9NvTL1pOLP46UR67fkkeJOpB0MFAPZhxoSFSX3av/2U3XHD6m9j0H4CTSz89kmgoeGG+NwpA3kH6PGzxcAOAbQyMI8IW9QT9dNR4xHr1PWEobdMXZvjAIik7OzpM01Xl14DKxkzLHs/gIXFMgR6KqE3rXVsqqUlRKy8DX7e1IM/2nCgvzmQFQdBaAFjnRnWtvqBcK6nUwY+S4a1OlLswfnc5x+ZejDop+iOq/VNhEtytLOKwIeSevBmFXxgQt3u3FC2wwsEMT+bCAcfGbZn9S8liNcdG1J2qf8qnTsps/ApTe+Glck0Bq66mWJ99m3MeC4R0vwzRcFYpM++lyRe8QKsTo1STtgC4KsiQB2lIAj4eULXflaqG7UG1jD4aReECthI2z1gzC/+zdWLpu33ifDgiAAYVn8nIJ70BBAQX/siK480CLwKJq0YBIG7E3owUm6mnc0/JJ2VoBKK2ksXZ6DkhOoCGMkSilr2cgb+4BHIOVPXJqvfN1rH5giqT0gORLv1azyrarVL1ktueAl5beKiLKL4nuMg9BPxtkQouEfJOxeR1ubjAKaXbOJ0Qm8aPi5E0gOPE7ACzLMAtBJwMKFrt44mcKV7ySZWPxjWpWk0lrLnSsK7pY4Y9FhSb3KWjpG2fwmGk3HcD5NYlgzNcAgqI22/CsY9JTbOmro2ZbQALkujpYWs8IRVSa8vdUa53OLEvJb9sZ5Pp3F9tsettAyKJPWm7vwDKbNHCG+bIW3BqAFcVsj6BkzkuNEMa+td4woUQG0ezgYCCCyI69MPRdL93yYWLyvKJRnWOpy1n7ZXC8aLXkGWy0C1AjJS9hYE6LzZ1mTkK7EiZ6V4ztS1G11jbnCegRB3uPshms6sdL87+8nKPE2gNV565S7mNQOw7L9DyAcVOXzxOG3ZRyBz3y2+ixqW/QEAj01HD5t606+8HEdS9u4CKVUyLB819ebNtQZbKq/4VIjA70xdm63GigBkXmDJhy+reB6b0b3u5fO0nA8hOKlrTgUtLj7Dmxu8LakHfzLa4J0Zdo4oNMe9VlZ1pYyoUk/i62DSAc6wlMfzpf7ioc0lwAqsxnUEugDB7yCLd8127f0rEXzFK2U+e9R2qfcCcCUCLWej4qXeAVAjrTKeAKqiVRQItQQgaLng3BI/Jnq8ACjGWlLgTUjeW5HYUgCcRsZgfY9UG7qQ28tN6XgBUJlNCJoj6gcXlDY+PMndwmb5wI9WHw8ALs0O4tu8+KoK9Hq+sVGJXh8PAHl6vXyjo3KDo3AtLEezVzqvjzYbubQ6A3G3xpQ5nlR25TY6iLGF6kRyrBhrNwrFRHNWRpiwvlJjw5WvqvuSb3gEfs2gIAFPjhVznU+VeJTAGSFyP/ZqaJQ+7qoAKCXV+OBTdnw82qw0TYuXNjLKrZOqAbgGShvdLOitkfKpiuckyQvHpdFd+gQUHQ9B3yPGEhDOAfwGS9pT1asGgldCNTgYk5igCtSL4/aqgddUKnIY4G8xc7ialz2IKAXQ64qcHW22qnkJVetwvF63+T9nAHfjRfzL0gAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-logSave{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAAA1RJREFUaEPtmT1oFEEUx9/bFLFKJzG2WiknZObFxlO8RtFC8SuFWlrYGBtRRNCghV9YmFQWgoVaaAS1ULRRRKvbmYBBQezVYBOIGFMkT0ZuYXPufOxdspuDO7jibmfe/H/z3sy8eYvQ4R/scP3QBSjbg8EemJyc3La4uHgUETcx82YAWNum+N8A8IWZrxLR41ZtBQForUeY+Xarg/j6MfMVIrroa5f13AuglOJWDLfQ56aU8mzefk4ApdR9ADiW12ir7RFxXAgxkqe/FUBrvYeZXzQbY+bh3t7ed5VKZTrPQM2eNHYQ8VGGjTtSypOhtq0AcRyPIeKptCEppTfkbAM3AxhbcRwfyYJg5nEiCvKEVZBS6hUA7EoEIeJ+IcTz0JlpbpcFYNrYIBDxlhDijG88lwd+IGJ/YoCZ1xPRd5/BUA/Mzc31VavVWRcEM18jovOuMV0eWLL7tBM+RoBS6isAbEzERFFUHRwc/JD8dnjishDikg2iSIAJADiU8uhdIjqRFuZYEweI6Gmuc8AWs6EhVK/XR3t6eg4zMyKiEQ/MvOSwQsS3iHhhZmbmY61W++UIp/dSyu2FAmit3zDzTjMoItaFEFuVUt8AYCB0ElLtZqWUfYUCZHlQKXUQAJ60AAC2Nbhia8AWglprAoB7jYQwmGXVACSKzRqJomgLAJjvBh/JqgPwCQ7dRAoPIZ/w5HkXIHQGQlOHvCd56PjdEOpoD6TShT/MfD19OXeFQCPnOYeIaxYWFiaGhoZGQ9Pv5nZthVA6XWjkOsMJRGj+b/IhIURtVQCkIUJvYKUCONLf/+67tjuw+T+rLlTYLmSDCDmwbOIbF6CgC1Vba8B3m3JBuMQXDuC4iGQy+MSXAhAKESK+NAAfRKj4UgFsEHnElw5gBGit9wFAUqy9kbco1vY2Gsfxsha2QrbV1K42gIimAPDvw8zTRLQuy0ZhpcU8AMZ7zPws1ee1lHJ3LoDlLu7mAcioZFuLvYWV130AU1NT/fPz8zuyqtWIuFcI8TKXBxo7QaEvOCyQD6SUx20T4K33F/iKKVOj7yrqBWhsiSv6ki8zNBBPCyHGfKEXBGCMrMBr1mZtPxHxEzN/jqLoYbr07oIIBvDNRFnPuwBlzXwybsd74C95KWhPrxIhsgAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-logSave:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAAAthJREFUaEPtmTuMTUEYx3//TrWdeLRUJCrReISGUBCvLVAqNGiEiASh8IoClUKyBQqPBAWhsREqnYRE9NgoV0T3yci5m9mzc2bmnHvvOXuTO8ktbs7MN//ffPP45hsx4kUjrp8xQNcezPaAmW0EDgFrgLXA0j7F/wG+AlckPW5qKwvAzE4At5p2ktHusqTzGfUWVEkCmJk1MdygzQ1Jp+u2iwKY2X3gcF2jfdS/I8l5O7tUApjZTuBlwNIk8E7STHYvQMCTzs6jgI27ko7l2o4B3AaO+4YkJadcVcdlAGfLzA5WQGR7IgbwGtjuCdoj6UXuyJTrhQBcnQjETUmnUv3FAH4CyzwDKyX9SBnM9QAwIWk2AXFV0tlYnzGAebtPP9OnEPkNWO2J2STpQ+9/xBOXJF2ogmgT4Amw3xNyT9JRX1gEYq+kZyGIoQGY2UXgAPyPt5x4V8qH1TRwDvgk6XdkOr2XtLltgLfA1qLTj5I2mNl3YEWDdTQraaJtgAVryMz2AU8bAFC1Boc5hYKbgJmtB6aKgDCbZdEAeLuOWyPrit+qFMmiA0gJrjr4yu1an0Ip4Z6Hss6hMUBu6FD3JB9PodwRGGkPeOHCX+CafzmPDUAR85wBlrgwQ5LbUueV3AHsaxGbmR8uOAGTPYga8f+0pG2LBWAOosYNrFOAqith6L5bdQee81optG7nHIjE8DlnVlB8EVa3AxCJ4VMAleJbB2gAERXfCUANiKT4zgAyILLEdwoQgcgW3zlAIWA30EvWXq+bFBvESTzQxFZqSyqdAe7i7xIAvTIjaXnIRmupxZoAznvPvTZvJO2oCzDQ5G5NgPKbRGWyt7X0egrAzFwedktFtnqXpFe1PFAsxLYfOEIaH0g6UjUAyXx/i09MQY2pq2gSoPDEsB/5QuJPSnLrMFqyAAqIQT+zloX9Aj4DX4CHfuo9RpANkBqJrr6PAboa+V6/I++Bf0in3kCazcMZAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-zoom{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAkUExURUdwTMDAwL+/v7+/v7+/v76+vr+/v7+/v7+/v7+/v7+/v7+/vxtcv/AAAAALdFJOUwBVdZCpGdZE7C3B0wnY6AAAAV9JREFUOMt1lDtPwzAQx20ghGwVsHVJhQRDlsywWKqEULsw8FhRJ2iWiglGGJvFCw8xIVVCSEwkKU25L4ftpOHs2Cclsu7v370smxCLecRh+xOH8Ja5kGEs/6dtYf1XONcWZurbwwRCQjYZ9l7uHAPAOBHpI1QC/QSY9g84ldmnAn7Z3htwsfZnvSe54eJd5hlcC7gccxTwSETZEr5+714vLRdfkFraVNtetXLpR7ha+mkHCV1Y/q8ZOW8AgDkecsIaAEIcGbIGyLUq4KcBYl0o7QAB4FZACBMrQKpaJOCZQqyAIhgayYEpgD1DZghXCvBTNJFKmCvgzmhcCEsBwMjSRx6BslYftRX0JLYKrAsLqzDC54EFMwvy58QhMM8hFNq0UpwET+sLC9/4BmEBTytyRCIbDoAEiR1Qp1dbRxfoyv9g3ogaKVuXhVZZbtqX6Ez6H23PxG4CM259QDwt/h8ABfK8nDqSAAAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-zoom:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAzUExURUdwTP///////////////////////////////////////////////////////////////8/RimEAAAAQdFJOUwCCyreU7KZVRCDaZnETLwj6WaYVAAABUUlEQVRIx5WV2Q6EIAxFoVBWxf7/184AOnGUsvTBqDmXrqRCzNou1kwGtyZIQNuiD0P6evWTURFUMtIoPO+iVoGIZPkMqkdKA0jZgoJ6suQc2EoiGBldOoQ9HSDk5+GTs1Fqo9IlcEpvNv3qv0MBhSalIFSv3+MCJCY8fUaCxasycrMu9dp5VTUhubXZ6CqsAvn66ZAsx+e8fON37oT3bZ4aiRU0kDlavGSrQI95HPBZACt8FuCL16IruBVkzBeBW+CLYHvyfiDQd96IpHpOMqP++fAq9VMQ7rwrM5+6gjwG/3y/D/m8ylPYpvpAeqO7DfvwMPO9D6jnBabG56YFZx77fEjD+9AyLdYEhp8nxoNh5wmJtfY8AS+wzIZZrJNcC4h3wV8IH1p87Iyfnc6XVaAd7ICHAsebw97zgDSxZ/yvViiPuYUfTe650knM2+7Ywz9yOCklzohLOwAAAABJRU5ErkJggg==\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-zoomStop{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAMAAABg3Am1AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAzUExURUdwTP///////////////////////////////////////////////////////////////8/RimEAAAAQdFJOUwDjmTC6Y0KBVakX8s8iC3JYR1wyAAABVUlEQVRIx62V2baFIAiGcwJRS97/aU/Zbtht0VzrcJOuvl/AAabpXy1RF0F/m2h2PX5mzucEuO9gMon1MY7KlG8w4LImC1XFQmxDGVkm1DYpXi0qS1gXTEGzKoqNS1ajAxPqJDikFLcFS+aukYJffRfXnAj9kYUXFVi86wzzPjuCFRXBwS3KaC/Pt02Ww7uf1jz3BVYtX/OsyDQFDn7yswPXb+Ovs39eJJWwxrOXV4thgC+//QBf/uMAXwAa4AuRBvgdWR58CB2B+eaRG5e8QO7Jn2sIAvzhbdsDnXzev8q0BengGXv8EciH3y1CY5++yI/BRNx82g/La7lc7b0Am/tUC6n3HoZ4SbDyy5BgzSNivV6wbPXeIPOpHlOU+CjkTQKvQOpXdRdJvn+5xttWQa4EZUOzluonT6+q72X6Zb0+T/lNhQd7hg9vm8LWUhW5gT4iN7c/0ZUo8Q3AttYAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-zoomStop:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAqUExURUdwTBKV2hGW2xKW2xKW2xKW2xGV3BGW2xGW3BGX2xKW3BKW2xCX2xKW2yDN61kAAAANdFJOUwCAVKxm3T/DmhrxKw97YQBGAAABaklEQVQ4y2NgIA6wGqCLMCqAqV4HdIm9F0Ek22UMM9hiG4AkVzQDA+cmx+YyJJnDlw8wMNQaNpfevbu0QgxJgrN3JQPD3VvhjU57kESnOZqvvXuJgcEXxVFMEkDt1w11gZYwoVgue3d5SzaQSgCydZG1TMkBU8vBuq9guBcqNBNDgj0GQrMEb0CVYEmA2ViANRQZ795tQPZduADciXcVUNSthGu4hGrAJawagBIXsWoAClzGqgEocRWrBqDIrQNQDTPRXZ8A0cB41wBNQgGiQfbubTQJAagGDB0GIA0OsnevbkCTCADiu4J374ahW34JqAEIbiXMRJOAgkvMKJYgJBx6717AKiF4F4eOu+jxgQCXGHBIKPDgkBC8JYCURJDtQHYWN4oEcvpZiyR+C9lZtkgSKKG1CyGOGrxItleixjrcLFBWRAYcMIkG9NQMiY67N7AlcxAQwMwA6SBxM2xZQ3jtVRPsRQ83sg8ACMIUxzzE8wsAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-close{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAeUExURUdwTM3Nzc7Ozs7Ozs3Nzc3NzcrKys3Nzc3Nzc3NzePZJxkAAAAJdFJOUwCtKdPBGAmNTt3jdDcAAADfSURBVDjL1dOxDoIwEADQgwR07GTCZtI/IGHgAzBhM9EPkMTB0Y3V0ZXIcn/rtRR6pefgSKeSu3ellyvA9lZ/5F9p/3K7PZY8oPG5BD6MpPUSgIITzdIStifAshjRQV1PCFT8TxaicTzzwEwINOEdHVmDmcTAkRhMhMAp7iQRjcMtDhCp8SA1v0ARGIIK/gnkv0p1OBTS4QRUIpE7DiYYXTBrzcld3JIrAarXrps4AVNwRSZgExoJmIyAaAdsShUMn/JF2fh4YEkpAcgvnuwYCIb6EbbbP4PsDfLD2dD6Av1qTvAQlzUTAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-close:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAeUExURUdwTP///////////////////////////////////8kBMKEAAAAJdFJOUwCt0igUwJdJePGbgLgAAADcSURBVDjL1ZMxEoIwFESTCaOWFDapqeicnAALTiANnaWlnVewDTMW/7b+aCAb8jkAVIHN2/lsNkrt73lf8M08nnF1pAYFR/dFmAAx7SIoi4iDbRrWDMAuQFzmmxAGbjjJgjj6dCjMCAND/o8RWQMzUgIRKYE/wsC5TJIRR74rBUZaqqXwLZEXT0WTDGwLW1aavJWQir9qadw++NgykWoMNtcykh8Q5EECgr5C+jjpGjHjPGhPU5eVzyfPJitfnUyhPg6ywMKZ7BygcYcsPCj1Kc8uXYPqpeSLs6PnC4w8S+8OJ9MLAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-icon-narrow{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAABm1JREFUaEPVWWuIVVUUXmvfYRosHzUZUUqKjkSXSe5Z+5qJhfawLELCR5ohPhKjjB4EWgkpkopRSWEPS1Mqy7Gkd0iPGaKy6ey9Z3z0opwEQytSMiS5NZ4Ve7j3dubMOfecO96buf/cx17rW+vba52919oH4RQfeIr7DxUjoLW+BhHHAsAAZh5gPwGghpl/TKVSHZ7n7T9+/Ph+ANg/atQo+1mRcUIEtNYzAWAiM09ExLPK8KgNADYw8wYp5Z9l6PUQ7RWBvON3AMClJ2Kcmb9DxAKR33qDVRYBrfUNzLwQESf0xliUDjPvR8T1RLSsXNzEBIwxc5l5fYSBnxGxxfO8bQCwN5VKHc7lcofr6+tzR44cGcTMg4UQgxDRfp8IAJdH4KwlooXlkEhEQGv9EAAsDQF+GwDWEdE75Rg1xlwFADcx83QAOCOg29SvX79ZDQ0NuSSYsQS01h8BwBUBsF45HnTIGHMBM9uFmR2Ya0bEWxzHORBHoiQBY8xmZp4RAFlNRIvigMuZN8asYOb7Azq7hBCTMpnMvlJYkQSMMUuYeblf2fO8ZdlsNiyVyvE3VNZ13XFCiGb/JCK+6DjOrLIJ2N0GAN4KKG4hIpuzVRtKqf6I+HvAwAIiWhdlNDQCSqntga3yUyK6rGqe+4Db2tqu8zzv3cJfiPiLfQYdx/k6zH4PAvlD6iWf8B7P8yZls9mO/4KAtaG1fhgAHvCReN1xnClJCXweOGFLhrAapJqamlLDhg372H9eMPNMKeXmoL1uEVBKTUXEJp+QJiJZDSfjMIOZgIibHMcJbrfdq1Gt9WMAcI8P/D9f/YJtpVQfW7n6isSDRHReyQhorXcCwMV5oWN1dXWD0un04bjVqta81to+i7bi7RpCiAmZTOYDv71iCu3cuXNQZ2env07fRkSTq+VcEtyQ7XwNEfkz5N8Ucl13khDiDR/b+ZlM5vkkhqopo5Q65Euj74loRGgElFJ3IuIThUnP8xqz2eyeajqXBFtrvR0AiuU7EXXbeIo/tNaPAMB9BVBmrpVS/l3KiDFmBjNPKbMbK0A+6zjOq3EktNavAUAxlUsR2AIA0ywgM3dIKYfFgRtjmph5apxc2Dwz/ySlHBynq7V+wV+tliLwPgBcmwdsJaLRceDGmD3MnI6Ti5j/g4j6x+lqrZ8EgGKTE0nAGPM0M9+Wj8BhKWV9HLjrukuFEPMBoMf+HKdrm3oimhcnFyy1IwkopRYj4soCYF1dXf3JPAMKfiilNiFisaSOJOC67nQhxCu+h3i0lLI1boWqPW+M6WDmoXk7+4io8L3rr+IuZIwZycztPocWEdHqajtYCl8pdSEifuOT2UhEc/w63fZUrfWvADCwixlii+M4408mAa31AgB4xufDHCLaGElAKbUZEf09cAMR/XCySGit7TXO3IJ9IcTQYI8cLKfvQsQ1BYVq9sBxi9La2jqipqbmMwA4Oy8bWtoHCTiIaBua0/JKB5hZSikPxhms9LxSah0i2i26MHoUct0eYt+2tRIRF5/MKGit7YFqD9auYU9tRBwfls49emJjzEBm3gEAxVKCmadJKbdWepWj8LTWHwLAlb75hUS0Nkw+6lbidkTspoCI6aibgUoSU0otR8QlBUxE3Oo4TleNlpiAFQwWUfa/XC7XZ8yYMccq6bAfS2v9OADcHUidqUT0RdkE8iS+BIBsQHkkEe2qNAml1HOIeGsA914isqQiR5LL3aMAcHoAYTIR2av0Ex7t7e3ne563mplvDoIh4le2xC+VurEE8pGwh1m3/sBec3ie96iUcndvWLS3t4/o7OycK4SYw8znRKZIDIlEBPIk7IrfGDB01JIAgJZsNtuShIjruplUKmWdtidsMLK2lFmFiPP8fUapSCQmYJ2L6cDsNfh7iLiXme1VjG3GOwFguOd5DYg4HAAa7O/Q3QTRXiavchxnhzHmIgCw3V6xWYoiURYBa9h13atTqdTssJxNEoEImQeJaIV/LimJsgkUjCilLrGXToho33mFrmoMod2I+LLneW9KKb8Nk01CotcE/AaVUmOFENcz8zgAGAIA54Y4dAgADiCifejtjcQnSSIWRgIAniIi+5q3cm/q/c40NzfX9e3bd4gQ4kxEPFhbW3sgnU7/lcThJJHwV8kViUBvHStHz0aCme2qe7W1tasbGxu7rkFPGQKR50Q5q/B/lP0HjgOoT/ydvaYAAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-narrow:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAABS1JREFUaEPVmgmoVVUUhr+fosKC5ogGSsqIpKQwMqjIBpsICdMGQ7ISo4wGApugJCoxLClssElpNEuaI5okKjOI5onKhEQrSiKiaKA/Vu3zOu+8c88593qvj7fgcO99e6/h33udtdda+4khThri9tM1ALaPAQ4Btso9GwNfAyuBb7JHUnzvCq0XANuTgePSs00bFr0L3BuPpF/b4BswtSMAyfDzgYPXRznweQ7ID53IaguA7ROBGcC4TpRV8IRL3SNpVrtyGwOwfVYoaaHgW2AZsBT4CliXnt+BXYBd02d8D5c7rIWc+ZJigRpTIwC2rwauKZH6NLBA0jONNQK2jwJOAU4FtijwPgpMkRTga6kWgO2XgSMKkjoyvGiN7d3SwpxZGHsVOEPSmjoElQBsPwScVhAyR9LMOsHtjNu+Hri8wPMBMF7SqipZLQHYvgq4tsA8S1KZK7Vjb+lc24cDsfJ5ul/SlLYBpGjzVIFxsaTw2Z6R7S2BnwoKpkta0Epp6Q7YfqEQKl+XdGjPLM8Jtn088GzuT9/FOyjpkzL9AwCkQ+qB3OSPki9GOrBByPZ1wBU5ZY9LOrkpgDcLJ2zlFvYCke2NgFcK58VkSRFU+lG/HbA9EYg4nNE7kkb3wsg6mSWesEhSMdz2z0Zt3wRcnBO+wVc/0217WMpesyRxraSd6nbgfWC/NOm3OP4lRVowKGQ73sXIeDMaJ+nFvDF9LmQ78pR8nr5U0oRBsTwpLQnn8yTlPeR/F7I9HngiZ/A0SXcPJoDQbftHIHOjLyTt1WoHLgBuyQ3uKylC6KBS8UyS1C/w5F3oRuDSnLWbSPqzynrbkSdFfG6nGstE3inpkbrVsf0Y0OfKVQAWA5OSwJWS9mggPEJuhN5OaLWkqBMqyfZ9QF/4rALwPHBskrZC0pgGwsPFRtbNazH+s6TIfeoA3JqqwH/nVQG4HTg3SVsnadsGwiMznQYMiM91vKmgP7tuXjHVrgJwGXBDTuC2g3kGZHbYXhQVWva7CkCkyg/nAIyRtKJuhXo9bjuSyOFJzypJ2ff/XCqHdBTwXs6gmZLm9NrAKvm29wY+zc1ZKGlqnqeYzH0PbJ8mLJM0dpABTAfuyNkwVdLCKgDFGniEpC8HC4TtaONEOyej4cUaubgDFwLzcgw9q4HrFsV2pAxvANuluaWpfRHAAUAUNJsmpmhrjJa0tk5ht8dtRx0cITqjAYlcv5c49zJHKI2QmtEG3wXbcaDGwZrRamBsmTuX1cTxEi8H8qnEJElLur3KreTZfgk4Mjc+Q9L8svmtuhLnAUWGka06A90EZjt6UdGTymiJpCxHG6CqqrHVL4lKnMMkRaXWE7J9M3BRwXUmSnqrlcK61uLbwIEF5lGSou3XVbJ9F3BOQeglkgJUS2rS3P0F2LwgYYKkaKWvN9neGYgT//QSYR9Hil/lurUAQqjtOMyK9UEkWXMlfdgJihTn45CK1GCHChmVIBoBSCBixU8qKIrdmRuXG5LigqOWbO+fjA7jizsbqcxsINLsfJ3REkRjAAlEVQUWbfDncjc0UYz/BewJjCh8lgGNZvJsSctt75MabLUg2gKQQBydSrwyn63dgRYTrpQUdwR91BRE2wAyDbYPSk2nuPOKVW6X4t15EHhS0mdlzE1AdAygsFpxwX0CEJcUuwM7lhgULhW5VRgeHYnXmiBuAeI2SXHN272b+gKgzRKQrYFIBNdI+qOJwQ13oi8/68oOdGpYO3xpJ2LV/45zI/t3hSEDoBXYIQ/gH99H3EBePlczAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-expand{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAABvJJREFUaEPVWWmIXEUQrnqzZseYxGM9QBNF4hrJEDfzqieuAY/gFRXPyMYbzUHwQBQUzx+KeBAJKhrRqEhEQWMU75AYXREPlunu3bjxIDGraIwiuJpFkL26pMK84eXtm3lvZieGFAzsTtf1VVd3V9Ug7OOE+7j/0BAAnZ2d2UmTJl2cyWTaAOBA+TDzgYjYDAB/y4eZ5fNjc3Pze7NmzfqlUYEbFwBr7TUAcAEzXwAA2bROIeKnzrkNALBeKWXTysXx1QXAGHMVANwEACePxzgzjyLi057nrczn81vr0VUTAGOMRPtmRDy7HmOVZBDxL+fcyr6+vgc7OjqGatGdGoC19hZmfrKC8t9LafEWAGzLZDL9g4OD/S0tLYM7d+6cyszTPM+biojy97kAcGoFPV8ODw8vbG9v354WRCoAxpiXAUDyPUrvAcAqIno/rUHhs9aeCQALmflyAJgUlkXEf5xzp6U9G4kAjDHcKMejeqy1xzDz/QBwXYyNViL6ISkwVQEYY74AgLkRJcuJ6M4kxbWsW2sfZua7ozJNTU1HtLW1/VFNV0UAxpjlAHBHWNg590ChUJCINZyKxeLpnud1RhSvmzJlyiWtra2DFS+AuAW5bQDg3cja60QkObvHSGstj588fGVi5heVUktqAqC1Xh+5Kj8nolP2mOchxd3d3ec55z6I2JpPROvj7I9JodIj9UqIebNz7qJCodD3fwAQG8aYhwDgnpC99UQ0Py2ALyMv7DIiWvV/OS921qxZk5k+ffon4fcCEZf4vv9i1I/ddkBrfSUivhpiMkSkanXeWnsFAFwCADNHR0fX1nPwYzJhAxGdkwTgJUQM38l1Rd8Y8yYAXFoytoOIjqo1CFrriQDwCyIeEsh6nndsPp//KayrvANbt25tHhgY+AsA9i8x/JvNZqfmcrn+Wo1bazuZ+fRAjogSH8w4G8YYOYtSOAZ0GxE9EQvAGCMRk8gF9BYRLajVeeFvIIDdrnOpt3zfnxcLwFr7CDPfFdqupfl8/oW9CUBsa63/DKXRz0R0TCUAzzDzDcGic25WoVDYvLcBGGPk/i+X7wMDA/vNmzdvJPCrnJvW2leZ+cpggZknKKWGqwGw1l7IzOcg4swoX/gMyNbH6HnO9/3XkgJkjFkLAOVUzmQyM2bPnr1lDABjjJTE58sCM/cppaanUK4BgJL44taZebtSalqSrDHmpXC1ysynKKU+j9uBj5hZ6nShLiJqT1Jurd3MzLkkvgrrfxPRwUmyxpinAODmSqkdTqE3mPmy0g70K6VakpQXi8X7Pc9bCgBHJvHGrEsjtCxJLlpqDw8PTwt3bGEAzzNzuerLZrMt9bwB4lCjrtHSLbQaEa8NgGaz2cm5XO6fuDPwGADcHjrE7UqprqQIxa03EoC1to+Zjy3ZGZN25R3QWt+IiCtDDt1JRNLU1EyNAqC1PgERvws58DYRSY1VpjKA3t7eaUNDQz+Xtybm1UuLpFEAjDFyRp4N2a1cSsTlLgCkaqyjwBoIQMrnRYH+0dHR/Jw5c3pid6B0YO5GxIdDV1ZdPbC19nLn3GOIOAUA1hLR4rS7F/B1dXUd39TUJEOFQ+U7RNzo+/5ZUT3RfsBHRGloZCgrtIOZlVLqt1odGC+/1noVIsoVHdAiIpJHbTcaU+ZqrR9BxHJRtycnEZVAGmOkfVwXOo9bJk+efGLcdGIMAGvtYcz8FQCUSwlm7lBKvTHeqKaVN8ZsBIAz0qRybKMRc6VKDuZ83/82rRP18mmtH0TE+0LyHxNRUOKMUVuxU7LWlkuLQGpwcHDi3Llz/63XuSQ5Y8zjAHBrwMfM/cy8oFAoxFWzu9iSRou/xtQ5bUT0dZIzta5rrZ+XyUNYDhFv931/RTVdVQF0d3e3OufKtXdI0QIiklH6uKmnp+co59zycC8SOrzfAEBHtdRNbLaLxeIMz/O+H3N9Ia52zq1QSvXWg6Knp+f4kZGRRZ7nXc/Mh1fSgYhVQSQCEMW9vb1HDA0NyS0UHS/KLF+2+NNqeRp2rlgs5jOZjDgtL+wBEcdlEv0oIi4O9xnVQKQCIEZkTuN53uqgZ4iJmMxrPkTEbXL4AECaceldj3POtSLicVKayP9x0UZEGSY/6vv+V9ZaaVHXpAGRGkBgtFgsnpXJZK6Ly9l6Uqkkcy8RlUsY+S4tiJoBBE5qrU9CxKvlZ1YA2G3UkRJIr4wxnXPvKKXGnLG0IOoGEHZSnn5EvIiZTwKAowEgrh39U2orRJRDLxOJz9IAjdsJAHiGiORn3sb8Uh91ZNOmTQeMjIwcjYgHIeJvEyZM2JHL5Wr6+TSsMwoCEVf4vr+re2zIDqSJ5Hh5BAQzS9R/IiJpf3fRPgOg4jsx3sjsbfn/AH37LF5g3/BiAAAAAElFTkSuQmCC\") no-repeat 50%;background-size:100% 100%}.jb-pro-container 
.jb-pro-icon-expand:hover{background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs4c6QAABYZJREFUaEPVmXmoVVUUxn8f0WRzVkJlIWVGUmIJWWADTVaUldFclCnSQBQYNv1RSCWGVFRSlkRh0GBS2YBmJWGJUBHNNJiYWQSZfwTRxBfrce5jv/3Ovefc++4z3oKLD/fea61v7bXXdMQQJw1x/ekKANs7AOcA44Ddkt/2wJbk9z2wTNIP3TLcgADYvhw4q/gFiLq0ClgBLJf0Ud1DZfs6AmD7UuA64JiBCAf+BR4GHpH0TSe82gJgO6x9PXBqJ8JanPktQABzJP3VDu/aAGzfADzYhPnPQLjFUuA7YHPx+xPYHxhZ/Bt/nw4c14TP+8CFkjbWBVELgO2ngfD3nJYBCyW9Wldg7LN9cigKXATsnJ39HTi+7tuoBGDb3VI852P7QOBO4MoSGaMlfVtlmJYAbL8HHJsxmSdpdhXjdtZt3wPcWnJmhKRfWvFqCsD2PODm7PBdksJiXSfbJwDvZIzfAM6VFG+plEoBFNHmlezEc5LCZweNbEcSjMSX0iJJ09sFsDwLlaslTRo0zRPGts8AXstkTZYUOvWjfjdQJKnFyc7PgCmS1m0NACHD9t3AbYm8yNiT6wKIWJxm2JmSFm4t5QsA2wBvZ/liuqRFuR59bsD2JcAzyaYPJU1oV3nbF8fjAw4DlnTy8Es8YYWk06oAPJnF5I6sb/tF4LxC2CZJ+3VghGFAVK17JmdHSVqf8uq9AdtR+kZNsmOx4Y9I/5KiLGiLbEc4jLDYQ5IqE2aZANvxFqNwbNBNkh5oBiAsFpZr0FJJU9vSvNjcRQBRPKbhfJWkE5sBuBe4JVmcIemJ/xNAyLb9a+JGGyRF+dFLqQstAK5J1g6XFCG0berWDRQA8py0raR/et2z8YftiD4RhRq0naS/W2lv+2wgIkNEm5x630BRaufrj0l6tso6tpcAqSuPkfR1GYAoic8sFtZJOqgG8w+Ao6r2NVnfKCn6hJZkO4+MkyStLgPwJhB1etBaSRNrMA8XG1u1r8n6Fkl7VJ21/VDRBTa29nHt9A28AJxf7NosaXgN5lGZzgD2rdpbsh6N0MyqcyWl9si0Y0sBPA6kVd/wTnJAKNTlR/wUcEUCdBdJ0bX1UArgPmBWsnGipLVVFipb7zKAKCJHFXL6uV0K4NpiMtDQabakaGrapm4BsH0o8GWiwEuSosbqpRRARIQNyVq/rFcXSRcBxBt5NJHbvJQo812gVmOdA+sigCifpyX8x0v6uPQGCgDRWEeD3aCOemDb0XrGm9q1KKevrnt7jX22DwFiqLBX8X8rJZ2S88n7gSOBaGiiMg3aBEyQ9FO7Cgx0v+1ooiJEN2iapEhqfaispcyLuo5uYSAAbEf7GBOJBkXpcETZdKIMwN7AGiAtJS6QFIluq5DtlcBJdVy52VglD6nBa6ykLwYbge05wB2JnLckNUqcfuJbDbbS0qJxcJik6NQGhWzfD9yYMI9ucKqkGByXUtVo8ceSOmecpE+6jcB2XsqEiFmS5reSVQVgNNBbeyeMwioxSh8w2Y6GPzJ+2os0+H4OxPtr6rqVzbbtMcBXJZpGkTVf0qedoCjifCSpq4B9WvBoCaISQDC2PQKIN5GPF6MqjCuOsqOpn6bK2R5fKB3K75QpHpPouUAkvrTPaAqiFoACRMxpwuqNniE3WsxrXk++0EQzHr3rwVGSZP+WGTymD3MlrbEdLerzdUDUBtCQaDvSeXyQKPPZTrwpztwuKS1h4tZrgWgbQALkaOCy4hNrn1FHTRTxdmKQ8LKksjdWC0THADK/jtQ/BQhQBwBl7Wi4VNRWoXhMJN6tA7TJTSyQFJ95u/OlPlfEdjzOALI7EIVgzEfb+nyaGSh3p4h+Pd1jV26gjiUHuqe4ibD6eklRqvfQkAHQzABDHsB/7aMVT352GH8AAAAASUVORK5CYII=\") no-repeat 50%;background-size:100% 100%}.jb-pro-container .jb-pro-menu-icon-text,.jb-pro-container .jb-pro-quality-icon-text,.jb-pro-container .jb-pro-scale-icon-text,.jb-pro-container .jb-pro-speed-icon-text{font-size:14px;min-width:30px;height:20px;line-height:20px;cursor:pointer;text-align:center}.jb-pro-container .jb-pro-speed{box-sizing:border-box;text-align:center;font-size:14px;color:#fff;width:90px}.jb-pro-container .jb-pro-menu-list,.jb-pro-container .jb-pro-quality-menu-list,.jb-pro-container .jb-pro-scale-menu-list,.jb-pro-container .jb-pro-speed-menu-list{position:absolute;left:50%;bottom:100%;visibility:hidden;opacity:0;transform:translateX(-50%);transition:visibility .3s,opacity .3s;background-color:rgba(0,0,0,.5);border-radius:4px;overflow:hidden;width:-moz-max-content;width:max-content}.jb-pro-container .jb-pro-menu-list.jb-pro-menu-shown,.jb-pro-container .jb-pro-menu-list.jb-pro-quality-menu-shown,.jb-pro-container .jb-pro-menu-list.jb-pro-scale-menu-shown,.jb-pro-container .jb-pro-menu-list.jb-pro-speed-menu-shown,.jb-pro-container .jb-pro-quality-menu-list.jb-pro-menu-shown,.jb-pro-container .jb-pro-quality-menu-list.jb-pro-quality-menu-shown,.jb-pro-container .jb-pro-quality-menu-list.jb-pro-scale-menu-shown,.jb-pro-container .jb-pro-quality-menu-list.jb-pro-speed-menu-shown,.jb-pro-container .jb-pro-scale-menu-list.jb-pro-menu-shown,.jb-pro-container .jb-pro-scale-menu-list.jb-pro-quality-menu-shown,.jb-pro-container 
.jb-pro-scale-menu-list.jb-pro-scale-menu-shown,.jb-pro-container .jb-pro-scale-menu-list.jb-pro-speed-menu-shown,.jb-pro-container .jb-pro-speed-menu-list.jb-pro-menu-shown,.jb-pro-container .jb-pro-speed-menu-list.jb-pro-quality-menu-shown,.jb-pro-container .jb-pro-speed-menu-list.jb-pro-scale-menu-shown,.jb-pro-container .jb-pro-speed-menu-list.jb-pro-speed-menu-shown{visibility:visible;opacity:1}.jb-pro-container .icon-title-tips{pointer-events:none;position:absolute;left:50%;bottom:100%;visibility:hidden;opacity:0;transform:translateX(-50%);transition:visibility .3s ease 0s,opacity .3s ease 0s;background-color:rgba(0,0,0,.5);border-radius:4px}.jb-pro-container .icon-title{display:inline-block;padding:5px 10px;font-size:12px;white-space:nowrap;color:#fff}.jb-pro-container .jb-pro-quality-menu{padding:8px 0}.jb-pro-container .jb-pro-menu-item,.jb-pro-container .jb-pro-quality-menu-item,.jb-pro-container .jb-pro-scale-menu-item,.jb-pro-container .jb-pro-speed-menu-item{display:block;height:25px;line-height:25px;margin:0;padding:0 10px;cursor:pointer;font-size:14px;text-align:center;width:50px;color:hsla(0,0%,100%,.5);transition:color .3s,background-color .3s}.jb-pro-container .jb-pro-menu-item:hover,.jb-pro-container .jb-pro-quality-menu-item:hover,.jb-pro-container .jb-pro-scale-menu-item:hover,.jb-pro-container .jb-pro-speed-menu-item:hover{background-color:hsla(0,0%,100%,.2)}.jb-pro-container .jb-pro-menu-item:focus,.jb-pro-container .jb-pro-quality-menu-item:focus,.jb-pro-container .jb-pro-scale-menu-item:focus,.jb-pro-container .jb-pro-speed-menu-item:focus{outline:none}.jb-pro-container .jb-pro-menu-item.jb-pro-menu-item-active,.jb-pro-container .jb-pro-menu-item.jb-pro-quality-menu-item-active,.jb-pro-container .jb-pro-menu-item.jb-pro-scale-menu-item-active,.jb-pro-container .jb-pro-menu-item.jb-pro-speed-menu-item-active,.jb-pro-container .jb-pro-quality-menu-item.jb-pro-menu-item-active,.jb-pro-container .jb-pro-quality-menu-item.jb-pro-quality-menu-item-active,.jb-pro-container .jb-pro-quality-menu-item.jb-pro-scale-menu-item-active,.jb-pro-container .jb-pro-quality-menu-item.jb-pro-speed-menu-item-active,.jb-pro-container .jb-pro-scale-menu-item.jb-pro-menu-item-active,.jb-pro-container .jb-pro-scale-menu-item.jb-pro-quality-menu-item-active,.jb-pro-container .jb-pro-scale-menu-item.jb-pro-scale-menu-item-active,.jb-pro-container .jb-pro-scale-menu-item.jb-pro-speed-menu-item-active,.jb-pro-container .jb-pro-speed-menu-item.jb-pro-menu-item-active,.jb-pro-container .jb-pro-speed-menu-item.jb-pro-quality-menu-item-active,.jb-pro-container .jb-pro-speed-menu-item.jb-pro-scale-menu-item-active,.jb-pro-container .jb-pro-speed-menu-item.jb-pro-speed-menu-item-active{color:#2298fc}.jb-pro-container .jb-pro-volume-panel-wrap{position:absolute;left:50%;bottom:100%;visibility:hidden;opacity:0;transform:translateX(-50%) translateY(22%);transition:visibility .3s,opacity .3s;background-color:rgba(0,0,0,.5);border-radius:4px;height:120px;width:50px;overflow:hidden}.jb-pro-container .jb-pro-volume-panel-wrap.jb-pro-volume-panel-wrap-show{visibility:visible;opacity:1}.jb-pro-container .jb-pro-volume-panel{cursor:pointer;position:absolute;top:21px;height:60px;width:50px;overflow:hidden}.jb-pro-container .jb-pro-volume-panel-text{position:absolute;left:0;top:0;width:50px;height:20px;line-height:20px;text-align:center;color:#fff;font-size:12px}.jb-pro-container 
.jb-pro-volume-panel-handle{position:absolute;top:48px;left:50%;width:12px;height:12px;border-radius:12px;margin-left:-6px;background:#fff}.jb-pro-container .jb-pro-volume-panel-handle:before{bottom:-54px;background:#fff}.jb-pro-container .jb-pro-volume-panel-handle:after{bottom:6px;background:hsla(0,0%,100%,.2)}.jb-pro-container .jb-pro-volume-panel-handle:after,.jb-pro-container .jb-pro-volume-panel-handle:before{content:\"\";position:absolute;display:block;left:50%;width:3px;margin-left:-1px;height:60px}.jb-pro-container.jb-pro-fullscreen-web .jb-pro-controls{width:100vh}.jb-pro-container.jb-pro-fullscreen-web .jb-pro-play-big:after{transform:translate(-50%,-50%) rotate(270deg)}.jb-pro-container.jb-pro-fullscreen-web .jb-pro-loading{flex-direction:row}.jb-pro-container.jb-pro-fullscreen-web .jb-pro-loading-text{transform:rotate(270deg)}.jb-pro-container .jb-pro-contextmenus{display:none;flex-direction:column;position:absolute;z-index:120;left:10px;top:10px;min-width:200px;padding:5px 0;background-color:rgba(0,0,0,.9);border-radius:3px}.jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu{cursor:pointer;font-size:12px;display:block;color:#fff;padding:10px 15px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;text-shadow:0 0 2px rgba(0,0,0,.5);border-bottom:1px solid hsla(0,0%,100%,.1)}.jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu a{color:#fff;text-decoration:none}.jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu span{display:inline-block;padding:0 7px}.jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu span.art-current,.jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu span:hover{color:var(--theme)}.jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu:hover{background-color:hsla(0,0%,100%,.1)}.jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu:last-child{border-bottom:none}.jb-pro-container.jb-pro-contextmenus-show .jb-pro-contextmenus{display:flex}.jb-pro-container .jb-pro-extend-dom{display:block;position:relative;width:100%;height:100%;display:none}.jb-pro-container-playback .jb-pro-controls{height:48px}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center{flex:1;display:flex;box-sizing:border-box;justify-content:space-between;font-size:12px}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time{box-sizing:border-box;flex:1;position:relative;height:100%}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-inner{width:300px;height:100%;overflow-y:hidden;overflow-x:auto}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-current-time{position:absolute;left:0;top:0;height:15px;width:1px;background-color:red;text-align:center;z-index:1}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-current-time-text{position:absolute;box-sizing:border-box;padding:0 5px;width:60px;left:-25px;top:15px;border:1px solid red;height:15px;line-height:15px;cursor:move;background-color:#fff;color:#000;-webkit-user-select:none;-moz-user-select:none;user-select:none}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll{position:relative;width:1440px;margin:0 auto}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center 
.jb-pro-controls-playback-time-scroll.one-hour{width:1440px}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll.half-hour{width:2880px}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll.ten-min{width:8640px}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll.five-min{width:17280px}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll.one-min{width:86400px}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-list{position:relative;background-color:#ccc;height:48px}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-day{height:100%;overflow:hidden}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-one-wrap{height:8px;z-index:1}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-second-wrap{height:25px}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-btns{display:flex;align-items:center}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute-one,.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-second-one{float:left;width:1px;height:8px;margin:0;cursor:default;position:relative;z-index:1}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute-one.active,.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-second-one.active{background-color:orange;cursor:pointer}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute-one.start,.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-second-one.start{background-color:#999}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute-one:hover .jb-pro-playback-time-title-tips,.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-second-one:hover .jb-pro-playback-time-title-tips{visibility:visible;opacity:1}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-title-tips{pointer-events:none;position:absolute;left:0;top:100%;visibility:hidden;opacity:0;transform:translateX(13%);transition:visibility .3s ease 0s,opacity .3s ease 0s;background-color:#000;border-radius:4px;z-index:1}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-title-tips.jb-pro-playback-time-title-tips-left{transform:translateX(-100%)}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-title-tips .jb-pro-playback-time-title{display:inline-block;padding:2px 5px;font-size:12px;white-space:nowrap;color:#fff}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom 
.jb-pro-controls-center .jb-pro-playback-time-hour,.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute{float:left;position:relative;width:60px;box-sizing:border-box;border-top:1px solid #999;-webkit-user-select:none;-moz-user-select:none;user-select:none;text-align:left;height:25px;line-height:25px}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-hour:first-child,.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute:first-child{border-left:0}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-hour:first-child .jb-pro-playback-time-hour-text,.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute:first-child .jb-pro-playback-time-hour-text{left:0}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-hour:after,.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute:after{content:\"\";position:absolute;left:0;top:-8px;width:1px;height:14px;background-color:#999}.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-hour-text,.jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute-text{position:absolute;left:-13px}.jb-pro-container-playback .jb-pro-playback-expand.disabled .jb-pro-icon-expand,.jb-pro-container-playback .jb-pro-playback-narrow.disabled .jb-pro-icon-narrow{cursor:no-drop}.jb-pro-container-playback .jb-pro-control-progress-simple{position:absolute;box-sizing:border-box;left:0;top:-2px;width:100%;display:flex;flex-direction:row;align-items:center;height:8px;cursor:pointer}.jb-pro-container-playback .jb-pro-control-progress-simple:hover{top:0;align-items:flex-start}.jb-pro-container-playback .jb-pro-control-progress-simple:hover .jb-pro-control-progress-inner{height:100%}.jb-pro-container-playback .jb-pro-control-progress-simple:hover .jb-pro-control-progress-inner .jb-pro-progress-indicator{transform:scale(1);visibility:visible}.jb-pro-container-playback .jb-pro-control-progress-inner{display:flex;align-items:center;position:relative;height:50%;width:100%;transition:all .2s ease;background:hsla(0,0%,100%,.5)}.jb-pro-container-playback .jb-pro-progress-hover{display:none;width:0}.jb-pro-container-playback .jb-pro-progress-played{position:absolute;left:0;top:0;right:0;bottom:0;height:100%;width:0;background-color:orange}.jb-pro-container-playback .jb-pro-progress-indicator{visibility:hidden;align-items:center;justify-content:center;position:absolute;z-index:40;border-radius:50%;transform:scale(.1);transition:transform .1s ease-in-out}.jb-pro-container-playback .jb-pro-progress-indicator .jb-pro-icon{width:100%;height:100%;pointer-events:none;-webkit-user-select:none;-moz-user-select:none;user-select:none}.jb-pro-container-playback .jb-pro-progress-indicator:hover{transform:scale(1.2)!important}.jb-pro-container-playback .jb-pro-progress-tip{display:none;position:absolute;z-index:50;top:-25px;left:0;height:20px;padding:0 5px;line-height:20px;color:#fff;font-size:12px;text-align:center;background:rgba(0,0,0,.7);border-radius:3px;font-weight:700;white-space:nowrap}.jb-pro-container-playback.jb-pro-fullscreen-web 
.jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-playback-time-inner{overflow-y:auto}.jb-pro-zoom-control{cursor:grab}.jb-pro-performance-panel{position:absolute;box-sizing:border-box;z-index:10000;left:0;top:0;padding:5px;font-size:10px;background:rgba(0,0,0,.2);color:#fff;max-height:100%;overflow-y:auto;display:none}.jb-pro-performance-panel .jb-pro-performance-item{display:flex;align-items:center;margin-top:3px;color:#fff}.jb-pro-performance-panel .jb-pro-performance-item-block{height:10px}.jb-pro-tips-message{position:absolute;top:0;left:0;width:100%;height:100%;background:linear-gradient(180deg,rgba(0,0,0,.8),hsla(0,0%,100%,0));overflow:auto;box-sizing:content-box;display:none}.jb-pro-tips-message:before{color:hsla(0,0%,100%,.3);content:\"X\";font-family:Arial,Helvetica,sans-serif;font-size:40px;left:0;line-height:1;margin-top:-20px;position:absolute;text-shadow:2em 2em 4em #000;text-align:center;top:50%;vertical-align:middle;width:100%}.jb-pro-tips-message .jb-pro-tips-message-close{position:absolute;z-index:99999;right:0;top:0;width:40px;height:40px;display:flex;align-items:center;justify-content:center}.jb-pro-tips-message .jb-pro-tips-message-close .jb-pro-tips-message-close-icon{width:20px;height:20px;border-radius:10px;cursor:pointer;background:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAeUExURUdwTM3Nzc7Ozs7Ozs3Nzc3NzcrKys3Nzc3Nzc3NzePZJxkAAAAJdFJOUwCtKdPBGAmNTt3jdDcAAADfSURBVDjL1dOxDoIwEADQgwR07GTCZtI/IGHgAzBhM9EPkMTB0Y3V0ZXIcn/rtRR6pefgSKeSu3ellyvA9lZ/5F9p/3K7PZY8oPG5BD6MpPUSgIITzdIStifAshjRQV1PCFT8TxaicTzzwEwINOEdHVmDmcTAkRhMhMAp7iQRjcMtDhCp8SA1v0ARGIIK/gnkv0p1OBTS4QRUIpE7DiYYXTBrzcld3JIrAarXrps4AVNwRSZgExoJmIyAaAdsShUMn/JF2fh4YEkpAcgvnuwYCIb6EbbbP4PsDfLD2dD6Av1qTvAQlzUTAAAAAElFTkSuQmCC\") no-repeat 50%;background-color:#fff;background-size:100% 100%}.jb-pro-tips-message .jb-pro-tips-message-content{overflow:auto;padding:35px;box-sizing:border-box;width:100%;height:100%}.jb-pro-tips-message .jb-pro-tips-message-content .jb-pro-tips-message-content-item{font-size:14px;color:#fff;text-align:center;line-height:1.5}\n/*# 
sourceMappingURL=data:application/json;base64,{"version":3,"sources":["style.scss"],"names":[],"mappings":"AAAA,oBACE,GACE,8BAAiC,CACnC,GACE,+BAAmC,CAAE,CAEzC,wBACE,GACE,wBAAyB,CACzB,+BAAkC,CACpC,IACE,wBAAyB,CACzB,mCAAsC,CACxC,GACE,wBAAyB,CACzB,+BAAkC,CAAE,CAExC,gDACE,sBAA0B,CAE5B,+BACE,cAAe,CACf,UAAW,CACX,WAAY,CACZ,oBAAuB,CAEzB,uCACE,iBAAkB,CAClB,WAAY,CACZ,YAAa,CACb,iBAAkB,CAClB,SAAU,CACV,kBAAmB,CACnB,03GAAwD,CACxD,yBAA0B,CAC1B,qCAA2C,CAC3C,UAAW,CACX,YAAe,CACf,qDACE,UAAa,CACb,sEACE,MAAO,CACP,SAAY,CAChB,kDACE,WAAc,CACd,mEACE,gBAAiB,CACjB,UAAW,CACX,KAAM,CACN,YAAa,CACb,qBAAsB,CACtB,sBAAyB,CAC7B,gEACE,kBAAmB,CACnB,SAAY,CAEhB,wCACE,iBAAkB,CAClB,SAAU,CACV,WAAY,CACZ,YAAa,CACb,s4RAA+D,CAC/D,yBAA4B,CAC5B,gEACE,wBAA2B,CAC7B,kEACE,wBAA2B,CAC7B,kEACE,uBAA0B,CAC5B,qEACE,yBAA4B,CAC9B,sEACE,wBAA2B,CAC7B,uEACE,wBAA2B,CAC7B,wEACE,uBAA0B,CAC5B,kEACE,kBAAmB,CACnB,SAAY,CAEhB,sCACE,iBAAkB,CAClB,SAAU,CACV,QAAS,CACT,UAAW,CACX,WAAY,CACZ,eAAmB,CACnB,iBAAkB,CAClB,2BAAmC,CACnC,8DACE,SAAY,CACd,4DACE,QAAW,CACb,+DACE,SAAY,CACd,8DACE,QAAW,CACb,iEACE,QAAS,CACT,SAAY,CACd,mEACE,SAAU,CACV,QAAW,CACb,kEACE,QAAS,CACT,SAAY,CACd,oEACE,QAAS,CACT,SAAY,CAEhB,mCACE,iBAAoB,CACpB,0DACE,kBAAmB,CACnB,SAAY,CAEhB,mCACE,aAAc,CACd,iBAAkB,CAClB,MAAO,CACP,SAAU,CACV,WAAY,CACZ,qBAAsB,CACtB,cAAiB,CACjB,mDACE,YAAa,CACb,6BAAgC,CAEpC,6DACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,cAAe,CACf,keAA4D,CAC5D,yBAA4B,CAE9B,mEACE,keAAkE,CAClE,yBAA4B,CAE9B,6DACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,sbAA4D,CAC5D,yBAA0B,CAC1B,cAAiB,CAEnB,mEACE,kbAAkE,CAClE,yBAA4B,CAE9B,qEACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,0mBAAkE,CAClE,yBAA0B,CAC1B,cAAiB,CAEnB,2EACE,skBAAwE,CACxE,yBAA4B,CAE9B,sEACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,8lBAAmE,CACnE,yBAA0B,CAC1B,cAAiB,CAEnB,4EACE,8iBAAyE,CACzE,yBAA4B,CAE9B,+DACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,siBAA+D,CAC/D,yBAA0B,CAC1B,cAAiB,CAEnB,qEACE,kjBAAqE,CACrE,yBAA4B,CAE9B,gEACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,0kBAAgE,CAChE,yBAA0B,CAC1B,cAAiB,CAEnB,sEACE,kkBAAsE,CACtE,yBAA4B,CAE9B,iEACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,k+BAAkE,CAClE,yBAA0B,CAC1B,cAAiB,CAEnB,uEACE,khCAAwE,CACxE,yBAA4B,CAE9B,kEACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,81BAAmE,CACnE,yBAA0B,CAC1B,cAAiB,CAEnB,wEACE,82BAAyE,CACzE,yBAA4B,CAE9B,8DACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,skCAAgE,CAChE,yBAA0B,CAC1B,cAAiB,CAEnB,oEACE,kmCAAsE,CACtE,yBAA4B,CAE9B,+DACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,85BAAiE,CACjE,yBAA0B,CAC1B,cAAiB,CAEnB,qEACE,87BAAuE,CACvE,yBAA4B,CAE9B,gEACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,krCAAgE,CAChE,yBAA0B,CAC1B,cAAiB,CAEnB,sEACE,0wCAAsE,CACtE,yBAA4B,CAE9B,iEACE,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,8yCAAiE,CACjE,yBAA0B,CAC1B,cAAiB,CAEnB,uEACE,01CAAuE,CACvE,yBAA4B,CAE9B,oCACE,cAAe,CACf,iBAAkB,CAClB,OAAQ,CACR,QAAW,CAEb,uCACE,SAAU,CACV,QAAS,CAET,4BAAiC,CAAjC,6BAAmC,CAErC,0CACE,QAAS,CACT,UAAW,CAEX,4BAA+B,CAA/B,2BAAiC,CAEnC,yCACE,SAAU,CACV,QAAS,CAET,4BAAgC,CAAhC,4BAAkC,CAEpC,yCACE,SAAU,CACV,WAAY,CAEZ,4BAA8B,CAA9B,0BAAgC,CAElC,4CACE,uBAAwB,CACxB,SAAU,CACV,QAAS,CAET,4BAAgC,CAAhC,4BAAkC,CAEpC,6CACE,wBAAyB,CACzB,UAAW,CACX,QAAS,CAET,4BAA+B,CAA/B,2BAAiC,CAEnC,8CACE,uBAAwB,CACxB,SAAU,CACV,WAAY,CAEZ,4BAA8B,CAA9B,0BAAgC,CAElC,+CACE,wBAAyB,CACzB,UAAW,CACX,WAAY,CAEZ,4BAA8B,CAA9B,0BAAgC,CAElC,qCACE,YAYsB,CAExB,sEAbE,iBAAkB,CAClB,UAAW,CACX,MAAO,CACP,KAAM,CACN,OAAQ,CACR,QAAS,CACT,WAAY,CACZ,UAAW,CACX,uBAAkC,CAClC,2BAA4B,CAC5B,uBAAwB,CACxB,mBAcsB,CAExB,mCACE,iBAAkB,CAClB,YAAa,CACb,WAAY,CACZ,UAAW,CACX,SAAU,CACV,yBAAgC,CAChC,yCACE,cAAe,CACf,UAAW,CACX,iBAAkB,CAClB,QAAS,CACT,OAAQ,CACR,8BAAgC,CAChC,aAAc,CACd,UAAW,CACX,WAAY,CACZ,kZAA2C,CAC3C,2BAA4B,CAC5B,uBAA6B,CAC/B,+CACE,0aAAmD,CAEvD,oCACE,YAAa,CACb,iBAAkB,C
AClB,qBAAsB,CACtB,QAAS,CACT,KAAM,CACN,aAAc,CACd,0BAA2B,CAC3B,4BAA6B,CAC7B,kBAAmB,CACnB,WAAY,CACZ,WAAY,CACZ,eAAmB,CACnB,SAAU,CACV,yBAA8B,CAC9B,SAAY,CACZ,gEACE,SAAU,CACV,UAAW,CACX,kBAAmB,CACnB,iBAAkB,CAClB,yCAA4C,CAC9C,2DACE,cAAe,CACf,eAAgB,CAChB,UAAgB,CAClB,2DACE,WAAc,CAChB,4DACE,UAAW,CACX,WAAY,CACZ,cAAiB,CAErB,wCACE,YAAa,CACb,iBAAkB,CAClB,qBAAsB,CACtB,QAAS,CACT,KAAM,CACN,aAAc,CACd,0BAA2B,CAC3B,4BAA6B,CAC7B,kBAAmB,CACnB,WAAY,CACZ,WAAY,CACZ,eAAmB,CACnB,SAAU,CACV,yBAA8B,CAC9B,SAAY,CACZ,qDACE,kBAAqB,CACvB,4DACE,UAAW,CACX,WAAY,CACZ,cAAiB,CACnB,0DACE,cAAe,CACf,eAAgB,CAChB,UAAgB,CAKlB,uHACE,UAAW,CACX,WAAY,CACZ,cAAiB,CAErB,kCACE,YAAa,CACb,qBAAsB,CACtB,sBAAuB,CACvB,kBAAmB,CACnB,iBAAkB,CAClB,UAAW,CACX,MAAO,CACP,KAAM,CACN,OAAQ,CACR,QAAS,CACT,UAAW,CACX,WAAY,CACZ,mBAAsB,CAExB,uCACE,gBAAiB,CACjB,cAAe,CACf,UAAW,CACX,eAAkB,CAEpB,mCACE,wBAAyB,CACzB,qBAAsB,CACtB,YAAa,CACb,qBAAsB,CACtB,wBAAyB,CACzB,iBAAkB,CAClB,UAAW,CACX,MAAO,CACP,OAAQ,CACR,QAAS,CACT,WAAY,CACZ,UAAW,CACX,iBAAkB,CAClB,kBAAmB,CACnB,cAAe,CACf,UAAW,CACX,SAAU,CACV,iBAAkB,CAClB,8BAAgC,CAChC,wBAAiB,CAAjB,qBAAiB,CAAjB,gBAAiB,CACjB,4BAA+B,CAC/B,yDACE,iBAAkB,CAClB,YAAa,CACb,sBAAuB,CACvB,aAAgB,CAChB,gFACE,kBAAmB,CACnB,SAAY,CAmDd,s5DACE,YAAe,CACjB,sEACE,SAAY,CAChB,8DACE,iBAAkB,CAClB,YAAa,CACb,sBAAyB,CAC3B,iEACE,iBAAkB,CAClB,sBAAuB,CACvB,aAAgB,CAClB,2GACE,SAAY,CACd,2DACE,YAAa,CACb,6BAA8B,CAC9B,WAAc,CAId,mKAFE,YAAa,CACb,kBAGqB,CAE3B,wDACE,SAAU,CACV,kBAAqB,CAEvB,kEACE,UAAY,CACZ,kBAAmB,CACnB,YAAe,CAEjB,uCACE,qBAAyB,CAE3B,uCACE,UAAW,CACX,WAAY,CACZ,8vBAA8C,CAC9C,uBAAkC,CAClC,2BAA4B,CAC5B,yBAA0B,CAC1B,uBAAwB,CACxB,qBAAsB,CACtB,gCAAiC,CACjC,kCAAqC,CAEvC,0CACE,kmBAA4D,CAC5D,yBAA4B,CAC5B,gDACE,kmBAAkE,CAClE,yBAA4B,CAEhC,oCACE,0ZAAsD,CACtD,yBAA4B,CAC5B,0CACE,kbAA4D,CAC5D,yBAA4B,CAEhC,qCACE,0QAAuD,CACvD,yBAA4B,CAC5B,2CACE,0QAA6D,CAC7D,yBAA4B,CAEhC,sCACE,8bAAwD,CACxD,yBAA4B,CAC5B,4CACE,0dAA8D,CAC9D,yBAA4B,CAEhC,0CACE,s1BAA6D,CAC7D,yBAA4B,CAC5B,gDACE,s1BAAmE,CACnE,yBAA4B,CAEhC,0CACE,8wBAA4D,CAC5D,yBAA4B,CAC5B,gDACE,syBAAkE,CAClE,yBAA4B,CAEhC,8CACE,keAAiE,CACjE,yBAA4B,CAC5B,oDACE,8gBAAuE,CACvE,yBAA4B,CAEhC,qCACE,kbAAuD,CACvD,yBAA4B,CAC5B,2CACE,0bAA6D,CAC7D,yBAA4B,CAEhC,oCACE,84BAAsD,CACtD,yBAA4B,CAC5B,0CACE,k9BAA4D,CAC5D,yBAA4B,CAEhC,mCACE,s/BAAqD,CACrD,yBAA4B,CAC5B,yCACE,0nCAA2D,CAC3D,yBAA4B,CAEhC,yCACE,kkCAA4D,CAC5D,yBAA4B,CAC5B,+CACE,kiCAAkE,CAClE,yBAA4B,CAEhC,2CACE,8qBAA6D,CAC7D,yBAA4B,CAC5B,iDACE,ksBAAmE,CACnE,yBAA4B,CAEhC,iDACE,ksBAAoE,CACpE,yBAA4B,CAC5B,uDACE,8qBAA0E,CAC1E,yBAA4B,CAEhC,oCACE,8jBAAsD,CACtD,yBAA4B,CAC5B,0CACE,slBAA4D,CAC5D,yBAA4B,CAEhC,0CACE,skBAA6D,CAC7D,yBAA4B,CAC5B,gDACE,8kBAAmE,CACnE,yBAA4B,CAEhC,sCACE,s2CAAwD,CACxD,yBAA4B,CAC5B,4CACE,8pCAA8D,CAC9D,yBAA4B,CAEhC,4CACE,86CAA+D,CAC/D,yBAA4B,CAC5B,kDACE,85CAAqE,CACrE,yBAA4B,CAEhC,yCACE,k3FAA2D,CAC3D,yBAA4B,CAC5B,+CACE,81EAAiE,CACjE,yBAA4B,CAEhC,+CACE,0/FAAkE,CAClE,yBAA4B,CAC5B,qDACE,8gGAAwE,CACxE,yBAA4B,CAEhC,uCACE,swCAA0D,CAC1D,yBAA4B,CAC5B,6CACE,8lCAAgE,CAChE,yBAA4B,CAEhC,oCACE,8tBAAsD,CACtD,yBAA4B,CAC5B,0CACE,suBAA4D,CAC5D,yBAA4B,CAEhC,wCACE,0uBAA2D,CAC3D,yBAA4B,CAC5B,8CACE,svBAAiE,CACjE,yBAA4B,CAEhC,qCACE,siBAAuD,CACvD,yBAA4B,CAC5B,2CACE,kiBAA6D,CAC7D,yBAA4B,CAEhC,sCACE,syEAAwD,CACxD,yBAA4B,CAC5B,4CACE,03DAA8D,CAC9D,yBAA4B,CAEhC,sCACE,s9EAAwD,CACxD,yBAA4B,CAC5B,4CACE,k/DAA8D,CAC9D,yBAA4B,CAEhC,yKACE,cAAe,CACf,cAAe,CACf,WAAY,CACZ,gBAAiB,CACjB,cAAe,CACf,iBAAoB,CAEtB,gCACE,qBAAsB,CACtB,iBAAkB,CAClB,cAAe,CACf,UAAW,CACX,UAAa,CAEf,oKACE,iBAAkB,CAClB,QAAS,CACT,WAAY,CACZ,iBAAkB,CAClB,SAAU,CACV,0BAA2B,CAC3B,qCAA2C,CAC3C,+BAAoC,CACpC,iBAAkB,CAClB,eAAgB,CAChB,sBAAkB,CAAlB,iBAAoB,CACpB,ggCACE,kBAAmB,CACnB,
SAAY,CAEhB,mCACE,mBAAoB,CACpB,iBAAkB,CAClB,QAAS,CACT,WAAY,CACZ,iBAAkB,CAClB,SAAU,CACV,0BAA2B,CAC3B,qDAA2D,CAC3D,+BAAoC,CACpC,iBAAoB,CAEtB,8BACE,oBAAqB,CACrB,gBAAiB,CACjB,cAAe,CACf,kBAAmB,CACnB,UAAc,CAEhB,uCACE,aAAgB,CAElB,oKACE,aAAc,CACd,WAAY,CACZ,gBAAiB,CACjB,QAAS,CACT,cAAe,CACf,cAAe,CACf,cAAe,CACf,iBAAkB,CAClB,UAAW,CACX,wBAA+B,CAC/B,yCAAiD,CACjD,4LACE,mCAA4C,CAC9C,4LACE,YAAe,CACjB,gmCACE,aAAgB,CAEpB,4CACE,iBAAkB,CAClB,QAAS,CACT,WAAY,CACZ,iBAAkB,CAClB,SAAU,CACV,0CAA2C,CAC3C,qCAA2C,CAC3C,+BAAoC,CACpC,iBAAkB,CAClB,YAAa,CACb,UAAW,CACX,eAAkB,CAClB,0EACE,kBAAmB,CACnB,SAAY,CAEhB,uCACE,cAAe,CACf,iBAAkB,CAClB,QAAS,CACT,WAAY,CACZ,UAAW,CACX,eAAkB,CAEpB,4CACE,iBAAkB,CAClB,MAAO,CACP,KAAM,CACN,UAAW,CACX,WAAY,CACZ,gBAAiB,CACjB,iBAAkB,CAClB,UAAW,CACX,cAAiB,CAEnB,8CACE,iBAAkB,CAClB,QAAS,CACT,QAAS,CACT,UAAW,CACX,WAAY,CACZ,kBAAmB,CACnB,gBAAiB,CACjB,eAAkB,CAClB,qDACE,YAAa,CACb,eAAkB,CACpB,oDACE,UAAW,CACX,6BAAsC,CACxC,yGACE,UAAW,CACX,iBAAkB,CAClB,aAAc,CACd,QAAS,CACT,SAAU,CACV,gBAAiB,CACjB,WAAc,CAElB,yDACE,WAAc,CAEhB,+DACE,6CAAiD,CAEnD,wDACE,kBAAqB,CAEvB,6DACE,wBAA2B,CAE7B,uCACE,YAAa,CACb,qBAAsB,CACtB,iBAAkB,CAClB,WAAY,CACZ,SAAU,CACV,QAAS,CACT,eAAgB,CAChB,aAAc,CACd,+BAAoC,CACpC,iBAAoB,CACpB,2DACE,cAAe,CACf,cAAe,CACf,aAAc,CACd,UAAW,CACX,iBAAkB,CAClB,eAAgB,CAChB,sBAAuB,CACvB,kBAAmB,CACnB,kCAAuC,CACvC,0CAAmD,CACnD,6DACE,UAAW,CACX,oBAAuB,CACzB,gEACE,oBAAqB,CACrB,aAAgB,CAChB,kJACE,kBAAqB,CACzB,iEACE,mCAA4C,CAC9C,sEACE,kBAAqB,CAE3B,gEACE,YAAe,CAEjB,qCACE,aAAc,CACd,iBAAkB,CAClB,UAAW,CACX,WAAY,CACZ,YAAe,CAEjB,4CACE,WAAc,CACd,4FACE,MAAO,CACP,YAAa,CACb,qBAAsB,CACtB,6BAA8B,CAC9B,cAAiB,CACjB,2HACE,qBAAsB,CACtB,MAAO,CACP,iBAAkB,CAClB,WAAc,CAChB,iIACE,WAAY,CACZ,WAAY,CACZ,iBAAkB,CAClB,eAAkB,CACpB,mIACE,iBAAkB,CAClB,MAAS,CACT,KAAM,CACN,WAAY,CACZ,SAAU,CACV,oBAAqB,CACrB,iBAAkB,CAClB,SAAY,CACd,wIACE,iBAAkB,CAClB,qBAAsB,CACtB,aAAc,CACd,UAAW,CACX,UAAW,CACX,QAAS,CACT,oBAAqB,CACrB,WAAY,CACZ,gBAAiB,CACjB,WAAY,CACZ,qBAAsB,CACtB,UAAW,CAEX,wBAAyB,CACzB,qBAAsB,CACtB,gBAAmB,CACrB,kIACE,iBAAkB,CAClB,YAAa,CACb,aAAgB,CAChB,2IACE,YAAe,CACjB,4IACE,YAAe,CACjB,0IACE,YAAe,CACjB,2IACE,aAAgB,CAClB,0IACE,aAAgB,CACpB,gIACE,iBAAkB,CAClB,qBAAsB,CACtB,WAAc,CAChB,sHACE,WAAY,CACZ,eAAkB,CACpB,2HACE,UAAW,CACX,SAAY,CACd,8HACE,WAAc,CAChB,2HACE,YAAa,CACb,kBAAqB,CACvB,0PACE,UAAW,CACX,SAAU,CACV,UAAW,CACX,QAAS,CACT,cAAe,CACf,iBAAkB,CAClB,SAAY,CACZ,wQACE,uBAAwB,CACxB,cAAiB,CACnB,sQACE,qBAAwB,CAC1B,wUACE,kBAAmB,CACnB,SAAY,CAChB,6HACE,mBAAoB,CACpB,iBAAkB,CAClB,MAAO,CACP,QAAS,CACT,iBAAkB,CAClB,SAAU,CACV,yBAA0B,CAC1B,qDAA2D,CAC3D,qBAAuB,CACvB,iBAAkB,CAClB,SAAY,CACZ,kKACE,2BAA8B,CAChC,yJACE,oBAAqB,CACrB,eAAgB,CAChB,cAAe,CACf,kBAAmB,CACnB,UAAc,CAClB,gPACE,UAAW,CACX,iBAAkB,CAClB,UAAW,CACX,qBAAsB,CACtB,yBAA0B,CAE1B,wBAAyB,CACzB,qBAAsB,CACtB,gBAAiB,CACjB,eAAgB,CAChB,WAAY,CACZ,gBAAmB,CACnB,wQACE,aAAgB,CAChB,wUACE,MAAS,CACb,4PACE,UAAW,CACX,iBAAkB,CAClB,MAAO,CACP,QAAS,CACT,SAAU,CACV,WAAY,CACZ,qBAAwB,CAC5B,0PACE,iBAAkB,CAClB,UAAa,CAKnB,gKACE,cAAiB,CAEnB,2DACE,iBAAkB,CAClB,qBAAsB,CACtB,MAAO,CACP,QAAS,CACT,UAAW,CACX,YAAa,CACb,kBAAmB,CACnB,kBAAmB,CACnB,UAAW,CACX,cAAiB,CACjB,iEACE,KAAQ,CACR,sBAAyB,CACzB,gGACE,WAAc,CACd,2HACE,kBAAsB,CACtB,kBAAqB,CAE7B,0DACE,YAAa,CACb,kBAAmB,CACnB,iBAAkB,CAClB,UAAW,CACX,UAAW,CACX,uBAAyB,CACzB,6BAAsC,CAExC,kDACE,YAAa,CACb,OAAW,CAEb,mDACE,iBAAkB,CAClB,MAAO,CACP,KAAM,CACN,OAAQ,CACR,QAAS,CACT,WAAY,CACZ,OAAQ,CACR,uBAA0B,CAE5B,sDACE,iBAAkB,CAClB,kBAAmB,CACnB,sBAAuB,CACvB,iBAAkB,CAClB,UAAW,CACX,iBAAkB,CAClB,mBAA0B,CAC1B,oCAAwC,CACxC,mEACE,UAAW,CACX,WAAY,CACZ,mBAAoB,CACpB,wBAAiB,CAAjB,qBAAiB,CAAjB,gBAAmB,CACrB,4DACE,8BAAuC,CAE3C,gDACE,YAAa,CACb,iBAAkB,CAClB
,UAAW,CACX,SAAU,CACV,MAAO,CACP,WAAY,CACZ,aAAc,CACd,gBAAiB,CACjB,UAAW,CACX,cAAe,CACf,iBAAkB,CAClB,yBAA8B,CAC9B,iBAAkB,CAClB,eAAiB,CACjB,kBAAqB,CAEvB,+HACE,eAAkB,CAEpB,qBACE,WAAc,CAEhB,0BACE,iBAAkB,CAClB,qBAAsB,CACtB,aAAc,CACd,MAAO,CACP,KAAM,CACN,WAAY,CACZ,cAAe,CACf,yBAA8B,CAC9B,UAAW,CACX,eAAgB,CAChB,eAAgB,CAChB,YAAe,CACf,mDACE,YAAa,CACb,kBAAmB,CACnB,cAAe,CACf,UAAc,CAChB,yDACE,WAAc,CAElB,qBACE,iBAAkB,CAClB,KAAM,CACN,MAAO,CACP,UAAW,CACX,WAAY,CACZ,mEAA+E,CAC/E,aAAc,CACd,sBAAuB,CACvB,YAAe,CACf,4BACE,wBAA+B,CAC/B,WAAY,CACZ,sCAAyC,CACzC,cAAe,CACf,MAAO,CACP,aAAc,CACd,gBAAiB,CACjB,iBAAkB,CAClB,4BAA6B,CAC7B,iBAAkB,CAClB,OAAQ,CACR,qBAAsB,CACtB,UAAa,CACf,gDACE,iBAAkB,CAClB,aAAc,CACd,OAAU,CACV,KAAQ,CACR,UAAW,CACX,WAAY,CACZ,YAAa,CACb,kBAAmB,CACnB,sBAAyB,CACzB,gFACE,UAAW,CACX,WAAY,CACZ,kBAAmB,CACnB,cAAe,CACf,siBAAuD,CACvD,qBAAsB,CACtB,yBAA4B,CAChC,kDACE,aAAc,CACd,YAAa,CACb,qBAAsB,CACtB,UAAW,CACX,WAAc,CACd,oFACE,cAAe,CACf,UAAW,CACX,iBAAkB,CAClB,eAAkB","file":"style.scss","sourcesContent":["@keyframes rotation {\n  from {\n    -webkit-transform: rotate(0deg); }\n  to {\n    -webkit-transform: rotate(360deg); } }\n\n@keyframes magentaPulse {\n  from {\n    background-color: #630030;\n    -webkit-box-shadow: 0 0 9px #333; }\n  50% {\n    background-color: #a9014b;\n    -webkit-box-shadow: 0 0 18px #a9014b; }\n  to {\n    background-color: #630030;\n    -webkit-box-shadow: 0 0 9px #333; } }\n\n.jb-pro-container video::-webkit-media-controls {\n  display: none !important; }\n\n.jb-pro-container .jb-pro-icon {\n  cursor: pointer;\n  width: 16px;\n  height: 16px;\n  display: inline-block; }\n\n.jb-pro-container .jb-pro-ptz-controls {\n  position: absolute;\n  width: 156px;\n  height: 156px;\n  visibility: hidden;\n  opacity: 0;\n  border-radius: 78px;\n  background: url(\"../assets/ptz-bg.png\") no-repeat center;\n  background-size: 100% 100%;\n  transition: visibility 300ms, opacity 300ms;\n  right: 43px;\n  bottom: 135px; }\n  .jb-pro-container .jb-pro-ptz-controls.show-vertical {\n    right: 43px; }\n    .jb-pro-container .jb-pro-ptz-controls.show-vertical .jb-pro-ptz-btns {\n      left: 0;\n      top: 156px; }\n  .jb-pro-container .jb-pro-ptz-controls.show-level {\n    right: 163px; }\n    .jb-pro-container .jb-pro-ptz-controls.show-level .jb-pro-ptz-btns {\n      min-height: 156px;\n      left: 144px;\n      top: 0;\n      display: flex;\n      flex-direction: column;\n      justify-content: center; }\n  .jb-pro-container .jb-pro-ptz-controls.jb-pro-ptz-controls-show {\n    visibility: visible;\n    opacity: 1; }\n\n.jb-pro-container .jb-pro-ptz-bg-active {\n  visibility: hidden;\n  opacity: 0;\n  width: 156px;\n  height: 156px;\n  background: url(\"../assets/ptz-bg-active.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-up {\n    transform: rotate(-90deg); }\n  .jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-left {\n    transform: rotate(180deg); }\n  .jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-down {\n    transform: rotate(90deg); }\n  .jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-left-up {\n    transform: rotate(-135deg); }\n  .jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-right-up {\n    transform: rotate(-45deg); }\n  .jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-left-down {\n    transform: rotate(135deg); }\n  .jb-pro-container .jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-right-down {\n    transform: rotate(45deg); }\n  .jb-pro-container 
.jb-pro-ptz-bg-active.jb-pro-ptz-bg-active-show {\n    visibility: visible;\n    opacity: 1; }\n\n.jb-pro-container .jb-pro-ptz-control {\n  position: absolute;\n  left: 53px;\n  top: 53px;\n  width: 50px;\n  height: 50px;\n  background: #FFFFFF;\n  border-radius: 50%;\n  transition: left 300ms, top 300ms; }\n  .jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-left {\n    left: 33px; }\n  .jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-up {\n    top: 33px; }\n  .jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-right {\n    left: 73px; }\n  .jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-down {\n    top: 73px; }\n  .jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-left-up {\n    top: 39px;\n    left: 39px; }\n  .jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-left-down {\n    left: 39px;\n    top: 67px; }\n  .jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-right-up {\n    top: 39px;\n    left: 67px; }\n  .jb-pro-container .jb-pro-ptz-control.jb-pro-ptz-control-right-down {\n    top: 67px;\n    left: 67px; }\n\n.jb-pro-container .jb-pro-ptz-icon {\n  position: relative; }\n  .jb-pro-container .jb-pro-ptz-icon:hover .icon-title-tips {\n    visibility: visible;\n    opacity: 1; }\n\n.jb-pro-container .jb-pro-ptz-btns {\n  display: block;\n  position: absolute;\n  left: 0;\n  top: 156px;\n  width: 156px;\n  box-sizing: border-box;\n  padding: 0 30px; }\n  .jb-pro-container .jb-pro-ptz-btns .jb-pro-ptz-btn {\n    display: flex;\n    justify-content: space-between; }\n\n.jb-pro-container .jb-pro-ptz-expand .jb-pro-ptz-expand-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  cursor: pointer;\n  background: url(\"../assets/ptz-expand.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-expand:hover .jb-pro-ptz-expand-icon {\n  background: url(\"../assets/ptz-expand-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-narrow .jb-pro-ptz-narrow-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-narrow.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; }\n\n.jb-pro-container .jb-pro-ptz-narrow:hover .jb-pro-ptz-narrow-icon {\n  background: url(\"../assets/ptz-narrow-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-aperture-far .jb-pro-ptz-aperture-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-aperture-far.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; }\n\n.jb-pro-container .jb-pro-ptz-aperture-far:hover .jb-pro-ptz-aperture-icon {\n  background: url(\"../assets/ptz-aperture-far-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-aperture-near .jb-pro-ptz-aperture-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-aperture-near.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; }\n\n.jb-pro-container .jb-pro-ptz-aperture-near:hover .jb-pro-ptz-aperture-icon {\n  background: url(\"../assets/ptz-aperture-near-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-focus-far .jb-pro-ptz-focus-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-focus-far.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; 
}\n\n.jb-pro-container .jb-pro-ptz-focus-far:hover .jb-pro-ptz-focus-icon {\n  background: url(\"../assets/ptz-focus-far-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-focus-near .jb-pro-ptz-focus-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-focus-near.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; }\n\n.jb-pro-container .jb-pro-ptz-focus-near:hover .jb-pro-ptz-focus-icon {\n  background: url(\"../assets/ptz-focus-near-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-cruise-play .jb-pro-ptz-focus-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-xunhang-play.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; }\n\n.jb-pro-container .jb-pro-ptz-cruise-play:hover .jb-pro-ptz-focus-icon {\n  background: url(\"../assets/ptz-xunhang-play-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-cruise-pause .jb-pro-ptz-focus-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-xunhang-pause.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; }\n\n.jb-pro-container .jb-pro-ptz-cruise-pause:hover .jb-pro-ptz-focus-icon {\n  background: url(\"../assets/ptz-xunhang-pause-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-fog-open .jb-pro-ptz-focus-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-touwu-open.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; }\n\n.jb-pro-container .jb-pro-ptz-fog-open:hover .jb-pro-ptz-focus-icon {\n  background: url(\"../assets/ptz-touwu-open-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-fog-close .jb-pro-ptz-focus-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-touwu-close.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; }\n\n.jb-pro-container .jb-pro-ptz-fog-close:hover .jb-pro-ptz-focus-icon {\n  background: url(\"../assets/ptz-touwu-close-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-wiper-open .jb-pro-ptz-focus-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-wiper-open.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; }\n\n.jb-pro-container .jb-pro-ptz-wiper-open:hover .jb-pro-ptz-focus-icon {\n  background: url(\"../assets/ptz-wiper-open-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-wiper-close .jb-pro-ptz-focus-icon {\n  display: inline-block;\n  width: 28px;\n  height: 28px;\n  background: url(\"../assets/ptz-wiper-close.png\") no-repeat center;\n  background-size: 100% 100%;\n  cursor: pointer; }\n\n.jb-pro-container .jb-pro-ptz-wiper-close:hover .jb-pro-ptz-focus-icon {\n  background: url(\"../assets/ptz-wiper-close-hover.png\") no-repeat center;\n  background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-ptz-arrow {\n  cursor: pointer;\n  position: absolute;\n  width: 0;\n  height: 0; }\n\n.jb-pro-container .jb-pro-ptz-arrow-up {\n  left: 71px;\n  top: 15px;\n  border: 7px solid transparent;\n  border-bottom: 10px solid #FFFFFF; }\n\n.jb-pro-container 
.jb-pro-ptz-arrow-right {\n  top: 71px;\n  right: 15px;\n  border: 7px solid transparent;\n  border-left: 10px solid #FFFFFF; }\n\n.jb-pro-container .jb-pro-ptz-arrow-left {\n  left: 15px;\n  top: 71px;\n  border: 7px solid transparent;\n  border-right: 10px solid #FFFFFF; }\n\n.jb-pro-container .jb-pro-ptz-arrow-down {\n  left: 71px;\n  bottom: 15px;\n  border: 7px solid transparent;\n  border-top: 10px solid #FFFFFF; }\n\n.jb-pro-container .jb-pro-ptz-arrow-left-up {\n  transform: rotate(45deg);\n  left: 32px;\n  top: 33px;\n  border: 7px solid transparent;\n  border-right: 10px solid #FFFFFF; }\n\n.jb-pro-container .jb-pro-ptz-arrow-right-up {\n  transform: rotate(-45deg);\n  right: 32px;\n  top: 33px;\n  border: 7px solid transparent;\n  border-left: 10px solid #FFFFFF; }\n\n.jb-pro-container .jb-pro-ptz-arrow-left-down {\n  transform: rotate(45deg);\n  left: 32px;\n  bottom: 33px;\n  border: 7px solid transparent;\n  border-top: 10px solid #FFFFFF; }\n\n.jb-pro-container .jb-pro-ptz-arrow-right-down {\n  transform: rotate(-45deg);\n  right: 32px;\n  bottom: 33px;\n  border: 7px solid transparent;\n  border-top: 10px solid #FFFFFF; }\n\n.jb-pro-container .jb-pro-loading-bg {\n  display: none;\n  position: absolute;\n  z-index: 10;\n  left: 0;\n  top: 0;\n  right: 0;\n  bottom: 0;\n  height: 100%;\n  width: 100%;\n  background-position: center center;\n  background-repeat: no-repeat;\n  background-size: contain;\n  pointer-events: none; }\n\n.jb-pro-container .jb-pro-poster {\n  position: absolute;\n  z-index: 10;\n  left: 0;\n  top: 0;\n  right: 0;\n  bottom: 0;\n  height: 100%;\n  width: 100%;\n  background-position: center center;\n  background-repeat: no-repeat;\n  background-size: contain;\n  pointer-events: none; }\n\n.jb-pro-container .jb-pro-play-big {\n  position: absolute;\n  display: none;\n  height: 100%;\n  width: 100%;\n  z-index: 1;\n  background: rgba(0, 0, 0, 0.4); }\n  .jb-pro-container .jb-pro-play-big:after {\n    cursor: pointer;\n    content: '';\n    position: absolute;\n    left: 50%;\n    top: 50%;\n    transform: translate(-50%, -50%);\n    display: block;\n    width: 48px;\n    height: 48px;\n    background-image: url(\"../assets/play.png\");\n    background-repeat: no-repeat;\n    background-position: center; }\n  .jb-pro-container .jb-pro-play-big:hover:after {\n    background-image: url(\"../assets/play-hover.png\"); }\n\n.jb-pro-container .jb-pro-recording {\n  display: none;\n  position: absolute;\n  box-sizing: border-box;\n  left: 50%;\n  top: 0;\n  padding: 0 3px;\n  transform: translateX(-50%);\n  justify-content: space-around;\n  align-items: center;\n  width: 101px;\n  height: 20px;\n  background: #000000;\n  opacity: 1;\n  border-radius: 0px 0px 8px 8px;\n  z-index: 1; }\n  .jb-pro-container .jb-pro-recording .jb-pro-recording-red-point {\n    width: 8px;\n    height: 8px;\n    background: #FF1F1F;\n    border-radius: 50%;\n    animation: magentaPulse 1s linear infinite; }\n  .jb-pro-container .jb-pro-recording .jb-pro-recording-time {\n    font-size: 14px;\n    font-weight: 500;\n    color: #DDDDDD; }\n  .jb-pro-container .jb-pro-recording .jb-pro-recording-stop {\n    height: 100%; }\n  .jb-pro-container .jb-pro-recording .jb-pro-icon-recordStop {\n    width: 16px;\n    height: 16px;\n    cursor: pointer; }\n\n.jb-pro-container .jb-pro-zoom-controls {\n  display: none;\n  position: absolute;\n  box-sizing: border-box;\n  left: 50%;\n  top: 0;\n  padding: 0 3px;\n  transform: translateX(-50%);\n  justify-content: space-around;\n  align-items: 
center;\n  width: 156px;\n  height: 30px;\n  background: #000000;\n  opacity: 1;\n  border-radius: 0px 0px 8px 8px;\n  z-index: 1; }\n  .jb-pro-container .jb-pro-zoom-controls .jb-pro-icon {\n    vertical-align: top; }\n  .jb-pro-container .jb-pro-zoom-controls .jb-pro-zoom-narrow {\n    width: 16px;\n    height: 16px;\n    cursor: pointer; }\n  .jb-pro-container .jb-pro-zoom-controls .jb-pro-zoom-tips {\n    font-size: 14px;\n    font-weight: 500;\n    color: #DDDDDD; }\n  .jb-pro-container .jb-pro-zoom-controls .jb-pro-zoom-expand {\n    width: 16px;\n    height: 16px;\n    cursor: pointer; }\n  .jb-pro-container .jb-pro-zoom-controls .jb-pro-zoom-stop2 {\n    width: 16px;\n    height: 16px;\n    cursor: pointer; }\n\n.jb-pro-container .jb-pro-loading {\n  display: none;\n  flex-direction: column;\n  justify-content: center;\n  align-items: center;\n  position: absolute;\n  z-index: 20;\n  left: 0;\n  top: 0;\n  right: 0;\n  bottom: 0;\n  width: 100%;\n  height: 100%;\n  pointer-events: none; }\n\n.jb-pro-container .jb-pro-loading-text {\n  line-height: 20px;\n  font-size: 13px;\n  color: #fff;\n  margin-top: 10px; }\n\n.jb-pro-container .jb-pro-controls {\n  background-color: #161616;\n  box-sizing: border-box;\n  display: flex;\n  flex-direction: column;\n  justify-content: flex-end;\n  position: absolute;\n  z-index: 40;\n  left: 0;\n  right: 0;\n  bottom: 0;\n  height: 38px;\n  width: 100%;\n  padding-left: 13px;\n  padding-right: 13px;\n  font-size: 14px;\n  color: #fff;\n  opacity: 0;\n  visibility: hidden;\n  transition: all 0.2s ease-in-out;\n  user-select: none;\n  transition: width .5s ease-in; }\n  .jb-pro-container .jb-pro-controls .jb-pro-controls-item {\n    position: relative;\n    display: flex;\n    justify-content: center;\n    padding: 0 8px; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item:hover .icon-title-tips {\n      visibility: visible;\n      opacity: 1; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-microphone-close {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-icon-audio {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-play {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-pause {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-fullscreen-exit {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-screenshot {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-ptz {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-performance {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-face {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-object {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-occlusion {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-quality-menu {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-speed-menu {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-template-menu {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-scale-menu {\n      display: none; }\n    .jb-pro-container .jb-pro-controls 
.jb-pro-controls-item.jb-pro-volume {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-ptz-active {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-performance-active {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-face-active {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-object-active {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-occlusion-active {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-record {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-fullscreen {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-record-stop {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-zoom {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-zoom-stop {\n      display: none; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-item.jb-pro-speed {\n      padding: 0; }\n  .jb-pro-container .jb-pro-controls .jb-pro-controls-item-html {\n    position: relative;\n    display: none;\n    justify-content: center; }\n  .jb-pro-container .jb-pro-controls .jb-pro-playback-control-time {\n    position: relative;\n    justify-content: center;\n    padding: 0 8px; }\n  .jb-pro-container .jb-pro-controls .jb-pro-icon-audio, .jb-pro-container .jb-pro-controls .jb-pro-icon-mute {\n    z-index: 1; }\n  .jb-pro-container .jb-pro-controls .jb-pro-controls-bottom {\n    display: flex;\n    justify-content: space-between;\n    height: 100%; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-left {\n      display: flex;\n      align-items: center; }\n    .jb-pro-container .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-right {\n      display: flex;\n      align-items: center; }\n\n.jb-pro-container.jb-pro-controls-show .jb-pro-controls {\n  opacity: 1;\n  visibility: visible; }\n\n.jb-pro-container.jb-pro-controls-show-auto-hide .jb-pro-controls {\n  opacity: 0.8;\n  visibility: visible;\n  display: none; }\n\n.jb-pro-container.jb-pro-hide-cursor * {\n  cursor: none !important; }\n\n.jb-pro-container .jb-pro-icon-loading {\n  width: 50px;\n  height: 50px;\n  background-image: url(\"../assets/loading.png\");\n  background-position: center center;\n  background-repeat: no-repeat;\n  background-size: 100% 100%;\n  animation-name: rotation;\n  animation-duration: 1s;\n  animation-timing-function: linear;\n  animation-iteration-count: infinite; }\n\n.jb-pro-container .jb-pro-icon-screenshot {\n  background: url(\"../assets/screenshot.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-screenshot:hover {\n    background: url(\"../assets/screenshot-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-play {\n  background: url(\"../assets/play.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-play:hover {\n    background: url(\"../assets/play-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-pause {\n  background: url(\"../assets/pause.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-pause:hover {\n    background: 
url(\"../assets/pause-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-record {\n  background: url(\"../assets/record.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-record:hover {\n    background: url(\"../assets/record-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-recordStop {\n  background: url(\"../assets/record-stop.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-recordStop:hover {\n    background: url(\"../assets/record-stop-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-fullscreen {\n  background: url(\"../assets/fullscreen.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-fullscreen:hover {\n    background: url(\"../assets/fullscreen-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-fullscreenExit {\n  background: url(\"../assets/exit-fullscreen.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-fullscreenExit:hover {\n    background: url(\"../assets/exit-fullscreen-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-audio {\n  background: url(\"../assets/audio.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-audio:hover {\n    background: url(\"../assets/audio-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-mute {\n  background: url(\"../assets/mute.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-mute:hover {\n    background: url(\"../assets/mute-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-ptz {\n  background: url(\"../assets/ptz.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-ptz:hover {\n    background: url(\"../assets/ptz-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-ptzActive {\n  background: url(\"../assets/ptz-active.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-ptzActive:hover {\n    background: url(\"../assets/ptz-active-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-performance {\n  background: url(\"../assets/performance.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-performance:hover {\n    background: url(\"../assets/performance-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-performanceActive {\n  background: url(\"../assets/performance-active.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-performanceActive:hover {\n    background: url(\"../assets/performance-active-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-face {\n  background: url(\"../assets/face.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-face:hover {\n    background: url(\"../assets/face-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-faceActive {\n  background: url(\"../assets/face-active.png\") no-repeat center;\n 
 background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-faceActive:hover {\n    background: url(\"../assets/face-active-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-object {\n  background: url(\"../assets/object.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-object:hover {\n    background: url(\"../assets/object-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-objectActive {\n  background: url(\"../assets/object-active.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-objectActive:hover {\n    background: url(\"../assets/object-active-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-occlusion {\n  background: url(\"../assets/occlusion.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-occlusion:hover {\n    background: url(\"../assets/occlusion-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-occlusionActive {\n  background: url(\"../assets/occlusion-active.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-occlusionActive:hover {\n    background: url(\"../assets/occlusion-active-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-logSave {\n  background: url(\"../assets/log-save.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-logSave:hover {\n    background: url(\"../assets/log-save-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-zoom {\n  background: url(\"../assets/zoom.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-zoom:hover {\n    background: url(\"../assets/zoom-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-zoomStop {\n  background: url(\"../assets/zoom-stop.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-zoomStop:hover {\n    background: url(\"../assets/zoom-stop-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-close {\n  background: url(\"../assets/close.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-close:hover {\n    background: url(\"../assets/close-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-narrow {\n  background: url(\"../assets/narrow.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-narrow:hover {\n    background: url(\"../assets/narrow-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-icon-expand {\n  background: url(\"../assets/expand.png\") no-repeat center;\n  background-size: 100% 100%; }\n  .jb-pro-container .jb-pro-icon-expand:hover {\n    background: url(\"../assets/expand-hover.png\") no-repeat center;\n    background-size: 100% 100%; }\n\n.jb-pro-container .jb-pro-quality-icon-text, .jb-pro-container .jb-pro-scale-icon-text, .jb-pro-container .jb-pro-menu-icon-text, .jb-pro-container .jb-pro-speed-icon-text {\n  font-size: 14px;\n  min-width: 30px;\n  height: 20px;\n  line-height: 20px;\n  cursor: pointer;\n  text-align: center; }\n\n.jb-pro-container .jb-pro-speed {\n 
 box-sizing: border-box;\n  text-align: center;\n  font-size: 14px;\n  color: #fff;\n  width: 90px; }\n\n.jb-pro-container .jb-pro-quality-menu-list, .jb-pro-container .jb-pro-scale-menu-list, .jb-pro-container .jb-pro-menu-list, .jb-pro-container .jb-pro-speed-menu-list {\n  position: absolute;\n  left: 50%;\n  bottom: 100%;\n  visibility: hidden;\n  opacity: 0;\n  transform: translateX(-50%);\n  transition: visibility 300ms, opacity 300ms;\n  background-color: rgba(0, 0, 0, 0.5);\n  border-radius: 4px;\n  overflow: hidden;\n  width: max-content; }\n  .jb-pro-container .jb-pro-quality-menu-list.jb-pro-quality-menu-shown, .jb-pro-container .jb-pro-quality-menu-list.jb-pro-scale-menu-shown, .jb-pro-container .jb-pro-quality-menu-list.jb-pro-menu-shown, .jb-pro-container .jb-pro-quality-menu-list.jb-pro-speed-menu-shown, .jb-pro-container .jb-pro-scale-menu-list.jb-pro-quality-menu-shown, .jb-pro-container .jb-pro-scale-menu-list.jb-pro-scale-menu-shown, .jb-pro-container .jb-pro-scale-menu-list.jb-pro-menu-shown, .jb-pro-container .jb-pro-scale-menu-list.jb-pro-speed-menu-shown, .jb-pro-container .jb-pro-menu-list.jb-pro-quality-menu-shown, .jb-pro-container .jb-pro-menu-list.jb-pro-scale-menu-shown, .jb-pro-container .jb-pro-menu-list.jb-pro-menu-shown, .jb-pro-container .jb-pro-menu-list.jb-pro-speed-menu-shown, .jb-pro-container .jb-pro-speed-menu-list.jb-pro-quality-menu-shown, .jb-pro-container .jb-pro-speed-menu-list.jb-pro-scale-menu-shown, .jb-pro-container .jb-pro-speed-menu-list.jb-pro-menu-shown, .jb-pro-container .jb-pro-speed-menu-list.jb-pro-speed-menu-shown {\n    visibility: visible;\n    opacity: 1; }\n\n.jb-pro-container .icon-title-tips {\n  pointer-events: none;\n  position: absolute;\n  left: 50%;\n  bottom: 100%;\n  visibility: hidden;\n  opacity: 0;\n  transform: translateX(-50%);\n  transition: visibility 300ms ease 0s, opacity 300ms ease 0s;\n  background-color: rgba(0, 0, 0, 0.5);\n  border-radius: 4px; }\n\n.jb-pro-container .icon-title {\n  display: inline-block;\n  padding: 5px 10px;\n  font-size: 12px;\n  white-space: nowrap;\n  color: white; }\n\n.jb-pro-container .jb-pro-quality-menu {\n  padding: 8px 0; }\n\n.jb-pro-container .jb-pro-quality-menu-item, .jb-pro-container .jb-pro-scale-menu-item, .jb-pro-container .jb-pro-speed-menu-item, .jb-pro-container .jb-pro-menu-item {\n  display: block;\n  height: 25px;\n  line-height: 25px;\n  margin: 0;\n  padding: 0 10px;\n  cursor: pointer;\n  font-size: 14px;\n  text-align: center;\n  width: 50px;\n  color: rgba(255, 255, 255, 0.5);\n  transition: color 300ms, background-color 300ms; }\n  .jb-pro-container .jb-pro-quality-menu-item:hover, .jb-pro-container .jb-pro-scale-menu-item:hover, .jb-pro-container .jb-pro-speed-menu-item:hover, .jb-pro-container .jb-pro-menu-item:hover {\n    background-color: rgba(255, 255, 255, 0.2); }\n  .jb-pro-container .jb-pro-quality-menu-item:focus, .jb-pro-container .jb-pro-scale-menu-item:focus, .jb-pro-container .jb-pro-speed-menu-item:focus, .jb-pro-container .jb-pro-menu-item:focus {\n    outline: none; }\n  .jb-pro-container .jb-pro-quality-menu-item.jb-pro-quality-menu-item-active, .jb-pro-container .jb-pro-quality-menu-item.jb-pro-scale-menu-item-active, .jb-pro-container .jb-pro-quality-menu-item.jb-pro-speed-menu-item-active, .jb-pro-container .jb-pro-quality-menu-item.jb-pro-menu-item-active, .jb-pro-container .jb-pro-scale-menu-item.jb-pro-quality-menu-item-active, .jb-pro-container .jb-pro-scale-menu-item.jb-pro-scale-menu-item-active, .jb-pro-container 
.jb-pro-scale-menu-item.jb-pro-speed-menu-item-active, .jb-pro-container .jb-pro-scale-menu-item.jb-pro-menu-item-active, .jb-pro-container .jb-pro-speed-menu-item.jb-pro-quality-menu-item-active, .jb-pro-container .jb-pro-speed-menu-item.jb-pro-scale-menu-item-active, .jb-pro-container .jb-pro-speed-menu-item.jb-pro-speed-menu-item-active, .jb-pro-container .jb-pro-speed-menu-item.jb-pro-menu-item-active, .jb-pro-container .jb-pro-menu-item.jb-pro-quality-menu-item-active, .jb-pro-container .jb-pro-menu-item.jb-pro-scale-menu-item-active, .jb-pro-container .jb-pro-menu-item.jb-pro-speed-menu-item-active, .jb-pro-container .jb-pro-menu-item.jb-pro-menu-item-active {\n    color: #2298FC; }\n\n.jb-pro-container .jb-pro-volume-panel-wrap {\n  position: absolute;\n  left: 50%;\n  bottom: 100%;\n  visibility: hidden;\n  opacity: 0;\n  transform: translateX(-50%) translateY(22%);\n  transition: visibility 300ms, opacity 300ms;\n  background-color: rgba(0, 0, 0, 0.5);\n  border-radius: 4px;\n  height: 120px;\n  width: 50px;\n  overflow: hidden; }\n  .jb-pro-container .jb-pro-volume-panel-wrap.jb-pro-volume-panel-wrap-show {\n    visibility: visible;\n    opacity: 1; }\n\n.jb-pro-container .jb-pro-volume-panel {\n  cursor: pointer;\n  position: absolute;\n  top: 21px;\n  height: 60px;\n  width: 50px;\n  overflow: hidden; }\n\n.jb-pro-container .jb-pro-volume-panel-text {\n  position: absolute;\n  left: 0;\n  top: 0;\n  width: 50px;\n  height: 20px;\n  line-height: 20px;\n  text-align: center;\n  color: #fff;\n  font-size: 12px; }\n\n.jb-pro-container .jb-pro-volume-panel-handle {\n  position: absolute;\n  top: 48px;\n  left: 50%;\n  width: 12px;\n  height: 12px;\n  border-radius: 12px;\n  margin-left: -6px;\n  background: #fff; }\n  .jb-pro-container .jb-pro-volume-panel-handle::before {\n    bottom: -54px;\n    background: #fff; }\n  .jb-pro-container .jb-pro-volume-panel-handle::after {\n    bottom: 6px;\n    background: rgba(255, 255, 255, 0.2); }\n  .jb-pro-container .jb-pro-volume-panel-handle::before, .jb-pro-container .jb-pro-volume-panel-handle::after {\n    content: '';\n    position: absolute;\n    display: block;\n    left: 50%;\n    width: 3px;\n    margin-left: -1px;\n    height: 60px; }\n\n.jb-pro-container.jb-pro-fullscreen-web .jb-pro-controls {\n  width: 100vh; }\n\n.jb-pro-container.jb-pro-fullscreen-web .jb-pro-play-big:after {\n  transform: translate(-50%, -50%) rotate(270deg); }\n\n.jb-pro-container.jb-pro-fullscreen-web .jb-pro-loading {\n  flex-direction: row; }\n\n.jb-pro-container.jb-pro-fullscreen-web .jb-pro-loading-text {\n  transform: rotate(270deg); }\n\n.jb-pro-container .jb-pro-contextmenus {\n  display: none;\n  flex-direction: column;\n  position: absolute;\n  z-index: 120;\n  left: 10px;\n  top: 10px;\n  min-width: 200px;\n  padding: 5px 0;\n  background-color: rgba(0, 0, 0, 0.9);\n  border-radius: 3px; }\n  .jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu {\n    cursor: pointer;\n    font-size: 12px;\n    display: block;\n    color: #fff;\n    padding: 10px 15px;\n    overflow: hidden;\n    text-overflow: ellipsis;\n    white-space: nowrap;\n    text-shadow: 0 0 2px rgba(0, 0, 0, 0.5);\n    border-bottom: 1px solid rgba(255, 255, 255, 0.1); }\n    .jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu a {\n      color: #fff;\n      text-decoration: none; }\n    .jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu span {\n      display: inline-block;\n      padding: 0 7px; }\n      .jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu 
span:hover, .jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu span.art-current {\n        color: var(--theme); }\n    .jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu:hover {\n      background-color: rgba(255, 255, 255, 0.1); }\n    .jb-pro-container .jb-pro-contextmenus .jb-pro-contextmenu:last-child {\n      border-bottom: none; }\n\n.jb-pro-container.jb-pro-contextmenus-show .jb-pro-contextmenus {\n  display: flex; }\n\n.jb-pro-container .jb-pro-extend-dom {\n  display: block;\n  position: relative;\n  width: 100%;\n  height: 100%;\n  display: none; }\n\n.jb-pro-container-playback .jb-pro-controls {\n  height: 48px; }\n  .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center {\n    flex: 1;\n    display: flex;\n    box-sizing: border-box;\n    justify-content: space-between;\n    font-size: 12px; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time {\n      box-sizing: border-box;\n      flex: 1;\n      position: relative;\n      height: 100%; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-inner {\n      width: 300px;\n      height: 100%;\n      overflow-y: hidden;\n      overflow-x: auto; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-current-time {\n      position: absolute;\n      left: 0px;\n      top: 0;\n      height: 15px;\n      width: 1px;\n      background-color: red;\n      text-align: center;\n      z-index: 1; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-current-time-text {\n      position: absolute;\n      box-sizing: border-box;\n      padding: 0 5px;\n      width: 60px;\n      left: -25px;\n      top: 15px;\n      border: 1px solid red;\n      height: 15px;\n      line-height: 15px;\n      cursor: move;\n      background-color: #fff;\n      color: #000;\n      -ms-user-select: none;\n      -webkit-user-select: none;\n      -moz-user-select: none;\n      user-select: none; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll {\n      position: relative;\n      width: 1440px;\n      margin: 0 auto; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll.one-hour {\n        width: 1440px; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll.half-hour {\n        width: 2880px; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll.ten-min {\n        width: 8640px; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll.five-min {\n        width: 17280px; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-scroll.one-min {\n        width: 86400px; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-time-list {\n      position: relative;\n      background-color: #ccc;\n      height: 48px; }\n    .jb-pro-container-playback .jb-pro-controls 
.jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-day {\n      height: 100%;\n      overflow: hidden; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-one-wrap {\n      height: 8px;\n      z-index: 1; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-second-wrap {\n      height: 25px; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-controls-playback-btns {\n      display: flex;\n      align-items: center; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute-one, .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-second-one {\n      float: left;\n      width: 1px;\n      height: 8px;\n      margin: 0;\n      cursor: default;\n      position: relative;\n      z-index: 1; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute-one.active, .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-second-one.active {\n        background-color: orange;\n        cursor: pointer; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute-one.start, .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-second-one.start {\n        background-color: #999; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute-one:hover .jb-pro-playback-time-title-tips, .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-second-one:hover .jb-pro-playback-time-title-tips {\n        visibility: visible;\n        opacity: 1; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-title-tips {\n      pointer-events: none;\n      position: absolute;\n      left: 0;\n      top: 100%;\n      visibility: hidden;\n      opacity: 0;\n      transform: translateX(13%);\n      transition: visibility 300ms ease 0s, opacity 300ms ease 0s;\n      background-color: black;\n      border-radius: 4px;\n      z-index: 1; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-title-tips.jb-pro-playback-time-title-tips-left {\n        transform: translateX(-100%); }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-title-tips .jb-pro-playback-time-title {\n        display: inline-block;\n        padding: 2px 5px;\n        font-size: 12px;\n        white-space: nowrap;\n        color: white; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-hour, .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute {\n      float: left;\n      position: relative;\n      width: 60px;\n      box-sizing: border-box;\n      border-top: 1px solid #999;\n      -ms-user-select: none;\n      -webkit-user-select: none;\n      -moz-user-select: none;\n      user-select: none;\n      
text-align: left;\n      height: 25px;\n      line-height: 25px; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-hour:first-child, .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute:first-child {\n        border-left: 0; }\n        .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-hour:first-child .jb-pro-playback-time-hour-text, .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute:first-child .jb-pro-playback-time-hour-text {\n          left: 0; }\n      .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-hour:after, .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute:after {\n        content: '';\n        position: absolute;\n        left: 0;\n        top: -8px;\n        width: 1px;\n        height: 14px;\n        background-color: #999; }\n    .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-hour-text, .jb-pro-container-playback .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-center .jb-pro-playback-time-minute-text {\n      position: absolute;\n      left: -13px; }\n\n.jb-pro-container-playback .jb-pro-playback-narrow.disabled .jb-pro-icon-narrow {\n  cursor: no-drop; }\n\n.jb-pro-container-playback .jb-pro-playback-expand.disabled .jb-pro-icon-expand {\n  cursor: no-drop; }\n\n.jb-pro-container-playback .jb-pro-control-progress-simple {\n  position: absolute;\n  box-sizing: border-box;\n  left: 0;\n  top: -2px;\n  width: 100%;\n  display: flex;\n  flex-direction: row;\n  align-items: center;\n  height: 8px;\n  cursor: pointer; }\n  .jb-pro-container-playback .jb-pro-control-progress-simple:hover {\n    top: 0px;\n    align-items: flex-start; }\n    .jb-pro-container-playback .jb-pro-control-progress-simple:hover .jb-pro-control-progress-inner {\n      height: 100%; }\n      .jb-pro-container-playback .jb-pro-control-progress-simple:hover .jb-pro-control-progress-inner .jb-pro-progress-indicator {\n        transform: scale(1, 1);\n        visibility: visible; }\n\n.jb-pro-container-playback .jb-pro-control-progress-inner {\n  display: flex;\n  align-items: center;\n  position: relative;\n  height: 50%;\n  width: 100%;\n  transition: all 0.2s ease;\n  background: rgba(255, 255, 255, 0.5); }\n\n.jb-pro-container-playback .jb-pro-progress-hover {\n  display: none;\n  width: 0%; }\n\n.jb-pro-container-playback .jb-pro-progress-played {\n  position: absolute;\n  left: 0;\n  top: 0;\n  right: 0;\n  bottom: 0;\n  height: 100%;\n  width: 0;\n  background-color: orange; }\n\n.jb-pro-container-playback .jb-pro-progress-indicator {\n  visibility: hidden;\n  align-items: center;\n  justify-content: center;\n  position: absolute;\n  z-index: 40;\n  border-radius: 50%;\n  transform: scale(0.1, 0.1);\n  transition: transform 0.1s ease-in-out; }\n  .jb-pro-container-playback .jb-pro-progress-indicator .jb-pro-icon {\n    width: 100%;\n    height: 100%;\n    pointer-events: none;\n    user-select: none; }\n  .jb-pro-container-playback .jb-pro-progress-indicator:hover {\n    transform: scale(1.2, 1.2) !important; }\n\n.jb-pro-container-playback .jb-pro-progress-tip {\n  display: none;\n  position: absolute;\n  z-index: 50;\n  
top: -25px;\n  left: 0;\n  height: 20px;\n  padding: 0 5px;\n  line-height: 20px;\n  color: #fff;\n  font-size: 12px;\n  text-align: center;\n  background: rgba(0, 0, 0, 0.7);\n  border-radius: 3px;\n  font-weight: bold;\n  white-space: nowrap; }\n\n.jb-pro-container-playback.jb-pro-fullscreen-web .jb-pro-controls .jb-pro-controls-bottom .jb-pro-controls-playback-time-inner {\n  overflow-y: auto; }\n\n.jb-pro-zoom-control {\n  cursor: grab; }\n\n.jb-pro-performance-panel {\n  position: absolute;\n  box-sizing: border-box;\n  z-index: 10000;\n  left: 0;\n  top: 0;\n  padding: 5px;\n  font-size: 10px;\n  background: rgba(0, 0, 0, 0.2);\n  color: #fff;\n  max-height: 100%;\n  overflow-y: auto;\n  display: none; }\n  .jb-pro-performance-panel .jb-pro-performance-item {\n    display: flex;\n    align-items: center;\n    margin-top: 3px;\n    color: white; }\n  .jb-pro-performance-panel .jb-pro-performance-item-block {\n    height: 10px; }\n\n.jb-pro-tips-message {\n  position: absolute;\n  top: 0;\n  left: 0;\n  width: 100%;\n  height: 100%;\n  background: linear-gradient(180deg, rgba(0, 0, 0, 0.8), rgba(255, 255, 255, 0));\n  overflow: auto;\n  box-sizing: content-box;\n  display: none; }\n  .jb-pro-tips-message:before {\n    color: rgba(255, 255, 255, 0.3);\n    content: 'X';\n    font-family: Arial, Helvetica, sans-serif;\n    font-size: 40px;\n    left: 0;\n    line-height: 1;\n    margin-top: -20px;\n    position: absolute;\n    text-shadow: 2em 2em 4em #000;\n    text-align: center;\n    top: 50%;\n    vertical-align: middle;\n    width: 100%; }\n  .jb-pro-tips-message .jb-pro-tips-message-close {\n    position: absolute;\n    z-index: 99999;\n    right: 0px;\n    top: 0px;\n    width: 40px;\n    height: 40px;\n    display: flex;\n    align-items: center;\n    justify-content: center; }\n    .jb-pro-tips-message .jb-pro-tips-message-close .jb-pro-tips-message-close-icon {\n      width: 20px;\n      height: 20px;\n      border-radius: 10px;\n      cursor: pointer;\n      background: url(\"../assets/close.png\") no-repeat center;\n      background-color: #fff;\n      background-size: 100% 100%; }\n  .jb-pro-tips-message .jb-pro-tips-message-content {\n    overflow: auto;\n    padding: 35px;\n    box-sizing: border-box;\n    width: 100%;\n    height: 100%; }\n    .jb-pro-tips-message .jb-pro-tips-message-content .jb-pro-tips-message-content-item {\n      font-size: 14px;\n      color: #fff;\n      text-align: center;\n      line-height: 1.5; }\n"]} */"; styleInject(css_248z$1); // todo: 待定 var hotkey = ((player, control) => { const { events: { proxy } } = player; const keys = {}; function addHotkey(key, event) { if (keys[key]) { keys[key].push(event); } else { keys[key] = [event]; } } // addHotkey(HOT_KEY.esc, () => { if (player.fullscreen) { player.fullscreen = false; } }); // addHotkey(HOT_KEY.arrowUp, () => { player.volume += 0.05; }); // addHotkey(HOT_KEY.arrowDown, () => { player.volume -= 0.05; }); proxy(window, 'keydown', event => { if (control.isFocus) { const tag = document.activeElement.tagName.toUpperCase(); const editable = document.activeElement.getAttribute('contenteditable'); if (tag !== 'INPUT' && tag !== 'TEXTAREA' && editable !== '' && editable !== 'true') { const events = keys[event.keyCode]; if (events) { event.preventDefault(); events.forEach(fn => fn()); } } } }); }); class Control { constructor(player) { this.player = player; this.TAG_NAME = 'Control'; this.extendBtnList = []; template(player, this); if (player._opt.extendOperateBtns.length > 0) { 
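// Example (illustrative sketch, not part of this bundle): the shape of one `extendOperateBtns`
// entry as consumed by addExtendBtn() below. `name` and `icon` are required; `activeIcon` plus
// `activeClick` enable a toggled state; `index` picks the insert position among the right-hand
// controls; click handlers are invoked with the player as `this` and, in multi-instance mode,
// receive the multi index as a second argument. All URLs and names here are placeholders.
//
// const talkBtn = {
//   name: 'talk',                           // required, also used in the generated class names
//   icon: './icons/talk.png',               // required, default-state icon
//   iconHover: './icons/talk-hover.png',
//   iconTitle: 'Start talk',
//   activeIcon: './icons/talk-active.png',  // optional active (toggled) state
//   activeIconHover: './icons/talk-active-hover.png',
//   activeIconTitle: 'Stop talk',
//   index: 0,                               // position within the right control bar
//   click: function (event, multiIndex) { /* switch into the active state */ },
//   activeClick: function (event, multiIndex) { /* switch back to the normal state */ }
// };
// // passed in via the player option: extendOperateBtns: [talkBtn]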
player._opt.extendOperateBtns.forEach(btn => { this.addExtendBtn(btn); }); } if (player._opt.extendDomConfig && player._opt.extendDomConfig.html) { this.addExtendDom(player._opt.extendDomConfig); } property(player, this); observer$1(player, this); events(player, this); if (player._opt.hotKey) { hotkey(player, this); } this.btnIndex = 0; // 当触发异常机制,要保留最后一帧画面 this.initLoadingBackground(); // update loading style if (isNotEmptyObject(player._opt.loadingIconStyle)) { this.initLoadingIconStyle(player._opt.loadingIconStyle); } if (isNotEmptyObject(player._opt.ptzPositionConfig)) { this.updatePtzPosition(player._opt.ptzPositionConfig); } this.kbpsShow = '0 KB/s'; this.player.debug.log('Control', 'init'); } destroy() { // performance if (this.$performancePanel) { this.$performancePanel.innerHTML = ''; const result = removeElement(this.$performancePanel); if (!result) { const $performancePanel = this.player.$container.querySelector('.jb-pro-performance-panel'); if ($performancePanel && this.player.$container) { this.player.$container.removeChild($performancePanel); } } } // if (this.$poster) { const result = removeElement(this.$poster); if (!result) { const $poster = this.player.$container.querySelector('.jb-pro-poster'); if ($poster && this.player.$container) { this.player.$container.removeChild($poster); } } } if (this.$loading) { const result = removeElement(this.$loading); if (!result) { const $loading = this.player.$container.querySelector('.jb-pro-loading'); if ($loading && this.player.$container) { this.player.$container.removeChild($loading); } } } if (this.$loadingBg) { const result = removeElement(this.$loadingBg); if (!result) { const $loadingBg = this.player.$container.querySelector('.jb-pro-loading-bg'); if ($loadingBg && this.player.$container) { this.player.$container.removeChild($loadingBg); } } } if (this.$controls) { const result = removeElement(this.$controls); if (!result) { const $controls = this.player.$container.querySelector('.jb-pro-controls'); if ($controls && this.player.$container) { this.player.$container.removeChild($controls); } } } if (this.$playBig) { const result = removeElement(this.$playBig); if (!result) { const $playBig = this.player.$container.querySelector('.jb-pro-play-big'); if ($playBig && this.player.$container) { this.player.$container.removeChild($playBig); } } } if (this.$recording) { const result = removeElement(this.$recording); if (!result) { const $recording = this.player.$container.querySelector('.jb-pro-recording'); if ($recording && this.player.$container) { this.player.$container.removeChild($recording); } } } if (this.$ptzControl) { const result = removeElement(this.$ptzControl); if (!result) { const $ptzControl = this.player.$container.querySelector('.jb-pro-ptz-controls'); if ($ptzControl && this.player.$container) { this.player.$container.removeChild($ptzControl); } } } if (this.$zoomControls) { const result = removeElement(this.$zoomControls); if (!result) { const $zoomControls = this.player.$container.querySelector('.jb-pro-zoom-controls'); if ($zoomControls && this.player.$container) { this.player.$container.removeChild($zoomControls); } } } if (this.$contextmenus) { this.$contextmenus.innerHTML = ''; const result = removeElement(this.$contextmenus); if (!result) { const $contextmenus = this.player.$container.querySelector('.jb-pro-contextmenus'); if ($contextmenus && this.player.$container) { this.player.$container.removeChild($contextmenus); } } } if (this.$tipsMessage) { const result = removeElement(this.$tipsMessage); if (!result) { 
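// The teardown above repeats one pattern per control element: try removeElement() first and,
// when that reports failure, fall back to re-querying the node by class and detaching it from
// the container — as done for the tips message right below. A minimal sketch of how that
// pattern could be factored into a helper (hypothetical, not part of this bundle):
//
// const removeByClass = (container, node, selector) => {
//   if (!node) return;
//   if (!removeElement(node)) {
//     const fallback = container && container.querySelector(selector);
//     if (fallback && container) {
//       container.removeChild(fallback);
//     }
//   }
// };
// // e.g. removeByClass(this.player.$container, this.$tipsMessage, '.jb-pro-tips-message');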
const $tipsMessage = this.player.$container.querySelector('.jb-pro-tips-message'); if ($tipsMessage && this.player.$container) { this.player.$container.removeChild($tipsMessage); } } } if (this.$extendDom) { const result = removeElement(this.$extendDom); if (!result) { const $extendDom = this.player.$container.querySelector('.jb-pro-extend-dom'); if ($extendDom && this.player.$container) { this.player.$container.removeChild($extendDom); } } } this.btnIndex = 0; this.extendBtnList = []; this.kbpsShow = '0 KB/s'; if (this.player.$container) { this.player.$container.classList.remove('jb-pro-controls-show-auto-hide'); this.player.$container.classList.remove('jb-pro-controls-show'); } this.player.debug.log('Control', 'destroy'); } getBtnIndex() { return this.btnIndex++; } autoSize() { const player = this.player; player.$container.style.padding = '0 0'; const playerWidth = player.width; const playerHeight = player.height; const playerRatio = playerWidth / playerHeight; const canvasWidth = player.video.$videoElement.width; const canvasHeight = player.video.$videoElement.height; const canvasRatio = canvasWidth / canvasHeight; if (playerRatio > canvasRatio) { const padding = (playerWidth - playerHeight * canvasRatio) / 2; player.$container.style.padding = `0 ${padding}px`; } else { const padding = (playerHeight - playerWidth / canvasRatio) / 2; player.$container.style.padding = `${padding}px 0`; } } initLoadingBackground() { const player = this.player; if (player._opt.loadingBackground && player._opt.loadingBackgroundWidth && player._opt.loadingBackgroundHeight) { player.debug.log('Control', 'initLoadingBackground()'); if (this.player._opt.aspectRatio === 'default' || isMobile()) { if (player.getRenderType() === RENDER_TYPE.canvas) { this._initLoadingBackground(); } else if (player.getRenderType() === RENDER_TYPE.video) { this._initLoadingBackground(); } } else { this._initLoadingBackgroundForRatio(); } setStyle(this.$loadingBg, 'display', 'block'); // 用完就要销毁 player._opt.loadingBackground = ''; player._opt.loadingBackgroundWidth = 0; player._opt.loadingBackgroundHeight = 0; } } initLoadingIconStyle(style) { const $loadingIcon = this.player.$container.querySelector('.jb-pro-icon-loading'); if ($loadingIcon) { if (style.width) { setStyle($loadingIcon, 'width', `${style.width}px`); } if (style.height) { setStyle($loadingIcon, 'height', `${style.height}px`); } if (style.background) { setStyle($loadingIcon, 'backgroundImage', `url("${style.background}")`); } if (style.hasAnimation === false) { setStyle($loadingIcon, 'animationName', "none"); setStyle($loadingIcon, 'animationDuration', 0); setStyle($loadingIcon, 'animationTimingFunction', "ease"); setStyle($loadingIcon, 'animationIterationCount', 1); } } } _initLoadingBackgroundForRatio() { const ratioArray = this.player._opt.aspectRatio.split(':').map(Number); let width = this.player.width; let height = this.player.height; const option = this.player._opt; let controlHeight = 0; if (option.hasControl && !option.controlAutoHide) { controlHeight = option.playType === PLAY_TYPE.playbackTF ? 
CONTROL_PLAYBACK_HEIGHT : CONTROL_HEIGHT; height -= controlHeight; } const videoInfo = { width: this.player._opt.loadingBackgroundWidth, height: this.player._opt.loadingBackgroundHeight }; const videoRatio = videoInfo.width / videoInfo.height; const setupRatio = ratioArray[0] / ratioArray[1]; this.$loadingBgImage.src = this.player._opt.loadingBackground; if (videoRatio > setupRatio) { const percentage = setupRatio * videoInfo.height / videoInfo.width; this.$loadingBgImage.style.width = `${percentage * 100}%`; this.$loadingBgImage.style.height = `calc(100% - ${controlHeight}px)`; this.$loadingBgImage.style.padding = `0 ${(width - width * percentage) / 2}px`; } else { const percentage = videoInfo.width / setupRatio / videoInfo.height; this.$loadingBgImage.style.width = '100%'; this.$loadingBgImage.style.height = `calc(${percentage * 100}% - ${controlHeight}px)`; this.$loadingBgImage.style.padding = `${(height - height * percentage) / 2}px 0`; } } _initLoadingBackground() { const player = this.player; let height = player.height; const option = player._opt; if (option.hasControl && !option.controlAutoHide) { const controlHeight = option.playType === PLAY_TYPE.playbackTF ? CONTROL_PLAYBACK_HEIGHT : CONTROL_HEIGHT; height -= controlHeight; } let resizeWidth = player.width; let resizeHeight = height; const rotate = option.rotate; if (rotate === 270 || rotate === 90) { resizeWidth = height; resizeHeight = player.width; } this.$loadingBgImage.width = resizeWidth; this.$loadingBgImage.height = resizeHeight; this.$loadingBgImage.src = player._opt.loadingBackground; let left = (player.width - resizeWidth) / 2; let top = (height - resizeHeight) / 2; let objectFill = 'contain'; if (!option.isResize) { objectFill = 'fill'; } if (option.isFullResize) { objectFill = 'none'; } let transform = ''; if (option.mirrorRotate === 'none') { if (rotate) { transform += ' rotate(' + rotate + 'deg)'; } } if (option.mirrorRotate === 'level') { transform += ' rotateY(180deg)'; // 水平镜像翻转 } else if (option.mirrorRotate === 'vertical') { transform += ' rotateX(180deg)'; // 垂直镜像翻转 } if (this.player._opt.videoRenderSupportScale) { this.$loadingBgImage.style.objectFit = objectFill; } this.$loadingBgImage.style.transform = transform; this.$loadingBgImage.style.padding = "0"; this.$loadingBgImage.style.left = left + "px"; this.$loadingBgImage.style.top = top + "px"; } _validateExtendBtn(options) { let result = true; if (!options.name) { this.player.debug.warn('Control', 'extend button name is required'); result = false; } if (result) { const findIndex = this.extendBtnList.findIndex(item => item.name === options.name); if (findIndex !== -1) { this.player.debug.warn('Control', `extend button name: ${options.name} is already exist`); result = false; } } if (result) { if (!options.icon) { this.player.debug.warn('Control', 'extend button icon is required'); result = false; } } return result; } addExtendBtn() { let options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; const defaultOptions = getDefaultButtonOptions(); options = Object.assign({}, defaultOptions, options); if (!this._validateExtendBtn(options)) { return; } const name = options.name || ''; const $controlsRight = this.$controlsRight; const hasActive = options.activeIcon && options.activeClick; const $btn = `
${options.icon ? `
${options.iconTitle ? ` ${options.iconTitle} ` : ``}
` : ''} ${options.activeIcon ? `
${options.activeIconTitle ? ` ${options.activeIconTitle} ` : ``}
` : ''}
`; const $childList = Array.from($controlsRight.children); const nextChild = $childList[options.index]; if (nextChild) { // insert before nextChild.insertAdjacentHTML('beforebegin', $btn); } else { append($controlsRight, $btn); } const $iconContainer = $controlsRight.querySelector(`.jb-pro-controls-item-wrap-${name}`); const $icon = options.icon ? $controlsRight.querySelector(`.jb-pro-icon-extend-${name}`) : null; const $iconWrap = options.icon ? $controlsRight.querySelector(`.jb-pro-control-extend-${name}`) : null; const $activeIcon = options.activeIcon ? $controlsRight.querySelector(`.jb-pro-icon-extend-${name}-active`) : null; const $activeIconWrap = options.activeIcon ? $controlsRight.querySelector(`.jb-pro-control-extend-${name}-active`) : null; const { events: { proxy }, debug } = this.player; if (options.icon) { setStyle($icon, 'background', `url(${options.icon}) no-repeat center`); setStyle($icon, 'background-size', '100% 100%'); setStyle($iconWrap, 'display', 'none'); if (options.iconHover) { proxy($icon, 'mouseover', () => { setStyle($icon, 'background', `url(${options.iconHover}) no-repeat center`); setStyle($icon, 'background-size', '100% 100%'); }); proxy($icon, 'mouseout', () => { setStyle($icon, 'background', `url(${options.icon}) no-repeat center`); setStyle($icon, 'background-size', '100% 100%'); }); } } if (options.activeIcon) { setStyle($activeIcon, 'background', `url(${options.activeIcon}) no-repeat center`); setStyle($activeIcon, 'background-size', '100% 100%'); setStyle($activeIconWrap, 'display', 'none'); if (options.activeIconHover) { proxy($activeIcon, 'mouseover', () => { setStyle($activeIcon, 'background', `url(${options.activeIconHover}) no-repeat center`); setStyle($activeIcon, 'background-size', '100% 100%'); }); proxy($activeIcon, 'mouseout', () => { setStyle($activeIcon, 'background', `url(${options.activeIcon}) no-repeat center`); setStyle($activeIcon, 'background-size', '100% 100%'); }); } } if (options.click && $icon) { proxy($icon, 'click', event => { event.preventDefault(); if (hasActive) { setStyle($iconWrap, 'display', 'none'); setStyle($activeIconWrap, 'display', 'flex'); } if (this.player.isInMulti()) { options.click.call(this.player, event, this.player._opt.multiIndex); } else { options.click.call(this.player, event); } }); } if (options.activeClick && $activeIcon) { proxy($activeIcon, 'click', event => { event.preventDefault(); setStyle($iconWrap, 'display', 'flex'); setStyle($activeIconWrap, 'display', 'none'); if (this.player.isInMulti()) { options.activeClick.call(this.player, event, this.player._opt.multiIndex); } else { options.activeClick.call(this.player, event); } }); } this.extendBtnList.push({ name, $iconContainer, $iconWrap, $activeIconWrap }); } addExtendDom(extendDomConfig) { this.player.debug.log(this.TAG_NAME, 'addExtendDom'); if (extendDomConfig.html) { const extendDomWrap = `
${extendDomConfig.html}
`; this.player.$container.insertAdjacentHTML('beforeend', extendDomWrap); Object.defineProperty(this, '$extendDom', { value: this.player.$container.querySelector('.jb-pro-extend-dom') }); // show if (extendDomConfig.showBeforePlay) { setStyle(this.$extendDom, 'display', 'block'); } } } toggleExtendDom(flag) { if (this.$extendDom) { if (!isBoolean(flag)) { flag = this.$extendDom.style.display === 'none'; } if (flag) { setStyle(this.$extendDom, 'display', 'block'); } else { setStyle(this.$extendDom, 'display', 'none'); } } } updateExtendDom(extendDom) { this.player.debug.log(this.TAG_NAME, 'updateExtendDom'); if (this.$extendDom) { this.$extendDom.innerHTML = extendDom; } } removeExtendDom() { this.player.debug.log(this.TAG_NAME, 'removeExtendDom'); if (this.$extendDom) { this.$extendDom.innerHTML = ''; } } updateLoadingText(text) { if (this.$loadingText) { this.$loadingText.innerText = text; } } getExtendBtnList() { return this.extendBtnList; } showTipsMessage(content, type) { const $tipsMessage = this.$tipsMessage; const $tipsMessageContent = this.$tipsMessageContent; if ($tipsMessage) { const htmlContent = `
${content}
${type ? `
Error Type:${type}
` : ''} `; $tipsMessageContent.innerHTML = htmlContent; setStyle($tipsMessage, 'display', 'block'); } } hideTipsMessage() { const $tipsMessage = this.$tipsMessage; if ($tipsMessage) { $tipsMessageContent.innerHTML = ''; setStyle($tipsMessage, 'display', 'none'); } } updatePtzPosition(position) { const $ptzControl = this.$ptzControl; if (isNotEmptyObject(position) && $ptzControl) { let left = 'auto'; if (position.left) { left = Number(position.left) === position.left ? position.left + 'px' : position.left; } setStyle($ptzControl, 'left', left); let top = 'auto'; if (position.top) { top = Number(position.top) === position.top ? position.top + 'px' : position.top; } setStyle($ptzControl, 'top', top); let bottom = 'auto'; if (position.bottom) { bottom = Number(position.bottom) === position.bottom ? position.bottom + 'px' : position.bottom; } setStyle($ptzControl, 'bottom', bottom); let right = 'auto'; if (position.right) { right = Number(position.right) === position.right ? position.right + 'px' : position.right; } setStyle($ptzControl, 'right', right); } } } var css_248z = ".jb-pro-container{position:relative;width:100%;height:100%;overflow:hidden}.jb-pro-container.jb-pro-fullscreen-web{position:fixed;z-index:9999;left:0;top:0;right:0;bottom:0;width:100vw!important;height:100vh!important;background:#000}\n/*# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbInN0eWxlLnNjc3MiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsa0JBQ0UsaUJBQWtCLENBQ2xCLFVBQVcsQ0FDWCxXQUFZLENBQ1osZUFBa0IsQ0FDbEIsd0NBQ0UsY0FBZSxDQUNmLFlBQWEsQ0FDYixNQUFPLENBQ1AsS0FBTSxDQUNOLE9BQVEsQ0FDUixRQUFTLENBQ1QscUJBQXVCLENBQ3ZCLHNCQUF3QixDQUN4QixlQUFrQiIsImZpbGUiOiJzdHlsZS5zY3NzIiwic291cmNlc0NvbnRlbnQiOlsiLmpiLXByby1jb250YWluZXIge1xuICBwb3NpdGlvbjogcmVsYXRpdmU7XG4gIHdpZHRoOiAxMDAlO1xuICBoZWlnaHQ6IDEwMCU7XG4gIG92ZXJmbG93OiBoaWRkZW47IH1cbiAgLmpiLXByby1jb250YWluZXIuamItcHJvLWZ1bGxzY3JlZW4td2ViIHtcbiAgICBwb3NpdGlvbjogZml4ZWQ7XG4gICAgei1pbmRleDogOTk5OTtcbiAgICBsZWZ0OiAwO1xuICAgIHRvcDogMDtcbiAgICByaWdodDogMDtcbiAgICBib3R0b206IDA7XG4gICAgd2lkdGg6IDEwMHZ3ICFpbXBvcnRhbnQ7XG4gICAgaGVpZ2h0OiAxMDB2aCAhaW1wb3J0YW50O1xuICAgIGJhY2tncm91bmQ6ICMwMDA7IH1cbiJdfQ== */"; styleInject(css_248z); var observer = (player => { const { _opt, debug, events: { proxy } } = player; if (_opt.supportDblclickFullscreen) { proxy(player.$container, 'dblclick', e => { const target = getTarget(e); const nodeName = target.nodeName.toLowerCase(); if (nodeName === 'canvas' || nodeName === 'video') { player.fullscreen = !player.fullscreen; } }); } // proxy(document, 'visibilitychange', () => { player.visibility = "visible" === document.visibilityState; debug.log('visibilitychange', document.visibilityState); if (_opt.hiddenAutoPause) { debug.log('visibilitychange', 'hiddenAutoPause is true ', document.visibilityState, player._isPlayingBeforePageHidden); if ("visible" === document.visibilityState) { if (player._isPlayingBeforePageHidden) { player.play(); } } else { player._isPlayingBeforePageHidden = player.playing; // hidden if (player.playing) { player.pause(); } } } }); if (isPc()) { proxy(document, ['click', 'contextmenu'], e => { if (includeFromEvent(e, player.$container)) { // is contextmenu if (isTrue(player._opt.disableContextmenu) && e.type === 'contextmenu') { e.preventDefault(); } player.isInput = e.target.tagName === 'INPUT'; player.isFocus = true; player.emit(EVENTS.focus); } else { player.isInput = false; player.isFocus = false; player.emit(EVENTS.blur); } }); } if (_opt.autoResize) { const resizeFn = throttle(() => { player.resize(); }, 
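// autoResize: bursts of window `resize`/`orientationchange` events (wired up right after this)
// are collapsed by throttle() into at most one player.resize() call per PLAYER_RESIZE_TIME
// window. A minimal trailing-edge throttle sketch with those semantics (illustrative only —
// the bundle ships its own throttle implementation elsewhere, which may differ in detail):
//
// const throttleSketch = (fn, wait) => {
//   let last = 0, timer = null;
//   return (...args) => {
//     const remaining = wait - (Date.now() - last);
//     if (remaining <= 0) { last = Date.now(); fn(...args); }
//     else if (!timer) {
//       timer = setTimeout(() => { timer = null; last = Date.now(); fn(...args); }, remaining);
//     }
//   };
// };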
PLAYER_RESIZE_TIME); proxy(window, ['resize', 'orientationchange'], () => { resizeFn(); }); // check screen orientation change if (screen && screen.orientation && screen.orientation.onchange) { proxy(screen.orientation, 'change', () => { resizeFn(); }); } } }); // MP4 boxes generator for ISO BMFF (ISO Base Media File Format, defined in ISO/IEC 14496-12) class MP4$2 { static init() { MP4$2.types = { avc1: [], avcC: [], hvc1: [], hvcC: [], av01: [], av1C: [], btrt: [], dinf: [], dref: [], esds: [], ftyp: [], hdlr: [], mdat: [], mdhd: [], mdia: [], mfhd: [], minf: [], moof: [], moov: [], mp4a: [], mvex: [], mvhd: [], sdtp: [], stbl: [], stco: [], stsc: [], stsd: [], stsz: [], stts: [], tfdt: [], tfhd: [], traf: [], trak: [], trun: [], trex: [], tkhd: [], vmhd: [], smhd: [], '.mp3': [], Opus: [], dOps: [], 'ac-3': [], dac3: [], 'ec-3': [], dec3: [] }; for (let name in MP4$2.types) { if (MP4$2.types.hasOwnProperty(name)) { MP4$2.types[name] = [name.charCodeAt(0), name.charCodeAt(1), name.charCodeAt(2), name.charCodeAt(3)]; } } let constants = MP4$2.constants = {}; constants.FTYP = new Uint8Array([0x69, 0x73, 0x6F, 0x6D, // major_brand: isom 0x0, 0x0, 0x0, 0x1, // minor_version: 0x01 0x69, 0x73, 0x6F, 0x6D, // isom 0x61, 0x76, 0x63, 0x31 // avc1 ]); constants.STSD_PREFIX = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x01 // entry_count ]); constants.STTS = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x00 // entry_count ]); constants.STSC = constants.STCO = constants.STTS; constants.STSZ = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x00, // sample_size 0x00, 0x00, 0x00, 0x00 // sample_count ]); constants.HDLR_VIDEO = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x00, // pre_defined 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide' 0x00, 0x00, 0x00, 0x00, // reserved: 3 * 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x56, 0x69, 0x64, 0x65, 0x6F, 0x48, 0x61, 0x6E, 0x64, 0x6C, 0x65, 0x72, 0x00 // name: VideoHandler ]); constants.HDLR_AUDIO = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x00, // pre_defined 0x73, 0x6F, 0x75, 0x6E, // handler_type: 'soun' 0x00, 0x00, 0x00, 0x00, // reserved: 3 * 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x53, 0x6F, 0x75, 0x6E, 0x64, 0x48, 0x61, 0x6E, 0x64, 0x6C, 0x65, 0x72, 0x00 // name: SoundHandler ]); constants.DREF = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x01, // entry_count 0x00, 0x00, 0x00, 0x0C, // entry_size 0x75, 0x72, 0x6C, 0x20, // type 'url ' 0x00, 0x00, 0x00, 0x01 // version(0) + flags ]); // Sound media header constants.SMHD = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x00 // balance(2) + reserved(2) ]); // video media header constants.VMHD = new Uint8Array([0x00, 0x00, 0x00, 0x01, // version(0) + flags 0x00, 0x00, // graphicsmode: 2 bytes 0x00, 0x00, 0x00, 0x00, // opcolor: 3 * 2 bytes 0x00, 0x00]); } // Generate a box static box(type) { let size = 8; let result = null; let datas = Array.prototype.slice.call(arguments, 1); let arrayCount = datas.length; for (let i = 0; i < arrayCount; i++) { size += datas[i].byteLength; } result = new Uint8Array(size); result[0] = size >>> 24 & 0xFF; // size result[1] = size >>> 16 & 0xFF; result[2] = size >>> 8 & 0xFF; result[3] = size & 0xFF; result.set(type, 4); // type let offset = 8; for (let i = 0; i < arrayCount; i++) { // data body 
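// Every ISO BMFF box emitted by MP4$2.box() is laid out as [32-bit big-endian size][4-byte type][payload...],
// and the size field counts the 8-byte header itself. For example, MP4$2.box(MP4$2.types.ftyp, MP4$2.constants.FTYP)
// yields 8 + 16 = 24 bytes beginning with 0x00 0x00 0x00 0x18 'f' 't' 'y' 'p'; the loop below copies each
// payload fragment in order right after that header.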
result.set(datas[i], offset); offset += datas[i].byteLength; } return result; } // emit ftyp & moov /** * ftyp box与Moov box绑定,描述数据的类型、兼容协议以及视频参数 * @param meta * @returns {Uint8Array} */ static generateInitSegment(meta) { let ftyp = MP4$2.box(MP4$2.types.ftyp, MP4$2.constants.FTYP); let moov = MP4$2.moov(meta); let result = new Uint8Array(ftyp.byteLength + moov.byteLength); result.set(ftyp, 0); result.set(moov, ftyp.byteLength); return result; } // Movie metadata box static moov(meta) { let mvhd = MP4$2.mvhd(meta.timescale, meta.duration); let trak = MP4$2.trak(meta); let mvex = MP4$2.mvex(meta); return MP4$2.box(MP4$2.types.moov, mvhd, trak, mvex); } // Movie header box static mvhd(timescale, duration) { return MP4$2.box(MP4$2.types.mvhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags 0x00, 0x00, 0x00, 0x00, // creation_time 0x00, 0x00, 0x00, 0x00, // modification_time timescale >>> 24 & 0xFF, // timescale: 4 bytes timescale >>> 16 & 0xFF, timescale >>> 8 & 0xFF, timescale & 0xFF, duration >>> 24 & 0xFF, // duration: 4 bytes duration >>> 16 & 0xFF, duration >>> 8 & 0xFF, duration & 0xFF, 0x00, 0x01, 0x00, 0x00, // Preferred rate: 1.0 0x01, 0x00, 0x00, 0x00, // PreferredVolume(1.0, 2bytes) + reserved(2bytes) 0x00, 0x00, 0x00, 0x00, // reserved: 4 + 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, // ----begin composition matrix---- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // ----end composition matrix---- 0x00, 0x00, 0x00, 0x00, // ----begin pre_defined 6 * 4 bytes---- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // ----end pre_defined 6 * 4 bytes---- 0xFF, 0xFF, 0xFF, 0xFF // next_track_ID ])); } // Track box static trak(meta) { return MP4$2.box(MP4$2.types.trak, MP4$2.tkhd(meta), MP4$2.mdia(meta)); } // Track header box static tkhd(meta) { let trackId = meta.id, duration = meta.duration; let width = meta.presentWidth, height = meta.presentHeight; return MP4$2.box(MP4$2.types.tkhd, new Uint8Array([0x00, 0x00, 0x00, 0x07, // version(0) + flags 0x00, 0x00, 0x00, 0x00, // creation_time 0x00, 0x00, 0x00, 0x00, // modification_time trackId >>> 24 & 0xFF, // track_ID: 4 bytes trackId >>> 16 & 0xFF, trackId >>> 8 & 0xFF, trackId & 0xFF, 0x00, 0x00, 0x00, 0x00, // reserved: 4 bytes duration >>> 24 & 0xFF, // duration: 4 bytes duration >>> 16 & 0xFF, duration >>> 8 & 0xFF, duration & 0xFF, 0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // layer(2bytes) + alternate_group(2bytes) 0x00, 0x00, 0x00, 0x00, // volume(2bytes) + reserved(2bytes) 0x00, 0x01, 0x00, 0x00, // ----begin composition matrix---- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // ----end composition matrix---- width >>> 8 & 0xFF, // width and height width & 0xFF, 0x00, 0x00, // width height >>> 8 & 0xFF, height & 0xFF, 0x00, 0x00 // height ])); } // Media Box static mdia(meta) { return MP4$2.box(MP4$2.types.mdia, MP4$2.mdhd(meta), MP4$2.hdlr(meta), MP4$2.minf(meta)); } // Media header box static mdhd(meta) { let timescale = meta.timescale; let duration = meta.duration; return MP4$2.box(MP4$2.types.mdhd, new Uint8Array([0x00, // version(0) 0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, 0x00, // 
creation_time 0x00, 0x00, 0x00, 0x00, // modification_time timescale >>> 24 & 0xFF, // timescale: 4 bytes timescale >>> 16 & 0xFF, timescale >>> 8 & 0xFF, timescale & 0xFF, // timescale duration >>> 24 & 0xFF, // duration: 4 bytes duration >>> 16 & 0xFF, duration >>> 8 & 0xFF, duration & 0xFF, // duration 0x55, 0xC4, // language: und (undetermined) 0x00, 0x00 // pre_defined = 0 ])); } // Media handler reference box static hdlr(meta) { let data = null; if (meta.type === 'audio') { data = MP4$2.constants.HDLR_AUDIO; } else { data = MP4$2.constants.HDLR_VIDEO; } return MP4$2.box(MP4$2.types.hdlr, data); } // Media infomation box static minf(meta) { let xmhd = null; if (meta.type === 'audio') { xmhd = MP4$2.box(MP4$2.types.smhd, MP4$2.constants.SMHD); } else { xmhd = MP4$2.box(MP4$2.types.vmhd, MP4$2.constants.VMHD); } return MP4$2.box(MP4$2.types.minf, xmhd, MP4$2.dinf(), MP4$2.stbl(meta)); } // Data infomation box static dinf() { let result = MP4$2.box(MP4$2.types.dinf, MP4$2.box(MP4$2.types.dref, MP4$2.constants.DREF)); return result; } // Sample table box static stbl(meta) { let result = MP4$2.box(MP4$2.types.stbl, // type: stbl MP4$2.stsd(meta), // Sample Description Table MP4$2.box(MP4$2.types.stts, MP4$2.constants.STTS), // Time-To-Sample MP4$2.box(MP4$2.types.stsc, MP4$2.constants.STSC), // Sample-To-Chunk MP4$2.box(MP4$2.types.stsz, MP4$2.constants.STSZ), // Sample size MP4$2.box(MP4$2.types.stco, MP4$2.constants.STCO) // Chunk offset ); return result; } // Sample description box static stsd(meta) { if (meta.type === 'audio') { if (meta.audioType === 'mp3') { return MP4$2.box(MP4$2.types.stsd, MP4$2.constants.STSD_PREFIX, MP4$2.mp3(meta)); } // else: aac -> mp4a return MP4$2.box(MP4$2.types.stsd, MP4$2.constants.STSD_PREFIX, MP4$2.mp4a(meta)); } else { if (meta.videoType === 'avc') { // avc return MP4$2.box(MP4$2.types.stsd, MP4$2.constants.STSD_PREFIX, MP4$2.avc1(meta)); } else { // hevc return MP4$2.box(MP4$2.types.stsd, MP4$2.constants.STSD_PREFIX, MP4$2.hvc1(meta)); } } } static mp3(meta) { let channelCount = meta.channelCount; let sampleRate = meta.audioSampleRate; let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, // reserved(4) 0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2) 0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, channelCount, // channelCount(2) 0x00, 0x10, // sampleSize(2) 0x00, 0x00, 0x00, 0x00, // reserved(4) sampleRate >>> 8 & 0xFF, // Audio sample rate sampleRate & 0xFF, 0x00, 0x00]); return MP4$2.box(MP4$2.types['.mp3'], data); } static mp4a(meta) { let channelCount = meta.channelCount; let sampleRate = meta.audioSampleRate; let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, // reserved(4) 0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2) 0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes 0x00, 0x00, 0x00, 0x00, 0x00, channelCount, // channelCount(2) 0x00, 0x10, // sampleSize(2) 0x00, 0x00, 0x00, 0x00, // reserved(4) sampleRate >>> 8 & 0xFF, // Audio sample rate sampleRate & 0xFF, 0x00, 0x00]); return MP4$2.box(MP4$2.types.mp4a, data, MP4$2.esds(meta)); } static esds(meta) { let config = meta.config || []; let configSize = config.length; let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version 0 + flags 0x03, // descriptor_type 0x17 + configSize, // length3 0x00, 0x01, // es_id 0x00, // stream_priority 0x04, // descriptor_type 0x0F + configSize, // length 0x40, // codec: mpeg4_audio 0x15, // stream_type: Audio 0x00, 0x00, 0x00, // buffer_size 0x00, 0x00, 0x00, 0x00, // maxBitrate 0x00, 0x00, 0x00, 
0x00, // avgBitrate 0x05 // descriptor_type ].concat([configSize]).concat(config).concat([0x06, 0x01, 0x02 // GASpecificConfig ])); return MP4$2.box(MP4$2.types.esds, data); } // avc static avc1(meta) { let avcc = meta.avcc; const width = meta.codecWidth; const height = meta.codecHeight; let data = new Uint8Array([0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, width >>> 8 & 255, width & 255, height >>> 8 & 255, height & 255, 0, 72, 0, 0, 0, 72, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 255, 255]); return MP4$2.box(MP4$2.types.avc1, data, MP4$2.box(MP4$2.types.avcC, avcc)); } // hvc static hvc1(meta) { let avcc = meta.avcc; const width = meta.codecWidth; const height = meta.codecHeight; let data = new Uint8Array([0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, width >>> 8 & 255, width & 255, height >>> 8 & 255, height & 255, 0, 72, 0, 0, 0, 72, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 255, 255]); return MP4$2.box(MP4$2.types.hvc1, data, MP4$2.box(MP4$2.types.hvcC, avcc)); } // Movie Extends box static mvex(meta) { return MP4$2.box(MP4$2.types.mvex, MP4$2.trex(meta)); } // Track Extends box static trex(meta) { let trackId = meta.id; let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) + flags trackId >>> 24 & 0xFF, // track_ID trackId >>> 16 & 0xFF, trackId >>> 8 & 0xFF, trackId & 0xFF, 0x00, 0x00, 0x00, 0x01, // default_sample_description_index 0x00, 0x00, 0x00, 0x00, // default_sample_duration 0x00, 0x00, 0x00, 0x00, // default_sample_size 0x00, 0x01, 0x00, 0x01 // default_sample_flags ]); return MP4$2.box(MP4$2.types.trex, data); } // Movie fragment box static moof(meta, baseMediaDecodeTime) { return MP4$2.box(MP4$2.types.moof, MP4$2.mfhd(meta.sequenceNumber), MP4$2.traf(meta, baseMediaDecodeTime)); } // static mfhd(sequenceNumber) { let data = new Uint8Array([0x00, 0x00, 0x00, 0x00, sequenceNumber >>> 24 & 0xFF, // sequence_number: int32 sequenceNumber >>> 16 & 0xFF, sequenceNumber >>> 8 & 0xFF, sequenceNumber & 0xFF]); return MP4$2.box(MP4$2.types.mfhd, data); } // Track fragment box static traf(meta, baseMediaDecodeTime) { let trackId = meta.id; // Track fragment header box let tfhd = MP4$2.box(MP4$2.types.tfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) & flags trackId >>> 24 & 0xFF, // track_ID trackId >>> 16 & 0xFF, trackId >>> 8 & 0xFF, trackId & 0xFF])); // Track Fragment Decode Time let tfdt = MP4$2.box(MP4$2.types.tfdt, new Uint8Array([0x00, 0x00, 0x00, 0x00, // version(0) & flags baseMediaDecodeTime >>> 24 & 0xFF, // baseMediaDecodeTime: int32 baseMediaDecodeTime >>> 16 & 0xFF, baseMediaDecodeTime >>> 8 & 0xFF, baseMediaDecodeTime & 0xFF])); let sdtp = MP4$2.sdtp(meta); let trun = MP4$2.trun(meta, sdtp.byteLength + 16 + 16 + 8 + 16 + 8 + 8); return MP4$2.box(MP4$2.types.traf, tfhd, tfdt, trun, sdtp); } // Sample Dependency Type box static sdtp(meta) { let data = new Uint8Array(4 + 1); let flags = meta.flags; data[4] = flags.isLeading << 6 | flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy; return MP4$2.box(MP4$2.types.sdtp, data); } // trun static trun(meta, offset) { let dataSize = 12 + 16; let data = new Uint8Array(dataSize); offset += 8 + dataSize; data.set([0x00, 0x00, 0x0F, 0x01, // version(0) & flags 0x00, 0x00, 0x00, 0x01, // sample_count offset >>> 24 & 0xFF, // data_offset offset >>> 16 & 0xFF, offset >>> 8 & 
0xFF, offset & 0xFF], 0); let duration = meta.duration; let size = meta.size; let flags = meta.flags; let cts = meta.cts; data.set([duration >>> 24 & 0xFF, // sample_duration duration >>> 16 & 0xFF, duration >>> 8 & 0xFF, duration & 0xFF, size >>> 24 & 0xFF, // sample_size size >>> 16 & 0xFF, size >>> 8 & 0xFF, size & 0xFF, flags.isLeading << 2 | flags.dependsOn, // sample_flags flags.isDependedOn << 6 | flags.hasRedundancy << 4 | flags.isNonSync, 0x00, 0x00, // sample_degradation_priority cts >>> 24 & 0xFF, // sample_composition_time_offset cts >>> 16 & 0xFF, cts >>> 8 & 0xFF, cts & 0xFF], 12); return MP4$2.box(MP4$2.types.trun, data); } // mdat static mdat(data) { return MP4$2.box(MP4$2.types.mdat, data); } } MP4$2.init(); const mpegAudioV10SampleRateTable = [44100, 48000, 32000, 0]; const mpegAudioV20SampleRateTable = [22050, 24000, 16000, 0]; const mpegAudioV25SampleRateTable = [11025, 12000, 8000, 0]; const mpegAudioL1BitRateTable = [0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, -1]; const mpegAudioL2BitRateTable = [0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, -1]; const mpegAudioL3BitRateTable = [0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, -1]; function parseMp3AudioSpecificConfig(mp3frame) { if (mp3frame.length < 4) { console.error(`Invalid MP3 packet, header missing!`); return; } let array = new Uint8Array(mp3frame.buffer); let result = null; if (array[0] !== 0xFF) { console.error(`Invalid MP3 packet, first byte != 0xFF `); return; } let ver = array[1] >>> 3 & 0x03; let layer = (array[1] & 0x06) >> 1; let bitrate_index = (array[2] & 0xF0) >>> 4; let sampling_freq_index = (array[2] & 0x0C) >>> 2; let channel_mode = array[3] >>> 6 & 0x03; let channel_count = channel_mode !== 3 ? 2 : 1; let sample_rate = 0; let bit_rate = 0; let codec = 'mp3'; switch (ver) { case 0: // MPEG 2.5 sample_rate = mpegAudioV25SampleRateTable[sampling_freq_index]; break; case 2: // MPEG 2 sample_rate = mpegAudioV20SampleRateTable[sampling_freq_index]; break; case 3: // MPEG 1 sample_rate = mpegAudioV10SampleRateTable[sampling_freq_index]; break; } switch (layer) { case 1: if (bitrate_index < mpegAudioL3BitRateTable.length) { bit_rate = mpegAudioL3BitRateTable[bitrate_index]; } break; case 2: if (bitrate_index < mpegAudioL2BitRateTable.length) { bit_rate = mpegAudioL2BitRateTable[bitrate_index]; } break; case 3: if (bitrate_index < mpegAudioL1BitRateTable.length) { bit_rate = mpegAudioL1BitRateTable[bitrate_index]; } break; } result = { bitRate: bit_rate, samplingRate: sample_rate, channelCount: channel_count, codec: codec, originalCodec: codec, audioType: 'mp3' }; return result; } class MediaSource$3 extends Emitter { constructor(player) { super(); this.TAG_NAME = 'MediaSource'; this.player = player; this._resetInIt(); // Use ManagedMediaSource only if w3c MediaSource is not available (e.g. iOS Safari) this._useManagedMediaSource = 'ManagedMediaSource' in self && !('MediaSource' in self); this.mediaSource = this._useManagedMediaSource ? new self.ManagedMediaSource() : new self.MediaSource(); // if not check first frame is iframe, will set isVideoFirstIFrame = true this.isDecodeFirstIIframe = isFalse(player._opt.checkFirstIFrame) ? 
true : false; // this.mediaSourceObjectURL = null; if (!this._useManagedMediaSource) { this.mediaSourceObjectURL = window.URL.createObjectURL(this.mediaSource); } this.isSupportVideoFrameCallback = supportVideoFrameCallback(); this.canvasRenderInterval = null; if (player._opt.mseUseCanvasRender) { this.$videoElement = document.createElement('video'); if (this._useManagedMediaSource) { // Apple ManagedMediaSource this.$videoElement['disableRemotePlayback'] = true; this.$videoElement.srcObject = this.mediaSource; } else { this.$videoElement.src = this.mediaSourceObjectURL; } this.initVideoEvents(); } else { if (this._useManagedMediaSource) { // Apple ManagedMediaSource this.player.video.$videoElement['disableRemotePlayback'] = true; this.player.video.$videoElement.srcObject = this.mediaSource; } else { this.player.video.$videoElement.src = this.mediaSourceObjectURL; } this.$videoElement = this.player.video.$videoElement; } this._bindMediaSourceEvents(); this.audioSourceBufferCheckTimeout = null; this.audioSourceNoDataCheckTimeout = null; if (this.player.isPlayback()) { this.player.on(EVENTS.playbackPause, flag => { // play if (isFalse(flag)) { if (isTrue(player._opt.checkFirstIFrame)) { this.player.debug.log(this.TAG_NAME, `playbackPause is false and _opt.checkFirstIFrame is true so set isDecodeFirstIIframe = false`); this.isDecodeFirstIIframe = false; } this.clearUpAllSourceBuffer(); // this.$videoElement.currentTime = 0; this.$videoElement.play(); } else { // pause this.$videoElement.pause(); this.cacheTrack = {}; } }); } // listen visiblity change // 这个考虑移动到video 里面就行了。 this.player.on(EVENTS.visibilityChange, flag => { if (flag) { // setTimeout(() => { if (this.player.isPlaying() && this.$videoElement) { const lastTime = this.getVideoBufferLastTime(); if (lastTime > this.$videoElement.currentTime) { this.player.debug.log(this.TAG_NAME, `visibilityChange is true and lastTime is ${lastTime} and currentTime is ${this.$videoElement.currentTime} so set currentTime to lastTime`); this.$videoElement.currentTime = lastTime; } } }, 300); } }); if (this._useManagedMediaSource) { this.player.debug.log(this.TAG_NAME, 'init and using ManagedMediaSource'); } else { this.player.debug.log(this.TAG_NAME, 'init'); } } destroy() { this.stop(); this._clearAudioSourceBufferCheckTimeout(); this._clearAudioNoDataCheckTimeout(); this._stopCanvasRender(); if (this.eventListenList.length) { this.eventListenList.forEach(item => item()); this.eventListenList = []; } if (this.supportVideoFrameCallbackHandle && this.$videoElement) { this.$videoElement.cancelVideoFrameCallback(this.supportVideoFrameCallbackHandle); this.supportVideoFrameCallbackHandle = null; } if (this.$videoElement) { if (this.player._opt.mseUseCanvasRender) { if (this.$videoElement.src) { this.$videoElement.src = ''; this.$videoElement.removeAttribute('src'); } } this.$videoElement = null; } if (this.mediaSourceObjectURL) { window.URL.revokeObjectURL(this.mediaSourceObjectURL); this.mediaSourceObjectURL = null; } this._resetInIt(); this.off(); this.player.debug.log(this.TAG_NAME, 'destroy'); } needInitAudio() { return this.player._opt.hasAudio && this.player._opt.mseDecodeAudio; } _resetInIt() { this.isAvc = null; // this.isAAC = null; // this.videoMeta = {}; this.audioMeta = {}; this.sourceBuffer = null; this.audioSourceBuffer = null; this.hasInit = false; this.hasAudioInit = false; this.isInitInfo = false; this.isAudioInitInfo = false; this.audioMimeType = ''; this.videoMimeType = ''; this.cacheTrack = {}; this.cacheAudioTrack = {}; this.timeInit 
= false; this.sequenceNumber = 0; this.audioSequenceNumber = 0; this.firstRenderTime = null; this.firstAudioTime = null; // not used for now this.$videoElement = null; this.mediaSourceAppendBufferFull = false; this.mediaSourceAppendBufferError = false; this.mediaSourceAddSourceBufferError = false; this.mediaSourceBufferError = false; this.mediaSourceError = false; this.prevTimestamp = null; this.decodeDiffTimestamp = null; this.prevDts = null; this.prevAudioDts = null; this.prevPayloadBufferSize = 0; this.isWidthOrHeightChanged = false; this.prevTs = null; this.prevAudioTs = null; this.eventListenList = []; this.pendingRemoveRanges = []; this.pendingSegments = []; this.pendingAudioRemoveRanges = []; this.pendingAudioSegments = []; this.supportVideoFrameCallbackHandle = null; } get state() { return this.mediaSource && this.mediaSource.readyState; } // the source is open and ready to accept data appended via sourceBuffer.appendBuffer. get isStateOpen() { return this.state === MEDIA_SOURCE_STATE.open; } // the MediaSource is not attached to any media element (e.g. video.src); this is its state right after creation. get isStateClosed() { return this.state === MEDIA_SOURCE_STATE.closed; } // entered once endOfStream() has completed; the source is still attached to the media element. get isStateEnded() { return this.state === MEDIA_SOURCE_STATE.ended; } // the media duration in seconds; it can be both read (get) and assigned (set). get duration() { return this.mediaSource && this.mediaSource.duration || -1; } set duration(duration) { this.mediaSource.duration = duration; } _bindMediaSourceEvents() { const { debug, events: { proxy } } = this.player; // fired on "closed" -> "open" or "ended" -> "open". const sourceOpenProxyDestroy = proxy(this.mediaSource, MEDIA_SOURCE_EVENTS.sourceOpen, () => { this.player.debug.log(this.TAG_NAME, 'sourceOpen'); this._onMediaSourceSourceOpen(); this.player.emit(EVENTS.mseSourceOpen); }); // fired on "open" -> "closed" or "ended" -> "closed". const sourceCloseProxyDestroy = proxy(this.mediaSource, MEDIA_SOURCE_EVENTS.sourceClose, () => { this.player.debug.log(this.TAG_NAME, 'sourceClose'); this.player.emit(EVENTS.mseSourceClose); }); // fired on "open" -> "ended". const sourceendedProxyDestroy = proxy(this.mediaSource, MEDIA_SOURCE_EVENTS.sourceended, () => { this.player.debug.log(this.TAG_NAME, 'sourceended'); this.player.emit(EVENTS.mseSourceended); }); this.eventListenList.push(sourceOpenProxyDestroy, sourceCloseProxyDestroy, sourceendedProxyDestroy); // listen for timeupdate // tips: it keeps firing while the window is not minimized, but stops firing once the window is minimized. const timeUpdateProxyDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.timeUpdate, event => { // degraded/fallback path for when requestVideoFrameCallback is not supported. if (isFalse(this.isSupportVideoFrameCallback)) { if (this.player.checkIsInRender()) { this.player.handleRender(); } else { const ts = parseInt(event.timeStamp, 10); this.player.debug.log(this.TAG_NAME, `mseUseCanvasRender is ${this.player._opt.mseUseCanvasRender} and $videoElement ts is ${ts}, but not in render and vbps is ${this.player._stats.vbps} and fps is ${this.player._stats.fps}`); } } }); const ratechangeProxyDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.ratechange, () => { this.player.debug.log(this.TAG_NAME, 'video playback Rate change', this.$videoElement && this.$videoElement.playbackRate); if (this.$videoElement && this.$videoElement.paused) { this.player.debug.log(this.TAG_NAME, 'video is paused and next try to replay'); this.$videoElement.play(); } }); this.eventListenList.push(timeUpdateProxyDestroy, ratechangeProxyDestroy); if (this._useManagedMediaSource) { const startstreamingDestroy = proxy(this.mediaSource, MEDIA_SOURCE_EVENTS.startstreaming, () => { this.player.debug.log(this.TAG_NAME,
'ManagedMediaSource startstreaming'); this.player.emit(EVENTS.mseSourceStartStreaming); }); const endstreamingDestroy = proxy(this.mediaSource, MEDIA_SOURCE_EVENTS.endstreaming, () => { this.player.debug.log(this.TAG_NAME, 'ManagedMediaSource endstreaming'); this.player.emit(EVENTS.mseSourceEndStreaming); }); const qualitychangeDestroy = proxy(this.mediaSource, MEDIA_SOURCE_EVENTS.qualitychange, () => { this.player.debug.log(this.TAG_NAME, 'ManagedMediaSource qualitychange'); }); this.eventListenList.push(startstreamingDestroy, endstreamingDestroy, qualitychangeDestroy); } } _onMediaSourceSourceOpen() { // 1. remove evnet listener // 2. check _pendingSourceBufferInit list // 3. do Append Segments if (!this.sourceBuffer) { this.player.debug.log('MediaSource', 'onMediaSourceSourceOpen() sourceBuffer is null and next init'); this._initSourceBuffer(); } if (!this.audioSourceBuffer) { this.player.debug.log('MediaSource', 'onMediaSourceSourceOpen() audioSourceBuffer is null and next init'); this._initAudioSourceBuffer(); } // append immediately only if init segment in subsequence if (this._hasPendingSegments()) { this._doAppendSegments(); } } initVideoEvents() { const { proxy } = this.player.events; const canplayProxyDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.canplay, () => { this.player.debug.log(this.TAG_NAME, 'video canplay'); this.$videoElement.play().then(() => { this.player.emit(EVENTS.removeLoadingBgImage); if (supportVideoFrameCallback()) { this.supportVideoFrameCallbackHandle = this.$videoElement.requestVideoFrameCallback(this.videoFrameCallback.bind(this)); } else { // hls265 for canvas render if (this.player.isUseHls265()) { this._stopCanvasRender(); this.canvasRenderInterval = setInterval(() => { this.player.video.render({ $video: this.$videoElement, ts: parseInt(this.$videoElement.currentTime * 1000, 10) || 0 }); }, 1000 / 25); } } this.player.debug.log(this.TAG_NAME, 'video play'); }).catch(e => { // DOMException: play() failed because the user didn't interact with the document first. https://goo.gl/xX8pDD this.player.debug.error(this.TAG_NAME, 'video play error ', e); this.player.emitError(EVENTS_ERROR.mediaSourceUseCanvasRenderPlayFailed, e); }); }); const waitingProxyDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.waiting, () => { this.player.debug.log(this.TAG_NAME, 'video waiting'); }); // const timeUpdateProxyDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.timeUpdate, event => { parseInt(event.timeStamp, 10); // this.player.emit(EVENTS.videoTimeUpdate, timeStamp); // check video is playing if (this.$videoElement.paused) { this.player.debug.warn(this.TAG_NAME, 'video is paused and next try to replay'); this.$videoElement.play().then(() => { this.player.debug.log(this.TAG_NAME, 'video is paused and replay success'); }).catch(e => { this.player.debug.warn(this.TAG_NAME, 'video is paused and replay error ', e); }); } }); this.eventListenList.push(canplayProxyDestroy, waitingProxyDestroy, timeUpdateProxyDestroy); } videoFrameCallback(now) { let metaData = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; if (this.player.isDestroyedOrClosed()) { this.player.debug.log(this.TAG_NAME, 'videoFrameCallback() player is destroyed'); return; } const ts = parseInt(Math.max(metaData.mediaTime, this.$videoElement.currentTime) * 1000, 10) || 0; this.player.handleRender(); this.player.video.render({ $video: this.$videoElement, ts }); if (this.player.isUseHls265()) { this.player.updateStats({ fps: true, ts: ts }); } this.supportVideoFrameCallbackHandle = this.$videoElement.requestVideoFrameCallback(this.videoFrameCallback.bind(this)); } decodeVideo(payload, ts, isIframe, cts) { const player = this.player; if (!player) { return; } if (this.player.isDestroyedOrClosed()) { this.player.debug.warn(this.TAG_NAME, 'decodeVideo() player is destroyed'); return; } if (!this.hasInit) { // this.player.debug.warn(this.TAG_NAME, `decodeVideo has not init , isIframe is ${isIframe} , payload is ${payload[1]}`) if (isIframe && payload[1] === AVC_PACKET_TYPE.sequenceHeader) { const videoCodec = payload[0] & 0x0F; player.video.updateVideoInfo({ encTypeCode: videoCodec }); // windows 下面的 360 浏览器是支持 mse 解码播放的。 // edge 也是支持 mse 解码播放的。 if (videoCodec === VIDEO_ENC_CODE.h265 && isFalse(supportMSEDecodeHevc())) { // this.emit(EVENTS_ERROR.mediaSourceH265NotSupport) this.player.emitError(EVENTS_ERROR.mediaSourceH265NotSupport); return; } if (!player._times.decodeStart) { player._times.decodeStart = now$2(); } this.hasInit = this._decodeConfigurationRecord(payload, ts, isIframe, videoCodec); } else { this.player.debug.warn(this.TAG_NAME, `decodeVideo has not init , isIframe is ${isIframe} , payload is ${payload[1]}`); } } else { if (!this.isDecodeFirstIIframe && isIframe) { this.isDecodeFirstIIframe = true; } if (this.isDecodeFirstIIframe) { // check video width or height is changed if (isIframe && payload[1] === 0) { const videoCodec = payload[0] & 0x0F; let config = {}; if (videoCodec === VIDEO_ENC_CODE.h264) { let data = payload.slice(5); config = parseAVCDecoderConfigurationRecord(data); } else if (videoCodec === VIDEO_ENC_CODE.h265) { config = parseHEVCDecoderConfigurationRecord$2(payload); } const videoInfo = this.player.video.videoInfo; if (videoInfo && videoInfo.width && videoInfo.height && config && config.codecWidth && config.codecHeight && (config.codecWidth !== videoInfo.width || config.codecHeight !== videoInfo.height)) { this.player.debug.warn(this.TAG_NAME, ` decodeVideo: video width or height is changed, old width is ${videoInfo.width}, old height is ${videoInfo.height}, new width is ${config.codecWidth}, new height is ${config.codecHeight}, and emit change event`); this.isWidthOrHeightChanged = true; this.player.emitError(EVENTS_ERROR.mseWidthOrHeightChange); } } if (this.isWidthOrHeightChanged) { this.player.debug.warn(this.TAG_NAME, `decodeVideo: video width or height is changed, and return`); return; } if (isVideoSequenceHeader(payload)) { this.player.debug.warn(this.TAG_NAME, 'decodeVideo and payload is video sequence header so drop this frame'); return; } if (payload.byteLength < VIDEO_PAYLOAD_MIN_SIZE) { this.player.debug.warn(this.TAG_NAME, `decodeVideo and payload is too small , payload length is ${payload.byteLength}`); return; } let dts = ts; // just for player if (this.player.isPlayer()) { if (this.firstRenderTime === null) { this.firstRenderTime = ts; } dts = ts - this.firstRenderTime; if (dts < 0) { this.player.debug.warn(this.TAG_NAME, `decodeVideo local dts is < 0 , ts is ${ts} and prevTs is ${this.prevTs}, firstRenderTime is ${this.firstRenderTime} and mseCorrectTimeDuration is 
${this.player._opt.mseCorrectTimeDuration}`); if (this.prevDts === null) { dts = 0; } else { dts = this.prevDts + this.player._opt.mseCorrectTimeDuration; } // if (this._checkTsIsMaxDiff(ts)) { this.player.debug.warn(this.TAG_NAME, `decodeVideo is max diff , ts is ${ts} and prevTs is ${this.prevTs}, diff is ${this.prevTs - ts} and emit replay`); this.player.emitError(EVENTS_ERROR.mediaSourceTsIsMaxDiff, `decodeVideo is max diff, prevTs is ${this.prevTs} and ts is ${ts}`); return; } } if (this.prevDts !== null && dts <= this.prevDts) { this.player.debug.warn(this.TAG_NAME, ` decodeVideo dts is less than(or equal) prev dts , dts is ${dts} and prev dts is ${this.prevDts} , and now ts is ${ts} and prev ts is ${this.prevTs} , and diff is ${ts - this.prevTs} and firstRenderTime is ${this.firstRenderTime} and isIframe is ${isIframe}, and mseCorrectTimeDuration is ${this.player._opt.mseCorrectTimeDuration}, and prevPayloadBufferSize is ${this.prevPayloadBufferSize} and payload size is ${payload.byteLength}`); if (dts === this.prevDts) { if (this.prevPayloadBufferSize === payload.byteLength) { this.player.debug.warn(this.TAG_NAME, 'decodeVideo dts is equal to prev dts and payload size is equal to prev payload size so drop this frame'); return; } } dts = this.prevDts + this.player._opt.mseCorrectTimeDuration; if (this._checkTsIsMaxDiff(ts)) { this.player.debug.warn(this.TAG_NAME, `decodeVideo is max diff , ts is ${ts} and prevTs is ${this.prevTs}, diff is ${this.prevTs - ts} and emit replay`); this.emit(EVENTS_ERROR.mediaSourceTsIsMaxDiff, `decodeVideo is max diff, prevTs is ${this.prevTs} and ts is ${ts}`); return; } } } if (this.player.isPlayer()) { this._decodeVideo(payload, dts, isIframe, cts, ts); } else if (this.player.isPlayback()) { if (isFalse(this.player.playbackPause)) { // is not pause if (this.player.playback.isUseLocalCalculateTime) { this.player.playback.increaseLocalTimestamp(); } this._decodeVideo(payload, dts, isIframe, cts, ts); } } this.prevDts = dts; this.prevPayloadBufferSize = payload.byteLength; this.prevTs = ts; // 如果是video // if (this.player.getRenderType() === RENDER_TYPE.video && this.player.video) { // this.player.video.doAddContentToWatermark(); // } } else { this.player.debug.log(this.TAG_NAME, 'decodeVideo first frame is not iFrame'); } } } decodeAudio(payload, ts) { const player = this.player; if (!player) { return; } if (this.player.isDestroyedOrClosed()) { this.player.debug.warn(this.TAG_NAME, 'decodeAudio() player is destroyed'); return; } if (isFalse(this.hasAudioInit)) { this.hasAudioInit = this._decodeAudioConfigurationRecord(payload, ts); } else { let dts = ts; // body check if (isAacCodecPacket(payload)) { this.player.debug.log(this.TAG_NAME, 'decodeAudio and has already initialized and payload is aac codec packet so drop this frame'); return; } this._clearAudioNoDataCheckTimeout(); if (this.player.isPlayer()) { if (this.firstAudioTime === null) { this.firstAudioTime = ts; if (this.firstRenderTime !== null && this.prevTs !== null) { // If the video has been played for more than 300ms, this is the audio data, // it is necessary to reduce the start time of the video difference (prevTs - firstRenderTime). 
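// Worked example of the adjustment below: if the video started at firstRenderTime = 1000 and has
// advanced to prevTs = 1500 when the first audio frame arrives with ts = 1500, then diff = 500 (> 300),
// firstAudioTime becomes 1500 - 500 = 1000, and the first audio dts is ts - firstAudioTime = 500,
// which lines the audio timeline up with the 500 ms of video that has already been appended.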
const diff = Math.abs(this.firstRenderTime - this.prevTs); if (diff > 300) { this.firstAudioTime -= diff; this.player.debug.warn(this.TAG_NAME, `video firstAudioTime is ${this.firstRenderTime} and current time is ${this.prevTs} play time is ${diff} and firstAudioTime ${ts} - ${diff} = ${this.firstAudioTime}`); } } } dts = ts - this.firstAudioTime; if (dts < 0) { this.player.debug.warn(this.TAG_NAME, `decodeAudio local dts is < 0 , ts is ${ts} and prevTs is ${this.prevAudioTs}, firstAudioTime is ${this.firstAudioTime}`); if (this.prevAudioDts === null) { dts = 0; } else { dts = this.prevAudioDts + this.player._opt.mseCorrectAudioTimeDuration; } if (this._checkAudioTsIsMaxDiff(ts)) { this.player.debug.warn(this.TAG_NAME, `decodeAudio is max diff , ts is ${ts} and prevTs is ${this.prevAudioDts}, diff is ${this.prevAudioDts - ts} and emit replay`); this.player.emitError(EVENTS_ERROR.mediaSourceTsIsMaxDiff, `decodeAudio is max diff, prevTs is ${this.prevAudioDts} and ts is ${ts}`); return; } } if (this.prevAudioTs !== null && dts <= this.prevAudioDts) { this.player.debug.warn(this.TAG_NAME, ` decodeAudio dts is less than(or equal) prev dts , dts is ${dts} and prev dts is ${this.prevAudioDts} , and now ts is ${ts} and prev ts is ${this.prevAudioTs} , and diff is ${ts - this.prevAudioTs}`); dts = this.prevAudioDts + this.player._opt.mseCorrectAudioTimeDuration; } } if (this.player.isPlayer()) { this._decodeAudio(payload, dts, ts); } else if (this.player.isPlayback()) { if (isFalse(this.player.playbackPause)) { this._decodeAudio(payload, dts, ts); } } this.prevAudioTs = ts; this.prevAudioDts = dts; } } _checkTsIsMaxDiff(ts) { return this.prevTs > 0 && ts < this.prevTs && this.prevTs - ts > FRAME_TS_MAX_DIFF; } _checkAudioTsIsMaxDiff(ts) { return this.prevAudioTs > 0 && ts < this.prevAudioTs && this.prevAudioTs - ts > FRAME_TS_MAX_DIFF; } _decodeConfigurationRecord(payload, ts, isIframe, videoCodec) { let data = payload.slice(5); let config = {}; // this.player.debug.log(this.TAG_NAME, `_decodeConfigurationRecord videoCodec is ${videoCodec}`) if (videoCodec === VIDEO_ENC_CODE.h264) { config = parseAVCDecoderConfigurationRecord(data); // config = parseAVCDecoderConfigurationRecord$2(payload) } else if (videoCodec === VIDEO_ENC_CODE.h265) { // config = parseHEVCDecoderConfigurationRecord(data); // config = parseHEVCDecoderConfigurationRecord$2(payload); config = parseHEVCDecoderConfigurationRecord$4(data); } // todo:just for recorder if (this.player.recorder && this.player._opt.recordType === FILE_SUFFIX.mp4) { this.player.recorder.initMetaData(payload, videoCodec); } if (config.codecWidth === 0 && config.codecHeight === 0) { this.player.debug.warn(this.TAG_NAME, '_decodeConfigurationRecord', config); this.player.emitError(EVENTS_ERROR.mediaSourceDecoderConfigurationError); return false; } const metaData = { id: AV_TRACK_ID.video, // video tag data type: 'video', timescale: 1000, duration: 0, avcc: data, codecWidth: config.codecWidth, codecHeight: config.codecHeight, videoType: config.videoType }; // ftyp const metaBox = MP4$2.generateInitSegment(metaData); this.isAvc = videoCodec === VIDEO_ENC_CODE.h264; let codec = config.codec; if (codec) { this.videoMimeType = `video/mp4; codecs="${config.codec}"`; } else { this.videoMimeType = this.isAvc ? 
MP4_CODECS.avc : MP4_CODECS.hev; } this._initSourceBuffer(); this.appendBuffer(metaBox.buffer); this.sequenceNumber = 0; this.cacheTrack = {}; this.timeInit = false; return true; } _decodeAudioConfigurationRecord(payload, ts) { const codecId = payload[0] >> 4; const isMp3 = codecId === AUDIO_ENC_CODE.MP3; const isAAC = codecId === AUDIO_ENC_CODE.AAC; if (isFalse(isAAC || isMp3)) { // inner error, not emit error event。 // inner change audio engine this.player.debug.warn(this.TAG_NAME, `_decodeAudioConfigurationRecord audio codec is not support , codecId is ${codecId} ant auto wasm decode`); this.player.emit(EVENTS_ERROR.mediaSourceAudioG711NotSupport); return false; } const metaData = { id: AV_TRACK_ID.audio, type: 'audio', timescale: 1000 }; let metaInfo = {}; if (isAacCodecPacket(payload)) { const extraData = payload.slice(2); metaInfo = parseAACAudioSpecificConfig(extraData); if (metaInfo) { metaData.audioSampleRate = metaInfo.sampleRate; metaData.channelCount = metaInfo.channelCount; metaData.config = metaInfo.config; metaData.refSampleDuration = 1024 / metaData.audioSampleRate * metaData.timescale; } else { return false; } } else if (isMp3) { // mp3 // mp3 没有extradata信息,这里的extradata 就是第一帧mp3 metaInfo = parseMp3AudioSpecificConfig(payload); if (metaInfo) { metaData.audioSampleRate = metaInfo.samplingRate; metaData.channelCount = metaInfo.channelCount; metaData.refSampleDuration = 1152 / metaData.audioSampleRate * metaData.timescale; } else { return false; } } else { return false; } metaData.codec = metaInfo.codec; metaData.duration = 0; let container = 'mp4'; let codec = metaInfo.codec; let metaBox = null; if (isMp3 && isFalse(isFirefox())) { // 'audio/mpeg' for MP3 audio track container = 'mpeg'; codec = ''; metaBox = new Uint8Array(); } else { // 'audio/mp4, codecs="codec"' metaBox = MP4$2.generateInitSegment(metaData); } // console.error('metaData', metaData); // console.error('metaBox', metaBox); let mimeType = `${metaData.type}/${container}`; if (codec && codec.length > 0) { mimeType += `;codecs=${codec}`; } if (isFalse(this.isAudioInitInfo)) { this.player.audio.updateAudioInfo({ encTypeCode: codecId, channels: metaData.channelCount, sampleRate: metaData.audioSampleRate }); this.isAudioInitInfo = true; } this.audioMimeType = mimeType; this.isAAC = isAAC; // this.player.debug.log(this.TAG_NAME, `_decodeAudioConfigurationRecord mimeType is ${mimeType} and isAAC is ${isAAC}`); this._initAudioSourceBuffer(); this.appendAudioBuffer(metaBox.buffer); return true; } _initSourceBuffer() { const { debug, events: { proxy } } = this.player; if (this.sourceBuffer === null && this.mediaSource !== null && this.isStateOpen && this.videoMimeType) { try { this.sourceBuffer = this.mediaSource.addSourceBuffer(this.videoMimeType); debug.log(this.TAG_NAME, '_initSourceBuffer() this.mediaSource.addSourceBuffer()', this.videoMimeType); } catch (e) { debug.error(this.TAG_NAME, 'appendBuffer() this.mediaSource.addSourceBuffer()', e.code, e); // need throw error and change to use wasm play this.player.emitError(EVENTS_ERROR.mseAddSourceBufferError, e); this.mediaSourceAddSourceBufferError = true; return; } if (this.sourceBuffer) { const sourceBufferErrorProxyDestroy = proxy(this.sourceBuffer, 'error', error => { this.mediaSourceBufferError = true; debug.error(this.TAG_NAME, 'mseSourceBufferError this.sourceBuffer', error); this.player.emitError(EVENTS_ERROR.mseSourceBufferError, error); }); const updateendProxyDestroy = proxy(this.sourceBuffer, 'updateend', () => { if (this._hasPendingRemoveRanges()) { // 
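// 'updateend' fires after each appendBuffer()/remove() on this SourceBuffer completes, so this
// handler acts as the pump that drains pendingRemoveRanges first and only then appends the next
// queued segment via _doAppendSegments().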
debug.log(this.TAG_NAME, 'this.sourceBuffer updateend and has pending remove ranges'); this._doRemoveRanges(); } else if (this._hasPendingSegments()) { // debug.log(this.TAG_NAME, 'this.sourceBuffer updateend and has pending segments'); this._doAppendSegments(); } }); this.eventListenList.push(sourceBufferErrorProxyDestroy, updateendProxyDestroy); } } else { debug.log(this.TAG_NAME, `_initSourceBuffer and this.isStateOpen is ${this.isStateOpen} and this.isAvc === null is ${this.isAvc === null}`); } } _initAudioSourceBuffer() { const { debug, events: { proxy } } = this.player; if (this.audioSourceBuffer === null && this.mediaSource !== null && this.isStateOpen && this.audioMimeType) { try { this.audioSourceBuffer = this.mediaSource.addSourceBuffer(this.audioMimeType); this._clearAudioSourceBufferCheckTimeout(); debug.log(this.TAG_NAME, '_initAudioSourceBuffer() this.mediaSource.addSourceBuffer()', this.audioMimeType); } catch (e) { debug.error(this.TAG_NAME, 'appendAudioBuffer() this.mediaSource.addSourceBuffer()', e.code, e); // need throw error and change to use wasm play this.player.emitError(EVENTS_ERROR.mseAddSourceBufferError, e); this.mediaSourceAddSourceBufferError = true; return; } if (this.audioSourceBuffer) { const sourceBufferErrorProxyDestroy = proxy(this.audioSourceBuffer, 'error', error => { this.mediaSourceBufferError = true; debug.error(this.TAG_NAME, 'mseSourceBufferError this.audioSourceBuffer', error); this.player.emitError(EVENTS_ERROR.mseSourceBufferError, error); }); const updateendProxyDestroy = proxy(this.audioSourceBuffer, 'updateend', () => { if (this._hasPendingRemoveRanges()) { // debug.log(this.TAG_NAME, 'this.audioSourceBuffer updateend and has pending remove ranges'); this._doRemoveRanges(); } else if (this._hasPendingSegments()) { // debug.log(this.TAG_NAME, 'this.audioSourceBuffer updateend and has pending segments'); this._doAppendSegments(); } }); this.eventListenList.push(sourceBufferErrorProxyDestroy, updateendProxyDestroy); if (this.audioSourceNoDataCheckTimeout === null) { this.audioSourceNoDataCheckTimeout = setTimeout(() => { // this.player.debug.warn(this.TAG_NAME, '_doAppendSegments() init audio timeout and set _opt.hasAudio is false'); // this.player._opt.hasAudio = false; // this.audioSourceBufferCheckTimeout = null; this._clearAudioNoDataCheckTimeout(); this.player.emit(EVENTS_ERROR.mediaSourceAudioNoDataTimeout); }, 1000); } } } else { debug.log(this.TAG_NAME, `_initAudioSourceBuffer and this.isStateOpen is ${this.isStateOpen} and this.audioMimeType is ${this.audioMimeType}`); } } // _decodeVideo(payload, dts, isIframe, cts, ts) { const player = this.player; let arrayBuffer = payload.slice(5); let bytes = arrayBuffer.byteLength; if (bytes === 0) { player.debug.warn(this.TAG_NAME, '_decodeVideo payload bytes is 0 and return'); return; } let nowTime = new Date().getTime(); let isFirst = false; if (!this.prevTimestamp) { this.prevTimestamp = nowTime; isFirst = true; } const diffTime = nowTime - this.prevTimestamp; this.decodeDiffTimestamp = diffTime; if (diffTime > 500 && !isFirst && this.player.isPlayer()) { player.debug.warn(this.TAG_NAME, `_decodeVideo now time is ${nowTime} and prev time is ${this.prevTimestamp}, diff time is ${diffTime} ms`); } const $video = this.$videoElement; // const maxDelay = player._opt.videoBufferDelay + player._opt.videoBuffer; // if ($video.buffered.length > 1) { // this.removeBuffer($video.buffered.start(0), $video.buffered.end(0)); // this.timeInit = false; // } if (this.cacheTrack.id && dts >= 
this.cacheTrack.dts) { // 需要额外加8个size let mdatBytes = 8 + this.cacheTrack.size; let mdatbox = new Uint8Array(mdatBytes); mdatbox[0] = mdatBytes >>> 24 & 255; mdatbox[1] = mdatBytes >>> 16 & 255; mdatbox[2] = mdatBytes >>> 8 & 255; mdatbox[3] = mdatBytes & 255; // mdat box用来存储视频碎片数据, mdatbox.set(MP4$2.types.mdat, 4); mdatbox.set(this.cacheTrack.data, 8); this.cacheTrack.duration = dts - this.cacheTrack.dts; // moof // moof box仅在流式MP4中使用,用于将多个sample组合成一个fragment。 // moof 用来描述mdat // console.error('cacheTrack', this.cacheTrack, this.cacheTrack.dts); let moofbox = MP4$2.moof(this.cacheTrack, this.cacheTrack.dts); // 用完清空数据。 this.cacheTrack = {}; let result = new Uint8Array(moofbox.byteLength + mdatbox.byteLength); result.set(moofbox, 0); result.set(mdatbox, moofbox.byteLength); // appendBuffer this.appendBuffer(result.buffer); // player.debug.log(this.TAG_NAME, 'decode ts is', dts,ts); player.emit(EVENTS.timeUpdate, ts); // if (player.isPlayer()) { if (player.isUseHls265()) { player.updateStats({ dfps: true, mseTs: dts }); } else { player.updateStats({ fps: true, dfps: true, ts: ts, mseTs: dts }); } } else if (player.isPlayback()) { player.playback.updateStats({ ts: ts }); } if (!player._times.videoStart) { player._times.videoStart = now$2(); player.handlePlayToRenderTimes(); } } else { player.debug.log(this.TAG_NAME, `timeInit set false , cacheTrack = {} now dts is ${dts}, and ts is ${ts} cacheTrack dts is ${this.cacheTrack && this.cacheTrack.dts}`); this.timeInit = false; this.cacheTrack = {}; } if (!this.cacheTrack) { this.cacheTrack = {}; } this.cacheTrack.id = AV_TRACK_ID.video; this.cacheTrack.sequenceNumber = ++this.sequenceNumber; this.cacheTrack.size = bytes; this.cacheTrack.dts = dts; this.cacheTrack.cts = cts; this.cacheTrack.isKeyframe = isIframe; this.cacheTrack.data = arrayBuffer; // this.cacheTrack.flags = { isLeading: 0, dependsOn: isIframe ? 2 : 1, isDependedOn: isIframe ? 1 : 0, hasRedundancy: 0, isNonSync: isIframe ? 0 : 1 }; // // if (!this.timeInit && $video.buffered.length === 1) { // player.debug.log(this.TAG_NAME, 'timeInit set true'); // this.timeInit = true; // $video.currentTime = $video.buffered.end(0); // } if (!this.isInitInfo && $video.videoWidth > 0 && $video.videoHeight > 0) { player.debug.log(this.TAG_NAME, `updateVideoInfo: ${$video.videoWidth},${$video.videoHeight}`); player.video.updateVideoInfo({ width: $video.videoWidth, height: $video.videoHeight }); player.video.initCanvasViewSize(); this.isInitInfo = true; } // use canvas render if (player._opt.mseUseCanvasRender && isFalse(this.isSupportVideoFrameCallback) && isFalse(player.isUseHls265())) { player.video.render({ $video, ts: dts }); } this.prevTimestamp = new Date().getTime(); } _stopCanvasRender() { if (this.canvasRenderInterval) { clearInterval(this.canvasRenderInterval); this.canvasRenderInterval = null; } } _decodeAudio(payload, dts, ts) { this.player; let arrayBuffer = this.isAAC ? 
payload.slice(2) : payload.slice(1); let bytes = arrayBuffer.byteLength; if (this.cacheAudioTrack.id && dts >= this.cacheAudioTrack.dts) { // 需要额外加8个size let mdatBytes = 8 + this.cacheAudioTrack.size; let mdatbox = new Uint8Array(mdatBytes); mdatbox[0] = mdatBytes >>> 24 & 255; mdatbox[1] = mdatBytes >>> 16 & 255; mdatbox[2] = mdatBytes >>> 8 & 255; mdatbox[3] = mdatBytes & 255; // mdat box用来存储视频碎片数据, mdatbox.set(MP4$2.types.mdat, 4); mdatbox.set(this.cacheAudioTrack.data, 8); this.cacheAudioTrack.duration = dts - this.cacheAudioTrack.dts; // moof // moof box仅在流式MP4中使用,用于将多个sample组合成一个fragment。 // moof 用来描述mdat // console.error('cacheAudioTrack', this.cacheAudioTrack, this.cacheAudioTrack.dts); let moofbox = MP4$2.moof(this.cacheAudioTrack, this.cacheAudioTrack.dts); // 用完清空数据。 this.cacheAudioTrack = {}; let result = new Uint8Array(moofbox.byteLength + mdatbox.byteLength); result.set(moofbox, 0); result.set(mdatbox, moofbox.byteLength); this.appendAudioBuffer(result.buffer); } else { this.cacheAudioTrack = {}; } if (!this.cacheAudioTrack) { this.cacheAudioTrack = {}; } this.cacheAudioTrack.id = AV_TRACK_ID.audio; this.cacheAudioTrack.sequenceNumber = ++this.audioSequenceNumber; this.cacheAudioTrack.size = bytes; this.cacheAudioTrack.dts = dts; this.cacheAudioTrack.cts = 0; this.cacheAudioTrack.data = arrayBuffer; this.cacheAudioTrack.flags = { isLeading: 0, dependsOn: 1, isDependedOn: 0, hasRedundancy: 0 }; } appendBuffer(buffer) { if (this.player.isDestroyedOrClosed()) { this.player.debug.warn(this.TAG_NAME, 'appendBuffer() player is destroyed'); return; } const { debug, events: { proxy } } = this.player; if (this.mediaSourceAddSourceBufferError) { debug.warn(this.TAG_NAME, `appendBuffer() this.mediaSourceAddSourceBufferError is true`); return; } if (this.mediaSourceAppendBufferFull) { debug.warn(this.TAG_NAME, `appendBuffer() this.mediaSourceAppendBufferFull is true`); return; } if (this.mediaSourceAppendBufferError) { debug.warn(this.TAG_NAME, `appendBuffer() this.mediaSourceAppendBufferError is true`); return; } if (this.mediaSourceBufferError) { debug.warn(this.TAG_NAME, `appendBuffer() this.mediaSourceBufferError is true`); return; } this.pendingSegments.push(buffer); if (this.sourceBuffer) { // just for player if (this.player.isPlayer()) { this._handleUpdatePlaybackRate(); } if (this.player.isPlayback()) { this._handleUpdateBufferDelayTime(); } if (this.player._opt.mseAutoCleanupSourceBuffer && this._needCleanupSourceBuffer()) { this._doCleanUpSourceBuffer(); } if (isFalse(this.getSourceBufferUpdating()) && this.isStateOpen && isFalse(this._hasPendingRemoveRanges())) { this._doAppendSegments(); return; } } if (this.isStateClosed) { this.mediaSourceBufferError = true; this.player.emitError(EVENTS_ERROR.mseSourceBufferError, 'mediaSource is not attached to video or mediaSource is closed'); } else if (this.isStateEnded) { this.mediaSourceBufferError = true; this.player.emitError(EVENTS_ERROR.mseSourceBufferError, 'mediaSource is end'); } else { if (this._hasPendingRemoveRanges()) { debug.log(this.TAG_NAME, `video has pending remove ranges and video length is ${this.pendingRemoveRanges.length}, audio length is ${this.pendingAudioRemoveRanges.length}`); } } } appendAudioBuffer(buffer) { if (this.player.isDestroyedOrClosed()) { this.player.debug.warn(this.TAG_NAME, 'appendAudioBuffer() player is destroyed'); return; } const { debug, events: { proxy } } = this.player; if (this.mediaSourceAddSourceBufferError) { debug.warn(this.TAG_NAME, `appendAudioBuffer() 
this.mediaSourceAddSourceBufferError is true`); return; } if (this.mediaSourceAppendBufferFull) { debug.warn(this.TAG_NAME, `appendAudioBuffer() this.mediaSourceAppendBufferFull is true`); return; } if (this.mediaSourceAppendBufferError) { debug.warn(this.TAG_NAME, `appendAudioBuffer() this.mediaSourceAppendBufferError is true`); return; } if (this.mediaSourceBufferError) { debug.warn(this.TAG_NAME, `appendAudioBuffer() this.mediaSourceBufferError is true`); return; } this.pendingAudioSegments.push(buffer); if (this.audioSourceBuffer) { // just for player if (this.player.isPlayer()) { this._handleUpdatePlaybackRate(); } if (this.player.isPlayback()) { this._handleUpdateBufferDelayTime(); } if (this.player._opt.mseAutoCleanupSourceBuffer && this._needCleanupSourceBuffer()) { this._doCleanUpSourceBuffer(); } if (isFalse(this.getAudioSourceBufferUpdating()) && this.isStateOpen && isFalse(this._hasPendingRemoveRanges())) { this._doAppendSegments(); return; } } if (this.isStateClosed) { this.mediaSourceBufferError = true; this.player.emitError(EVENTS_ERROR.mseSourceBufferError, 'mediaSource is not attached to video or mediaSource is closed'); } else if (this.isStateEnded) { this.mediaSourceBufferError = true; this.player.emitError(EVENTS_ERROR.mseSourceBufferError, 'mediaSource is end'); } else { if (this._hasPendingRemoveRanges()) { debug.log(this.TAG_NAME, `audio has pending remove ranges and video length is ${this.pendingRemoveRanges.length}, audio length is ${this.pendingAudioRemoveRanges.length}`); } } } getSourceBufferUpdating() { return this.sourceBuffer && this.sourceBuffer.updating; } getAudioSourceBufferUpdating() { return this.audioSourceBuffer && this.audioSourceBuffer.updating; } stop() { // remove source buffer this.removeSourceBuffer(); // end of stream this.endOfStream(); // abort source buffer this.abortSourceBuffer(); } checkSourceBufferDelay() { const $video = this.$videoElement; let result = 0; let buffered = 0; if ($video.buffered.length > 0) { buffered = $video.buffered.end($video.buffered.length - 1); result = buffered - $video.currentTime; } if (result < 0) { this.player.debug.warn(this.TAG_NAME, `checkMSESourceBufferDelay ${result} < 0, and buffered is ${buffered} ,currentTime is ${$video.currentTime} , try to seek ${$video.currentTime} to ${buffered}`); $video.currentTime = buffered; result = 0; } return result; } checkSourceBufferStore() { const $video = this.$videoElement; let result = 0; if ($video.buffered.length > 0) { result = $video.currentTime - $video.buffered.start(0); } return result; } getDecodeDiffTimes() { return this.decodeDiffTimestamp; } removeBuffer(start, end) { const _isMacOsFirefox = isMacOsFirefox(); this.player.debug.log(this.TAG_NAME, `removeBuffer() start is ${start} and end is ${end} and _isMacOsFirefox is ${_isMacOsFirefox}`); if (this.isStateOpen && isFalse(_isMacOsFirefox)) { if (isFalse(this.getSourceBufferUpdating())) { try { this.sourceBuffer.remove(start, end); } catch (e) { this.player.debug.warn(this.TAG_NAME, 'removeBuffer() sourceBuffer error', e); } } if (isFalse(this.getAudioSourceBufferUpdating())) { try { this.audioSourceBuffer.remove(start, end); } catch (e) { this.player.debug.warn(this.TAG_NAME, 'removeBuffer() audioSourceBuffer error', e); } } } } // clear up all source buffer clearUpAllSourceBuffer() { // const $video = this.$videoElement; // const buffered = $video.buffered; if (this.sourceBuffer) { const buffered = this.sourceBuffer.buffered; for (let i = 0; i < buffered.length; i++) { let start = buffered.start(i); let 
end = buffered.end(i); this.pendingRemoveRanges.push({ start: start, end: end }); } if (isFalse(this.getSourceBufferUpdating())) { this._doRemoveRanges(); } } if (this.audioSourceBuffer) { const buffered = this.audioSourceBuffer.buffered; for (let i = 0; i < buffered.length; i++) { let start = buffered.start(i); let end = buffered.end(i); this.pendingAudioRemoveRanges.push({ start: start, end: end }); } if (isFalse(this.getAudioSourceBufferUpdating())) { this._doRemoveRanges(); } } } endOfStream() { // fix: MediaSource endOfStream before demuxer initialization completes (before HAVE_METADATA) is treated as an error if (this.isStateOpen && this.$videoElement && this.$videoElement.readyState >= 1) { try { this.player.debug.log(this.TAG_NAME, 'endOfStream()'); this.mediaSource.endOfStream(); } catch (e) { this.player.debug.warn(this.TAG_NAME, 'endOfStream() error', e); } } } abortSourceBuffer() { if (this.isStateOpen) { if (this.sourceBuffer) { try { this.player.debug.log(this.TAG_NAME, 'abortSourceBuffer() abort sourceBuffer'); this.sourceBuffer.abort(); } catch (e) {} } if (this.audioSourceBuffer) { try { this.player.debug.log(this.TAG_NAME, 'abortSourceBuffer() abort audioSourceBuffer'); this.audioSourceBuffer.abort(); } catch (e) {} } } this.sourceBuffer = null; this.audioSourceBuffer = null; } removeSourceBuffer() { if (!this.isStateClosed && this.mediaSource) { // video if (this.sourceBuffer) { try { this.player.debug.log(this.TAG_NAME, 'removeSourceBuffer() sourceBuffer'); this.mediaSource.removeSourceBuffer(this.sourceBuffer); } catch (e) { this.player.debug.warn(this.TAG_NAME, 'removeSourceBuffer() sourceBuffer error', e); } } // audio if (this.audioSourceBuffer) { try { this.player.debug.log(this.TAG_NAME, 'removeSourceBuffer() audioSourceBuffer'); this.mediaSource.removeSourceBuffer(this.audioSourceBuffer); } catch (e) { this.player.debug.warn(this.TAG_NAME, 'removeSourceBuffer() audioSourceBuffer error', e); } } } } _hasPendingSegments() { return this.pendingSegments.length > 0 || this.pendingAudioSegments.length > 0; } // pending getPendingSegmentsLength() { return this.pendingSegments.length; } _handleUpdatePlaybackRate() { if (!this.$videoElement) { return; } const $video = this.$videoElement; const videoBuffer = this.player._opt.videoBuffer; const videoBufferDelay = this.player._opt.videoBufferDelay; let maxDelay = (videoBuffer + videoBufferDelay) / 1000; const ranges = $video.buffered; // 已缓冲的时间范围 ranges.length ? ranges.start(0) : 0; const buffered = ranges.length ? 
ranges.end(ranges.length - 1) : 0; let time = $video.currentTime; const buffer = buffered - time; // not less than MSE_MAX_DELAY_TIME const maxDelayTime = Math.max(MSE_MAX_DELAY_TIME, maxDelay + MSE_DELAY_INCREASE_TIME); this.player.updateStats({ mseVideoBufferDelayTime: buffer }); if (buffer > maxDelayTime) { this.player.debug.log(this.TAG_NAME, `handleUpdatePlaybackRate and buffered is ${buffered} and current is ${time} , delay buffer is more than ${maxDelayTime} is ${buffer} and new time is ${buffered}`); $video.currentTime = buffered; time = $video.currentTime; } else if (buffer < 0) { this.player.debug.warn(this.TAG_NAME, `handleUpdatePlaybackRate and delay buffer is ${buffered} - current is ${time} = ${buffer} < 0 and check video is paused : ${$video.paused} `); // buffer if (buffered === 0) { this.player.emit(EVENTS_ERROR.mediaSourceBufferedIsZeroError, 'video.buffered is empty'); return; } // check if the video is paused if ($video.paused) { // if paused, play it $video.play(); } } const rate = this._getPlaybackRate(buffered - time); if ($video.playbackRate !== rate) { this.player.debug.log(this.TAG_NAME, `handleUpdatePlaybackRate and buffered is ${buffered} and current time is ${time} and delay is ${buffered - time} set playbackRate is ${rate} `); $video.playbackRate = rate; } } _handleUpdateBufferDelayTime() { const bufferDelayTime = this.getVideoBufferDelayTime(); this.player.updateStats({ mseVideoBufferDelayTime: bufferDelayTime }); } _doAppendSegments() { if (this.isStateClosed || this.isStateEnded) { return; } if (this.needInitAudio() && this.audioSourceBuffer === null) { this.player.debug.log(this.TAG_NAME, '_doAppendSegments() audioSourceBuffer is null and need init audio source buffer'); if (this.audioSourceBufferCheckTimeout === null) { this.audioSourceBufferCheckTimeout = setTimeout(() => { this._clearAudioSourceBufferCheckTimeout(); this.player.emit(EVENTS_ERROR.mediaSourceAudioInitTimeout); }, 1000); } return; } if (isFalse(this.getSourceBufferUpdating())) { // video if (this.pendingSegments.length > 0) { const segment = this.pendingSegments.shift(); try { this.sourceBuffer.appendBuffer(segment); } catch (e) { this.player.debug.error(this.TAG_NAME, 'this.sourceBuffer.appendBuffer()', e.code, e); if (e.code === 22) { // QuotaExceededError // The SourceBuffer is full, and cannot free space to append additional buffers this.stop(); this.mediaSourceAppendBufferFull = true; this.player.emitError(EVENTS_ERROR.mediaSourceFull); } else if (e.code === 11) { // Failed to execute 'appendBuffer' on 'SourceBuffer': The HTMLMediaElement.error attribute is not null. this.stop(); this.mediaSourceAppendBufferError = true; this.player.emitError(EVENTS_ERROR.mediaSourceAppendBufferError); } else { this.stop(); this.mediaSourceBufferError = true; this.player.emitError(EVENTS.mseSourceBufferError, e); } } } } if (isFalse(this.getAudioSourceBufferUpdating())) { // audio if (this.pendingAudioSegments.length > 0) { const segment = this.pendingAudioSegments.shift(); try { this.audioSourceBuffer.appendBuffer(segment); } catch (e) { this.player.debug.error(this.TAG_NAME, 'this.audioSourceBuffer.appendBuffer()', e.code, e); if (e.code === 22) { // QuotaExceededError // The SourceBuffer is full, and cannot free space to append additional buffers this.stop(); this.mediaSourceAppendBufferFull = true; this.player.emitError(EVENTS_ERROR.mediaSourceFull); } else if (e.code === 11) { // Failed to execute 'appendBuffer' on 'SourceBuffer': The HTMLMediaElement.error attribute is not null. 
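// Note (descriptive comment): the numeric checks in these catch blocks use the legacy
// DOMException constants, 22 = QUOTA_EXCEEDED_ERR and 11 = INVALID_STATE_ERR (appendBuffer
// throws InvalidStateError when HTMLMediaElement.error is non-null). Since e.code is a
// deprecated property, the same conditions could also be recognized defensively via
// e.name === 'QuotaExceededError' / 'InvalidStateError'.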
this.stop(); this.mediaSourceAppendBufferError = true; this.player.emitError(EVENTS_ERROR.mediaSourceAppendBufferError); } else { this.stop(); this.mediaSourceBufferError = true; this.player.emitError(EVENTS.mseSourceBufferError, e); } } } } } _doCleanUpSourceBuffer() { if (!this.$videoElement) { return; } const $video = this.$videoElement; const currentTime = $video.currentTime; // video if (this.sourceBuffer) { const buffered = this.sourceBuffer.buffered; let doRemove = false; for (let i = 0; i < buffered.length; i++) { let start = buffered.start(i); let end = buffered.end(i); if (start <= currentTime && currentTime < end + 3) { // padding 3 seconds if (currentTime - start >= this.player._opt.mseAutoCleanupMaxBackwardDuration) { doRemove = true; let removeEnd = currentTime - this.player._opt.mseAutoCleanupMinBackwardDuration; this.pendingRemoveRanges.push({ start: start, end: removeEnd }); } } else if (end < currentTime) { doRemove = true; this.pendingRemoveRanges.push({ start: start, end: end }); } } if (doRemove && isFalse(this.getSourceBufferUpdating())) { this._doRemoveRanges(); } } // audio if (this.audioSourceBuffer) { const buffered = this.audioSourceBuffer.buffered; let doRemove = false; for (let i = 0; i < buffered.length; i++) { let start = buffered.start(i); let end = buffered.end(i); if (start <= currentTime && currentTime < end + 3) { // padding 3 seconds if (currentTime - start >= this.player._opt.mseAutoCleanupMaxBackwardDuration) { doRemove = true; let removeEnd = currentTime - this.player._opt.mseAutoCleanupMinBackwardDuration; this.pendingAudioRemoveRanges.push({ start: start, end: removeEnd }); } } else if (end < currentTime) { doRemove = true; this.pendingAudioRemoveRanges.push({ start: start, end: end }); } } if (doRemove && isFalse(this.getAudioSourceBufferUpdating())) { this._doRemoveRanges(); } } } _hasPendingRemoveRanges() { return this.pendingRemoveRanges.length > 0 || this.pendingAudioRemoveRanges.length > 0; } _doRemoveRanges() { if (this.sourceBuffer && isFalse(this.getSourceBufferUpdating())) { let ranges = this.pendingRemoveRanges; while (ranges.length && isFalse(this.getSourceBufferUpdating())) { let range = ranges.shift(); try { this.sourceBuffer.remove(range.start, range.end); } catch (e) { this.player.debug.warn(this.TAG_NAME, '_doRemoveRanges() sourceBuffer error', e); } } } if (this.audioSourceBuffer && isFalse(this.getAudioSourceBufferUpdating())) { let ranges = this.pendingAudioRemoveRanges; while (ranges.length && isFalse(this.getAudioSourceBufferUpdating())) { let range = ranges.shift(); try { this.audioSourceBuffer.remove(range.start, range.end); } catch (e) { this.player.debug.warn(this.TAG_NAME, '_doRemoveRanges() audioSourceBuffer error', e); } } } } getDecodePlaybackRate() { let result = 0; const $video = this.$videoElement; if ($video) { result = $video.playbackRate; } return result; } _getPlaybackRate(buffer) { const $video = this.$videoElement; let videoBufferDelay = this.player._opt.videoBufferDelay + this.player._opt.videoBuffer; // not less than 1000ms const maxDelay = Math.max(videoBufferDelay, 1000); const minDelay = maxDelay / 2; // s -> ms buffer = buffer * 1000; switch ($video.playbackRate) { case 1: if (buffer > maxDelay) { return 1.2; } return 1; default: if (buffer <= minDelay) { return 1; } return $video.playbackRate; } } _needCleanupSourceBuffer() { if (isFalse(this.player._opt.mseAutoCleanupSourceBuffer) || !this.$videoElement) { return false; } const $video = this.$videoElement; const buffered = $video.buffered; const 
currentTime = $video.currentTime; if (buffered.length >= 1) { if (currentTime - buffered.start(0) >= this.player._opt.mseAutoCleanupMaxBackwardDuration) { return true; } } return false; } getVideoCurrentTime() { let result = 0; if (this.$videoElement) { result = this.$videoElement.currentTime; } return result; } getVideoBufferLastTime() { const $video = this.$videoElement; let result = 0; if ($video) { const ranges = $video.buffered; // 已缓冲的时间范围 ranges.length ? ranges.start(0) : 0; const buffered = ranges.length ? ranges.end(ranges.length - 1) : 0; result = buffered; } return result; } getVideoBufferDelayTime() { const $video = this.$videoElement; const buffered = this.getVideoBufferLastTime(); let time = $video.currentTime; const buffer = buffered - time; return buffer > 0 ? buffer : 0; } _clearAudioSourceBufferCheckTimeout() { if (this.audioSourceBufferCheckTimeout) { clearTimeout(this.audioSourceBufferCheckTimeout); this.audioSourceBufferCheckTimeout = null; } } // check has audio data _clearAudioNoDataCheckTimeout() { if (this.audioSourceNoDataCheckTimeout) { clearTimeout(this.audioSourceNoDataCheckTimeout); this.audioSourceNoDataCheckTimeout = null; } } } // tks: https://github.com/richtr/NoSleep.js const WEBM = "data:video/webm;base64,GkXfo59ChoEBQveBAULygQRC84EIQoKEd2VibUKHgQRChYECGFOAZwEAAAAAABLfEU2bdLpNu4tTq4QVSalmU6yBoU27i1OrhBZUrmtTrIHYTbuMU6uEElTDZ1OsggGXTbuMU6uEHFO7a1OsghLJ7AEAAAAAAABZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAVSalmsirXsYMPQkBNgI1MYXZmNTguNDUuMTAwV0GNTGF2ZjU4LjQ1LjEwMESJiECzmgAAAAAAFlSua0C5rgEAAAAAAABO14EBc8WI9UhIq9EDJPCcgQAitZyDdW5khoVWX1ZQOIOBASPjg4QF9eEA4AEAAAAAAAAbsIIBQLqB8FSwggElVLqB8FWwiFW3gQFVuIECrgEAAAAAAABZ14ECc8WIUEWPA9J/iJ6cgQAitZyDdW5khoZBX09QVVNWqoNjLqBWu4QExLQAg4EC4ZGfgQG1iEDncAAAAAAAYmSBIGOik09wdXNIZWFkAQE4AYC7AAAAAAASVMNnQcJzcwEAAAAAAACXY8CAZ8gBAAAAAAAAFUWji01BSk9SX0JSQU5ERIeEaXNvbWfIAQAAAAAAABZFo41NSU5PUl9WRVJTSU9ORIeDNTEyZ8gBAAAAAAAAJ0WjkUNPTVBBVElCTEVfQlJBTkRTRIeQaXNvbWlzbzJhdmMxbXA0MWfIAQAAAAAAABpFo4dFTkNPREVSRIeNTGF2ZjU4LjQ1LjEwMHNzAQAAAAAAAIZjwItjxYj1SEir0QMk8GfIAQAAAAAAAB5Fo4xIQU5ETEVSX05BTUVEh4xWaWRlb0hhbmRsZXJnyAEAAAAAAAAhRaOHRU5DT0RFUkSHlExhdmM1OC45MS4xMDAgbGlidnB4Z8iiRaOIRFVSQVRJT05Eh5QwMDowMDowNS4wMDcwMDAwMDAAAHNzAQAAAAAAAIdjwItjxYhQRY8D0n+InmfIAQAAAAAAAB5Fo4xIQU5ETEVSX05BTUVEh4xTb3VuZEhhbmRsZXJnyAEAAAAAAAAiRaOHRU5DT0RFUkSHlUxhdmM1OC45MS4xMDAgbGlib3B1c2fIokWjiERVUkFUSU9ORIeUMDA6MDA6MDUuMDE4MDAwMDAwAAAfQ7Z1T2TngQCjh4IAAID4//6jQKSBAAeAMBIAnQEqQAHwAABHCIWFiIWEiAICAAYWBPcGgWSfa9ubJzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh7Jzh69AD+/6tQgKOHggAVgPj//qOHggApgPj//qOHggA9gPj//qOHggBRgPj//qOHggBlgPj//qOegQBrANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCAHmA+P/+o4eCAI2A+P/+o4eCAKGA+P/+o4eCALWA+P/+o4eCAMmA+P/+o56BAM8A0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IA3YD4//6jh4IA8YD4//6jh4IBBYD4//6jh4IBGYD4//6jh4IBLYD4//6jnoEBMwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHggFBgPj//qOHggFVgPj//qOHggFpgPj//qOHggF9gPj//qOHggGRgPj//qOegQGXANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCAaWA+P/+o4eCAbmA+P/+o4eCAc2A+P/+o4eCAeGA+P/+o4eCAfWA+P/+o56BAfsA0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4ICCYD4//6jh4ICHYD4//6jh4ICMYD4//6jh4ICRYD4//6jh4ICWYD4//6jnoECXwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHggJtgPj//qOHggKBgPj//qOHggKVgPj//qOHggKpgPj//qOHggK9gPj//qOegQLDANECAAUQEBRgAGFgv9AAIgAQzX61yT5xzAAAo4eCAtGA+P/+o4eCAuWA+P/+o4eCAvmA+P/+o4eCAw2A+P/+o4eCAyGA+P/+o56BAycA0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IDNYD4//
6jh4IDSYD4//6jh4IDXYD4//6jh4IDcYD4//6jh4IDhYD4//6jnoEDiwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHggOZgPj//qOHggOtgPj//qOHggPBgPj//qOHggPVgPj//qOHggPpgPj//qOegQPvANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCA/2A+P/+o4eCBBGA+P/+o4eCBCWA+P/+o4eCBDmA+P/+o4eCBE2A+P/+o56BBFMA0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IEiID4//6jh4IEnID4//6jh4IEsID4//6jnoEEtwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHggTEgPj//qOHggTYgPj//qOHggTsgPj//qOHggUAgPj//qOHggUUgPj//qOegQUbANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCBSiA+P/+o4eCBTyA+P/+o4eCBVCA+P/+o4eCBWSA+P/+o4eCBXiA+P/+o56BBX8A0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IFjID4//6jh4IFoID4//6jh4IFtID4//6jh4IFyID4//6jh4IF3ID4//6jnoEF4wDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHggXwgPj//qOHggYEgPj//qOHggYYgPj//qOHggYsgPj//qOHggZAgPj//qOegQZHANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCBlSA+P/+o4eCBmiA+P/+o4eCBnyA+P/+o4eCBpCA+P/+o4eCBqSA+P/+o56BBqsA0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IGuID4//6jh4IGzID4//6jh4IG4ID4//6jh4IG9ID4//6jh4IHCID4//6jnoEHDwDRAgAFEBAUYABhYL/QACIAEM1+tck+ccwAAKOHggccgPj//qOHggcwgPj//qOHggdEgPj//qOHggdYgPj//qOHggdsgPj//qOegQdzANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCB4CA+P/+o4eCB5SA+P/+o4eCB6iA+P/+o4eCB7yA+P/+o4eCB9CA+P/+o56BB9cA0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IH5ID4//6jh4IH+ID4//6jh4IIDID4//6jh4IIIID4//6jh4IINID4//6jnoEIOwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHgghIgPj//qOHgghcgPj//qOHgghwgPj//qOHggiEgPj//qOegQifANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCCMCA+P/+o4eCCNSA+P/+o4eCCOiA+P/+o4eCCPyA+P/+o56BCQMA0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IJEID4//6jh4IJJID4//6jh4IJOID4//6jh4IJTID4//6jh4IJYID4//6jnoEJZwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHggl0gPj//qOHggmIgPj//qOHggmcgPj//qOHggmwgPj//qOHggnEgPj//qOegQnLANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCCdiA+P/+o4eCCeyA+P/+o4eCCgCA+P/+o4eCChSA+P/+o4eCCiiA+P/+o56BCi8A0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IKPID4//6jh4IKUID4//6jh4IKZID4//6jh4IKeID4//6jh4IKjID4//6jnoEKkwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHggqggPj//qOHggq0gPj//qOHggrIgPj//qOHggrcgPj//qOHggrwgPj//qOegQr3ANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCCwSA+P/+o4eCCxiA+P/+o4eCCyyA+P/+o4eCC0CA+P/+o4eCC1SA+P/+o56BC1sA0QIABRAQFGAAYWC/0AAiABDNfrXJPnHMAACjh4ILaID4//6jh4ILfID4//6jh4ILkID4//6jh4ILpID4//6jh4ILuID4//6jnoELvwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHggvMgPj//qOHggvggPj//qOHggv0gPj//qOHggwIgPj//qOHggwcgPj//qOegQwjANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCDDCA+P/+o4eCDESA+P/+o4eCDFiA+P/+o4eCDGyA+P/+o4eCDICA+P/+o56BDIcA0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IMlID4//6jh4IMqID4//6jh4IMvID4//6jh4IM0ID4//6jnoEM6wDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHgg0MgPj//qOHgg0ggPj//qOHgg00gPj//qOHgg1IgPj//qOegQ1PANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCDVyA+P/+o4eCDXCA+P/+o4eCDYSA+P/+o4eCDZiA+P/+o4eCDayA+P/+o56BDbMA0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4INwID4//6jh4IN1ID4//6jh4IN6ID4//6jh4IN/ID4//6jh4IOEID4//6jnoEOFwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHgg4kgPj//qOHgg44gPj//qOHgg5MgPj//qOHgg5ggPj//qOHgg50gPj//qOegQ57ANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCDoiA+P/+o4eCDpyA+P/+o4eCDrCA+P/+o4eCDsSA+P/+o4eCDtiA+P/+o56BDt8A0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IO7ID4//6jh4IPAID4//6jh4IPFID4//6jh4IPKID4//6jh4IPPID4//6jnoEPQwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHgg9QgPj//qOHgg9kgPj//qOHgg94gPj//qOHgg+MgPj//qOHgg+ggPj//qOegQ+nANECAAUQEBRgAGFgv9AAIgAQzX61yT5xzAAAo4eCD7SA+P/+o4eCD8iA+P/+o4eCD9yA+P/+o4eCD/CA+P/+o4eCEASA+P/+o56BEAsA0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IQGID4//6jh4IQLID4//6jh4IQQID4//6jh4IQVID4//6jh4IQaID4//6jnoEQbwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHghB8gPj//qOHghCQgPj//qOHghCkgPj//qOHghC4gPj//qOHghDMgPj//qOegRDTANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCEOCA+P/+o4eCEPSA+P/+o4eCEQiA+P/+o56BETcA0
QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4IRQ4D4//6jh4IRV4D4//6jh4IRa4D4//6jh4IRf4D4//6jh4IRk4D4//6jnoERmwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHghGngPj//qOHghG7gPj//qOHghHPgPj//qOHghHjgPj//qOHghH3gPj//qOegRH/ANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCEguA+P/+o4eCEh+A+P/+o4eCEjOA+P/+o4eCEkeA+P/+o4eCEluA+P/+o56BEmMA0QIABRCsABgAGFgv9AAIgAQzX61yT5xzAACjh4ISb4D4//6jh4ISg4D4//6jh4ISl4D4//6jh4ISq4D4//6jh4ISv4D4//6jnoESxwDRAgAFEKwAGAAYWC/0AAiABDNfrXJPnHMAAKOHghLTgPj//qOHghLngPj//qOHghL7gPj//qOHghMPgPj//qOHghMjgPj//qOegRMrANECAAUQrAAYABhYL/QACIAEM1+tck+ccwAAo4eCEzeA+P/+o4eCE0uA+P/+o4eCE1+A+P/+o4eCE3OA+P/+oAEAAAAAAAAPoYeCE4cA+P/+daKDB/KBHFO7a5G7j7OBB7eK94EB8YIDX/CBDA=="; const MP4$1 = "data:video/mp4;base64,AAAAIGZ0eXBpc29tAAACAGlzb21pc28yYXZjMW1wNDEAAAAIZnJlZQAACORtZGF03gIATGF2YzU4LjM1LjEwMAACMEAOAAACcQYF//9t3EXpvebZSLeWLNgg2SPu73gyNjQgLSBjb3JlIDE2MSByMzAyNyA0MTIxMjc3IC0gSC4yNjQvTVBFRy00IEFWQyBjb2RlYyAtIENvcHlsZWZ0IDIwMDMtMjAyMCAtIGh0dHA6Ly93d3cudmlkZW9sYW4ub3JnL3gyNjQuaHRtbCAtIG9wdGlvbnM6IGNhYmFjPTAgcmVmPTMgZGVibG9jaz0xOjA6MCBhbmFseXNlPTB4MToweDExMSBtZT1oZXggc3VibWU9NyBwc3k9MSBwc3lfcmQ9MS4wMDowLjAwIG1peGVkX3JlZj0xIG1lX3JhbmdlPTE2IGNocm9tYV9tZT0xIHRyZWxsaXM9MSA4eDhkY3Q9MCBjcW09MCBkZWFkem9uZT0yMSwxMSBmYXN0X3Bza2lwPTEgY2hyb21hX3FwX29mZnNldD0tMiB0aHJlYWRzPTcgbG9va2FoZWFkX3RocmVhZHM9MSBzbGljZWRfdGhyZWFkcz0wIG5yPTAgZGVjaW1hdGU9MSBpbnRlcmxhY2VkPTAgYmx1cmF5X2NvbXBhdD0wIGNvbnN0cmFpbmVkX2ludHJhPTAgYmZyYW1lcz0wIHdlaWdodHA9MCBrZXlpbnQ9MjUwIGtleWludF9taW49MTAgc2NlbmVjdXQ9NDAgaW50cmFfcmVmcmVzaD0wIHJjX2xvb2thaGVhZD00MCByYz1jcmYgbWJ0cmVlPTEgY3JmPTIzLjAgcWNvbXA9MC42MCBxcG1pbj0wIHFwbWF4PTY5IHFwc3RlcD00IGlwX3JhdGlvPTEuNDAgYXE9MToxLjAwAIAAAADvZYiED/JigADD7JycnJycnJycnJycnJycnJycnJ11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111114BGCAHARggBwEYIAcBGCAHARggBwAAAAdBmjgf4BLYARggBwEYIAcBGCAHARggBwAAAAdBmlQH+AS2ARggBwEYIAcBGCAHARggBwAAAAdBmmA/wCWwARggBwEYIAcBGCAHARggBwEYIAcAAAAHQZqAP8AlsAEYIAcBGCAHARggBwEYIAcAAAAHQZqgP8AlsAEYIAcBGCAHARggBwEYIAcAAAAHQZrAP8AlsAEYIAcBGCAHARggBwEYIAcBGCAHAAAAB0Ga4D/AJbABGCAHARggBwEYIAcBGCAHAAAAB0GbAD/AJbABGCAHARggBwEYIAcBGCAHAAAAB0GbID/AJbABGCAHARggBwEYIAcBGCAHARggBwAAAAdBm0A/wCWwARggBwEYIAcBGCAHAAAAB0GbYD/AJbABGCAHARggBwEYIAcAAAAHQZuAP8AlsAEYIAcBGCAHARggBwEYIAcBGCAHAAAAB0GboD/AJbABGCAHARggBwEYIAcBGCAHAAAAB0GbwD/AJbABGCAHARggBwEYIAcBGCAHAAAAB0Gb4D/AJbABGCAHARggBwEYIAcBGCAHARggBwAAAAdBmgA/wCWwARggBwEYIAcBGCAHARggBwAAAAdBmiA/wCWwARggBwEYIAcBGCAHARggBwAAAAdBmkA/wCWwARggBwEYIAcBGCAHARggBwEYIAcAAAAHQZpgP8AlsAEYIAcBGCAHARggBwEYIAcAAAAHQZqAP8AlsAEYIAcBGCAHARggBwEYIAcAAAAHQZqgP8AlsAEYIAcBGCAHARggBwAAAAdBmsA/wCWwARggBwEYIAcBGCAHARggBwAAAAdBmuA/wCWwARggBwEYIAcBGCAHARggBwAAAAdBmwA/wCWwARggBwEYIAcBGCAHARggBwEYIAcAAAAHQZsgP8AlsAEYIAcBGCAHARggBwEYIAcAAAAHQZtAP8AlsAEYIAcBGCAHARggBwEYIAcAAAAHQZtgP8AlsAEYIAcBGCAHARggBwEYIAcBGCAHAAAAB0GbgD/AJbABGCAHARggBwEYIAcBGCAHAAAAB0GboD/AJbABGCAHARggBwEYIAcBGCAHAAAAB0GbwD/AJbABGCAHARggBwEYIAcBGCAHARggBwAAAAdBm+A/wCWwARggBwEYIAcBGCAHARggBwAAAAdBmgA/wCWwARggBwEYIAcAAAAHQZogP8AlsAEYIAcBGCAHARggBwEYIAcBGCAHAAAAB0GaQD/AJbABGCAHARggBwEYIAcBGCAHAAAAB0GaYD/AJbABGCAHARggBwEYIAcBGCAHAAAAB0GagD/AJbABGCAHARggBwEYIAcBGCAHARggBwAAAAdBmqA/wCWwARggBwEYIAcBGCAHARggBwAAAAdBmsA/wCWwARggBwEYIAcBGCAHARggBwAAAAdBmuA/wCWwARggBwEYIAcBGCAHARggBwEYIAcAAAAHQZsAP8AlsAEYIAcBGCAHARggBwEYIAcAAAAHQZsgP8AlsAEYIAcBGCAHARggBwEYIAcAAAAHQZtAP8AlsAEYIAcBGCAHARggBwEYIAcBGCAHAAAAB0GbYD/AJbABGCAH
ARggBwAAAAdBm4A/wCWwARggBwEYIAcBGCAHARggBwAAAAdBm6A/wCWwARggBwEYIAcBGCAHARggBwEYIAcAAAAHQZvAP8AlsAEYIAcBGCAHARggBwEYIAcAAAAHQZvgP8AlsAEYIAcBGCAHARggBwEYIAcAAAAHQZoAO8AlsAEYIAcBGCAHARggBwEYIAcBGCAHAAAAB0GaIDfAJbABGCAHARggBwEYIAcBGCAHAAAMxm1vb3YAAABsbXZoZAAAAAAAAAAAAAAAAAAAA+gAABOgAAEAAAEAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMAAAPLdHJhawAAAFx0a2hkAAAAAwAAAAAAAAAAAAAAAQAAAAAAABOIAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAElVVUA8AAAAAAAJGVkdHMAAAAcZWxzdAAAAAAAAAABAAATiAAAAAAAAQAAAAADQ21kaWEAAAAgbWRoZAAAAAAAAAAAAAAAAAAAKAAAAMgAVcQAAAAAAC1oZGxyAAAAAAAAAAB2aWRlAAAAAAAAAAAAAAAAVmlkZW9IYW5kbGVyAAAAAu5taW5mAAAAFHZtaGQAAAABAAAAAAAAAAAAAAAkZGluZgAAABxkcmVmAAAAAAAAAAEAAAAMdXJsIAAAAAEAAAKuc3RibAAAAKpzdHNkAAAAAAAAAAEAAACaYXZjMQAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAFAAPAASAAAAEgAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABj//wAAADRhdmNDAULAC//hABxnQsAL2QFB+/8ACwAMEAAAAwAQAAADAUDxQqSAAQAFaMuDyyAAAAAQcGFzcAAAAAsAAAAMAAAAGHN0dHMAAAAAAAAAAQAAADIAAAQAAAAAFHN0c3MAAAAAAAAAAQAAAAEAAAAcc3RzYwAAAAAAAAABAAAAAQAAAAEAAAABAAAA3HN0c3oAAAAAAAAAAAAAADIAAANoAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAAAsAAAALAAAACwAAANhzdGNvAAAAAAAAADIAAABFAAADwQAAA9wAAAP3AAAEFgAABDEAAARMAAAEawAABIYAAAShAAAEwAAABNcAAATuAAAFDQAABSgAAAVDAAAFYgAABX0AAAWYAAAFtwAABdIAAAXtAAAGBAAABh8AAAY6AAAGWQAABnQAAAaPAAAGrgAABskAAAbkAAAHAwAABx4AAAcxAAAHUAAAB2sAAAeGAAAHpQAAB8AAAAfbAAAH+gAACBUAAAgwAAAITwAACGIAAAh9AAAInAAACLcAAAjSAAAI8QAACCV0cmFrAAAAXHRraGQAAAADAAAAAAAAAAAAAAACAAAAAAAAE6AAAAAAAAAAAAAAAAEBAAAAAAEAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAkZWR0cwAAABxlbHN0AAAAAAAAAAEAABOIAAAEAAABAAAAAAedbWRpYQAAACBtZGhkAAAAAAAAAAAAAAAAAACsRAADYVRVxAAAAAAALWhkbHIAAAAAAAAAAHNvdW4AAAAAAAAAAAAAAABTb3VuZEhhbmRsZXIAAAAHSG1pbmYAAAAQc21oZAAAAAAAAAAAAAAAJGRpbmYAAAAcZHJlZgAAAAAAAAABAAAADHVybCAAAAABAAAHDHN0YmwAAABqc3RzZAAAAAAAAAABAAAAWm1wNGEAAAAAAAAAAQAAAAAAAAAAAAIAEAAAAACsRAAAAAAANmVzZHMAAAAAA4CAgCUAAgAEgICAF0AVAAAAAAENiAAABVQFgICABRIIVuUABoCAgAECAAAAYHN0dHMAAAAAAAAACgAAAC8AAAQAAAAAAQAACtUAAAAsAAAEAAAAAAEAAArWAAAALAAABAAAAAABAAAK1QAAACwAAAQAAAAAAQAACtUAAAAaAAAEAAAAAAEAAAH/AAABzHN0c2MAAAAAAAAAJQAAAAEAAAABAAAAAQAAAAIAAAAFAAAAAQAAAAMAAAAEAAAAAQAAAAUAAAAFAAAAAQAAAAYAAAAEAAAAAQAAAAgAAAAFAAAAAQAAAAkAAAAEAAAAAQAAAAsAAAAFAAAAAQAAAAwAAAADAAAAAQAAAA4AAAAFAAAAAQAAAA8AAAAEAAAAAQAAABEAAAAFAAAAAQAAABIAAAAEAAAAAQAAABQAAAAFAAAAAQAAABUAAAAEAAAAAQAAABcAAAADAAAAAQAAABgAAAAEAAAAAQAAABoAAAAFAAAAAQAAABsAAAAEAAAAAQAAAB0AAAAFAAAAAQAAAB4AAAAEAAAAAQAAACAAAAAFAAAAAQAAACEAAAAEAAAAAQAAACIAAAACAAAAAQAAACMAAAAFAAAAAQAAACQAAAAEAAAAAQAAACYAAAAFAAAAAQAAACcAAAAEAAAAAQAAACkAAAAFAAAAAQAAACoAAAAEAAAAAQAAACwAAAAFAAAAAQAAAC0AAAACAAAAAQAAAC4AAAAEAAAAAQAAAC8AAAAFAAAAAQAAADAAAAAEAAAAAQAAADIAAAAFAAAAAQAAADMAAAAEAAAAAQAAA1xzdHN6AAAAAAAAAAAAAADSAAAAFQAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAA
ABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAA3HN0Y28AAAAAAAAAMwAAADAAAAOtAAADzAAAA+cAAAQCAAAEIQAABDwAAARXAAAEdgAABJEAAASsAAAEywAABOIAAAT5AAAFGAAABTMAAAVOAAAFbQAABYgAAAWjAAAFwgAABd0AAAX4AAAGDwAABioAAAZFAAAGZAAABn8AAAaaAAAGuQAABtQAAAbvAAAHDgAABykAAAc8AAAHWwAAB3YAAAeRAAAHsAAAB8sAAAfmAAAIBQAACCAAAAg7AAAIWgAACG0AAAiIAAAIpwAACMIAAAjdAAAI/AAAABpzZ3BkAQAAAHJvbGwAAAACAAAAAf//AAAAHHNiZ3AAAAAAcm9sbAAAAAEAAADSAAAAAQAAAGJ1ZHRhAAAAWm1ldGEAAAAAAAAAIWhkbHIAAAAAAAAAAG1kaXJhcHBsAAAAAAAAAAAAAAAALWlsc3QAAAAlqXRvbwAAAB1kYXRhAAAAAQAAAABMYXZmNTguMjAuMTAw"; // Detect native Wake Lock API support const nativeWakeLock = () => "wakeLock" in navigator && window.navigator.userAgent.indexOf("Samsung") === -1 && isFalse(isIOS()); class NoSleep { constructor(player) { this.player = player; this.enabled = false; if (nativeWakeLock()) { this.player.debug.log('NoSleep', 'Native Wake Lock API supported.'); this._wakeLock = null; this.handleVisibilityChange = () => { if (this._wakeLock !== null && document.visibilityState === "visible") { this.enable(); } }; document.addEventListener("visibilitychange", this.handleVisibilityChange); document.addEventListener("fullscreenchange", this.handleVisibilityChange); } else { this.player.debug.log('NoSleep', 'Native Wake Lock API not supported. so use video element.'); // Set up no sleep video element this.noSleepVideo = document.createElement("video"); this.noSleepVideo.setAttribute("title", "No Sleep"); this.noSleepVideo.setAttribute("playsinline", ""); this._addSourceToVideo(this.noSleepVideo, "webm", WEBM); this._addSourceToVideo(this.noSleepVideo, "mp4", MP4$1); Object.assign(this.noSleepVideo.style, { position: "absolute", left: "-100%", top: "-100%" }); document.querySelector('body').append(this.noSleepVideo); this.handleNoSleepVideoTimeUpdate = () => { if (this.noSleepVideo && this.noSleepVideo.currentTime > 4) { this.noSleepVideo.currentTime = 1; } }; this.noSleepVideo.addEventListener('timeupdate', this.handleNoSleepVideoTimeUpdate); } } destroy() { if (this._wakeLock) { this._wakeLock.release(); this._wakeLock = null; } if (this.noSleepVideo) { if (this.handleNoSleepVideoTimeUpdate) { this.noSleepVideo.removeEventListener('timeupdate', this.handleNoSleepVideoTimeUpdate); } try { if (this.noSleepVideo.parentNode) { this.noSleepVideo.parentNode.removeChild(this.noSleepVideo); } } catch (e) { this.player.debug.warn('NoSleep', 'Failed to remove noSleepVideo element.'); } this.noSleepVideo = null; } if (this.handleVisibilityChange) { document.removeEventListener("visibilitychange", this.handleVisibilityChange); document.removeEventListener("fullscreenchange", this.handleVisibilityChange); } } _addSourceToVideo(element, type, dataURI) { var source = document.createElement("source"); source.src = dataURI; source.type = `video/${type}`; element.appendChild(source); } get isEnabled() { return this.enabled; } enable() { const debug = this.player.debug; if (nativeWakeLock()) { return navigator.wakeLock.request("screen").then(wakeLock => { this._wakeLock = wakeLock; this.enabled = true; 
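// Note (descriptive comment): a screen wake-lock sentinel is released automatically by the
// browser when the document is hidden; the visibilitychange / fullscreenchange handler
// registered in the constructor calls enable() again once the page becomes visible, so the
// lock is effectively re-acquired for as long as the player needs it.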
debug.log('wakeLock', 'Wake Lock active.'); this._wakeLock.addEventListener("release", () => { // ToDo: Potentially emit an event for the page to observe since // Wake Lock releases happen when page visibility changes. // (https://web.dev/wakelock/#wake-lock-lifecycle) debug.log('wakeLock', 'Wake Lock released.'); }); }).catch(err => { this.enabled = false; debug.warn('wakeLock', `${err.name}, ${err.message}`); throw err; }); } else { let playPromise = this.noSleepVideo.play(); return playPromise.then(res => { debug.log('wakeLock', 'noSleepVideo Wake Lock active.'); this.enabled = true; return res; }).catch(err => { debug.warn('wakeLock', `noSleepVideo ${err.name}, ${err.message}`); this.enabled = false; throw err; }); } } disable() { if (nativeWakeLock()) { if (this._wakeLock) { this._wakeLock.release(); } this._wakeLock = null; } else { if (this.noSleepVideo) { this.noSleepVideo.pause(); } } this.enabled = false; this.player.debug.log('wakeLock', 'Disabling wake lock.'); } } function getDefaultExportFromCjs (x) { return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x; } var urlToolkit = {exports: {}}; (function (module, exports) { // see https://tools.ietf.org/html/rfc1808 (function (root) { var URL_REGEX = /^(?=((?:[a-zA-Z0-9+\-.]+:)?))\1(?=((?:\/\/[^\/?#]*)?))\2(?=((?:(?:[^?#\/]*\/)*[^;?#\/]*)?))\3((?:;[^?#]*)?)(\?[^#]*)?(#[^]*)?$/; var FIRST_SEGMENT_REGEX = /^(?=([^\/?#]*))\1([^]*)$/; var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g; var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g; var URLToolkit = { // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or // // E.g // With opts.alwaysNormalize = false (default, spec compliant) // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g // With opts.alwaysNormalize = true (not spec compliant) // http://a.com/b/cd + /e/f/../g => http://a.com/e/g buildAbsoluteURL: function (baseURL, relativeURL, opts) { opts = opts || {}; // remove any remaining space and CRLF baseURL = baseURL.trim(); relativeURL = relativeURL.trim(); if (!relativeURL) { // 2a) If the embedded URL is entirely empty, it inherits the // entire base URL (i.e., is set equal to the base URL) // and we are done. if (!opts.alwaysNormalize) { return baseURL; } var basePartsForNormalise = URLToolkit.parseURL(baseURL); if (!basePartsForNormalise) { throw new Error('Error trying to parse base URL.'); } basePartsForNormalise.path = URLToolkit.normalizePath( basePartsForNormalise.path ); return URLToolkit.buildURLFromParts(basePartsForNormalise); } var relativeParts = URLToolkit.parseURL(relativeURL); if (!relativeParts) { throw new Error('Error trying to parse relative URL.'); } if (relativeParts.scheme) { // 2b) If the embedded URL starts with a scheme name, it is // interpreted as an absolute URL and we are done. 
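// For example (hypothetical URLs): buildAbsoluteURL('http://a.com/b/cd', 'https://other.com/x.ts')
// returns 'https://other.com/x.ts' unchanged per RFC 1808 step 2b, because the relative URL
// already carries a scheme; only when opts.alwaysNormalize is set is its path additionally
// run through normalizePath().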
if (!opts.alwaysNormalize) { return relativeURL; } relativeParts.path = URLToolkit.normalizePath(relativeParts.path); return URLToolkit.buildURLFromParts(relativeParts); } var baseParts = URLToolkit.parseURL(baseURL); if (!baseParts) { throw new Error('Error trying to parse base URL.'); } if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') { // If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a' var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path); baseParts.netLoc = pathParts[1]; baseParts.path = pathParts[2]; } if (baseParts.netLoc && !baseParts.path) { baseParts.path = '/'; } var builtParts = { // 2c) Otherwise, the embedded URL inherits the scheme of // the base URL. scheme: baseParts.scheme, netLoc: relativeParts.netLoc, path: null, params: relativeParts.params, query: relativeParts.query, fragment: relativeParts.fragment, }; if (!relativeParts.netLoc) { // 3) If the embedded URL's is non-empty, we skip to // Step 7. Otherwise, the embedded URL inherits the // (if any) of the base URL. builtParts.netLoc = baseParts.netLoc; // 4) If the embedded URL path is preceded by a slash "/", the // path is not relative and we skip to Step 7. if (relativeParts.path[0] !== '/') { if (!relativeParts.path) { // 5) If the embedded URL path is empty (and not preceded by a // slash), then the embedded URL inherits the base URL path builtParts.path = baseParts.path; // 5a) if the embedded URL's is non-empty, we skip to // step 7; otherwise, it inherits the of the base // URL (if any) and if (!relativeParts.params) { builtParts.params = baseParts.params; // 5b) if the embedded URL's is non-empty, we skip to // step 7; otherwise, it inherits the of the base // URL (if any) and we skip to step 7. if (!relativeParts.query) { builtParts.query = baseParts.query; } } } else { // 6) The last segment of the base URL's path (anything // following the rightmost slash "/", or the entire path if no // slash is present) is removed and the embedded URL's path is // appended in its place. var baseURLPath = baseParts.path; var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path; builtParts.path = URLToolkit.normalizePath(newPath); } } } if (builtParts.path === null) { builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path; } return URLToolkit.buildURLFromParts(builtParts); }, parseURL: function (url) { var parts = URL_REGEX.exec(url); if (!parts) { return null; } return { scheme: parts[1] || '', netLoc: parts[2] || '', path: parts[3] || '', params: parts[4] || '', query: parts[5] || '', fragment: parts[6] || '', }; }, normalizePath: function (path) { // The following operations are // then applied, in order, to the new path: // 6a) All occurrences of "./", where "." is a complete path // segment, are removed. // 6b) If the path ends with "." as a complete path segment, // that "." is removed. path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, ''); // 6c) All occurrences of "/../", where is a // complete path segment not equal to "..", are removed. // Removal of these path segments is performed iteratively, // removing the leftmost matching pattern on each iteration, // until no matching pattern remains. // 6d) If the path ends with "/..", where is a // complete path segment not equal to "..", that // "/.." is removed. 
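// Note (descriptive comment): the path is processed in reverse (see the split/reverse/join
// above) so the two regexes can strip './' segments and '<segment>/../' pairs using
// lookaheads; the loop below keeps removing '..'-pairs until the string length stops
// changing, then the result is reversed back. For example, 'a/./b/../c' normalizes to 'a/c'.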
while ( path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length ) {} return path.split('').reverse().join(''); }, buildURLFromParts: function (parts) { return ( parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment ); }, }; module.exports = URLToolkit; })(); } (urlToolkit)); var urlToolkitExports = urlToolkit.exports; function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } function _objectSpread2(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } function _extends() { _extends = Object.assign ? Object.assign.bind() : function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); } function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? 
key : String(key); } const isFiniteNumber = Number.isFinite || function (value) { return typeof value === 'number' && isFinite(value); }; let Events = /*#__PURE__*/function (Events) { Events["MEDIA_ATTACHING"] = "hlsMediaAttaching"; Events["MEDIA_ATTACHED"] = "hlsMediaAttached"; Events["MEDIA_DETACHING"] = "hlsMediaDetaching"; Events["MEDIA_DETACHED"] = "hlsMediaDetached"; Events["BUFFER_RESET"] = "hlsBufferReset"; Events["BUFFER_CODECS"] = "hlsBufferCodecs"; Events["BUFFER_CREATED"] = "hlsBufferCreated"; Events["BUFFER_APPENDING"] = "hlsBufferAppending"; Events["BUFFER_APPENDED"] = "hlsBufferAppended"; Events["BUFFER_EOS"] = "hlsBufferEos"; Events["BUFFER_FLUSHING"] = "hlsBufferFlushing"; Events["BUFFER_FLUSHED"] = "hlsBufferFlushed"; Events["MANIFEST_LOADING"] = "hlsManifestLoading"; Events["MANIFEST_LOADED"] = "hlsManifestLoaded"; Events["MANIFEST_PARSED"] = "hlsManifestParsed"; Events["LEVEL_SWITCHING"] = "hlsLevelSwitching"; Events["LEVEL_SWITCHED"] = "hlsLevelSwitched"; Events["LEVEL_LOADING"] = "hlsLevelLoading"; Events["LEVEL_LOADED"] = "hlsLevelLoaded"; Events["LEVEL_UPDATED"] = "hlsLevelUpdated"; Events["LEVEL_PTS_UPDATED"] = "hlsLevelPtsUpdated"; Events["LEVELS_UPDATED"] = "hlsLevelsUpdated"; Events["AUDIO_TRACKS_UPDATED"] = "hlsAudioTracksUpdated"; Events["AUDIO_TRACK_SWITCHING"] = "hlsAudioTrackSwitching"; Events["AUDIO_TRACK_SWITCHED"] = "hlsAudioTrackSwitched"; Events["AUDIO_TRACK_LOADING"] = "hlsAudioTrackLoading"; Events["AUDIO_TRACK_LOADED"] = "hlsAudioTrackLoaded"; Events["SUBTITLE_TRACKS_UPDATED"] = "hlsSubtitleTracksUpdated"; Events["SUBTITLE_TRACKS_CLEARED"] = "hlsSubtitleTracksCleared"; Events["SUBTITLE_TRACK_SWITCH"] = "hlsSubtitleTrackSwitch"; Events["SUBTITLE_TRACK_LOADING"] = "hlsSubtitleTrackLoading"; Events["SUBTITLE_TRACK_LOADED"] = "hlsSubtitleTrackLoaded"; Events["SUBTITLE_FRAG_PROCESSED"] = "hlsSubtitleFragProcessed"; Events["CUES_PARSED"] = "hlsCuesParsed"; Events["NON_NATIVE_TEXT_TRACKS_FOUND"] = "hlsNonNativeTextTracksFound"; Events["INIT_PTS_FOUND"] = "hlsInitPtsFound"; Events["FRAG_LOADING"] = "hlsFragLoading"; Events["FRAG_LOAD_EMERGENCY_ABORTED"] = "hlsFragLoadEmergencyAborted"; Events["FRAG_LOADED"] = "hlsFragLoaded"; Events["FRAG_DECRYPTED"] = "hlsFragDecrypted"; Events["FRAG_PARSING_INIT_SEGMENT"] = "hlsFragParsingInitSegment"; Events["FRAG_PARSING_USERDATA"] = "hlsFragParsingUserdata"; Events["FRAG_PARSING_METADATA"] = "hlsFragParsingMetadata"; Events["FRAG_PARSED"] = "hlsFragParsed"; Events["FRAG_BUFFERED"] = "hlsFragBuffered"; Events["FRAG_CHANGED"] = "hlsFragChanged"; Events["FPS_DROP"] = "hlsFpsDrop"; Events["FPS_DROP_LEVEL_CAPPING"] = "hlsFpsDropLevelCapping"; Events["ERROR"] = "hlsError"; Events["DESTROYING"] = "hlsDestroying"; Events["KEY_LOADING"] = "hlsKeyLoading"; Events["KEY_LOADED"] = "hlsKeyLoaded"; Events["LIVE_BACK_BUFFER_REACHED"] = "hlsLiveBackBufferReached"; Events["BACK_BUFFER_REACHED"] = "hlsBackBufferReached"; return Events; }({}); /** * Defines each Event type and payload by Event name. Used in {@link hls.js#HlsEventEmitter} to strongly type the event listener API. 
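* For example, code consuming the bundled hls.js instance (where `hls` is an Hls instance)
* typically subscribes with hls.on(Events.MANIFEST_PARSED, (event, data) => { ... });
* the string values below ('hlsManifestParsed', ...) are what the underlying event emitter
* actually dispatches.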
*/ let ErrorTypes = /*#__PURE__*/function (ErrorTypes) { ErrorTypes["NETWORK_ERROR"] = "networkError"; ErrorTypes["MEDIA_ERROR"] = "mediaError"; ErrorTypes["KEY_SYSTEM_ERROR"] = "keySystemError"; ErrorTypes["MUX_ERROR"] = "muxError"; ErrorTypes["OTHER_ERROR"] = "otherError"; return ErrorTypes; }({}); let ErrorDetails = /*#__PURE__*/function (ErrorDetails) { ErrorDetails["KEY_SYSTEM_NO_KEYS"] = "keySystemNoKeys"; ErrorDetails["KEY_SYSTEM_NO_ACCESS"] = "keySystemNoAccess"; ErrorDetails["KEY_SYSTEM_NO_SESSION"] = "keySystemNoSession"; ErrorDetails["KEY_SYSTEM_NO_CONFIGURED_LICENSE"] = "keySystemNoConfiguredLicense"; ErrorDetails["KEY_SYSTEM_LICENSE_REQUEST_FAILED"] = "keySystemLicenseRequestFailed"; ErrorDetails["KEY_SYSTEM_SERVER_CERTIFICATE_REQUEST_FAILED"] = "keySystemServerCertificateRequestFailed"; ErrorDetails["KEY_SYSTEM_SERVER_CERTIFICATE_UPDATE_FAILED"] = "keySystemServerCertificateUpdateFailed"; ErrorDetails["KEY_SYSTEM_SESSION_UPDATE_FAILED"] = "keySystemSessionUpdateFailed"; ErrorDetails["KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED"] = "keySystemStatusOutputRestricted"; ErrorDetails["KEY_SYSTEM_STATUS_INTERNAL_ERROR"] = "keySystemStatusInternalError"; ErrorDetails["MANIFEST_LOAD_ERROR"] = "manifestLoadError"; ErrorDetails["MANIFEST_LOAD_TIMEOUT"] = "manifestLoadTimeOut"; ErrorDetails["MANIFEST_PARSING_ERROR"] = "manifestParsingError"; ErrorDetails["MANIFEST_INCOMPATIBLE_CODECS_ERROR"] = "manifestIncompatibleCodecsError"; ErrorDetails["LEVEL_EMPTY_ERROR"] = "levelEmptyError"; ErrorDetails["LEVEL_LOAD_ERROR"] = "levelLoadError"; ErrorDetails["LEVEL_LOAD_TIMEOUT"] = "levelLoadTimeOut"; ErrorDetails["LEVEL_PARSING_ERROR"] = "levelParsingError"; ErrorDetails["LEVEL_SWITCH_ERROR"] = "levelSwitchError"; ErrorDetails["AUDIO_TRACK_LOAD_ERROR"] = "audioTrackLoadError"; ErrorDetails["AUDIO_TRACK_LOAD_TIMEOUT"] = "audioTrackLoadTimeOut"; ErrorDetails["SUBTITLE_LOAD_ERROR"] = "subtitleTrackLoadError"; ErrorDetails["SUBTITLE_TRACK_LOAD_TIMEOUT"] = "subtitleTrackLoadTimeOut"; ErrorDetails["FRAG_LOAD_ERROR"] = "fragLoadError"; ErrorDetails["FRAG_LOAD_TIMEOUT"] = "fragLoadTimeOut"; ErrorDetails["FRAG_DECRYPT_ERROR"] = "fragDecryptError"; ErrorDetails["FRAG_PARSING_ERROR"] = "fragParsingError"; ErrorDetails["FRAG_GAP"] = "fragGap"; ErrorDetails["REMUX_ALLOC_ERROR"] = "remuxAllocError"; ErrorDetails["KEY_LOAD_ERROR"] = "keyLoadError"; ErrorDetails["KEY_LOAD_TIMEOUT"] = "keyLoadTimeOut"; ErrorDetails["BUFFER_ADD_CODEC_ERROR"] = "bufferAddCodecError"; ErrorDetails["BUFFER_INCOMPATIBLE_CODECS_ERROR"] = "bufferIncompatibleCodecsError"; ErrorDetails["BUFFER_APPEND_ERROR"] = "bufferAppendError"; ErrorDetails["BUFFER_APPENDING_ERROR"] = "bufferAppendingError"; ErrorDetails["BUFFER_STALLED_ERROR"] = "bufferStalledError"; ErrorDetails["BUFFER_FULL_ERROR"] = "bufferFullError"; ErrorDetails["BUFFER_SEEK_OVER_HOLE"] = "bufferSeekOverHole"; ErrorDetails["BUFFER_NUDGE_ON_STALL"] = "bufferNudgeOnStall"; ErrorDetails["INTERNAL_EXCEPTION"] = "internalException"; ErrorDetails["INTERNAL_ABORTED"] = "aborted"; ErrorDetails["UNKNOWN"] = "unknown"; return ErrorDetails; }({}); const noop = function noop() {}; const fakeLogger = { trace: noop, debug: noop, log: noop, warn: noop, info: noop, error: noop }; let exportedLogger = fakeLogger; // let lastCallTime; // function formatMsgWithTimeInfo(type, msg) { // const now = Date.now(); // const diff = lastCallTime ? 
'+' + (now - lastCallTime) : '0'; // lastCallTime = now; // msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )'; // return msg; // } function consolePrintFn(type) { const func = self.console[type]; if (func) { return func.bind(self.console, `[${type}] >`); } return noop; } function exportLoggerFunctions(debugConfig, ...functions) { functions.forEach(function (type) { exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type); }); } function enableLogs(debugConfig, id) { // check that console is available if (self.console && debugConfig === true || typeof debugConfig === 'object') { exportLoggerFunctions(debugConfig, // Remove out from list here to hard-disable a log-level // 'trace', 'debug', 'log', 'info', 'warn', 'error'); // Some browsers don't allow to use bind on console object anyway // fallback to default if needed try { exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.4.12"}`); } catch (e) { exportedLogger = fakeLogger; } } else { exportedLogger = fakeLogger; } } const logger = exportedLogger; const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/; const ATTR_LIST_REGEX = /(.+?)=(".*?"|.*?)(?:,|$)/g; // adapted from https://github.com/kanongil/node-m3u8parse/blob/master/attrlist.js class AttrList { constructor(attrs) { if (typeof attrs === 'string') { attrs = AttrList.parseAttrList(attrs); } for (const attr in attrs) { if (attrs.hasOwnProperty(attr)) { if (attr.substring(0, 2) === 'X-') { this.clientAttrs = this.clientAttrs || []; this.clientAttrs.push(attr); } this[attr] = attrs[attr]; } } } decimalInteger(attrName) { const intValue = parseInt(this[attrName], 10); if (intValue > Number.MAX_SAFE_INTEGER) { return Infinity; } return intValue; } hexadecimalInteger(attrName) { if (this[attrName]) { let stringValue = (this[attrName] || '0x').slice(2); stringValue = (stringValue.length & 1 ? '0' : '') + stringValue; const value = new Uint8Array(stringValue.length / 2); for (let i = 0; i < stringValue.length / 2; i++) { value[i] = parseInt(stringValue.slice(i * 2, i * 2 + 2), 16); } return value; } else { return null; } } hexadecimalIntegerAsNumber(attrName) { const intValue = parseInt(this[attrName], 16); if (intValue > Number.MAX_SAFE_INTEGER) { return Infinity; } return intValue; } decimalFloatingPoint(attrName) { return parseFloat(this[attrName]); } optionalFloat(attrName, defaultValue) { const value = this[attrName]; return value ? 
parseFloat(value) : defaultValue; } enumeratedString(attrName) { return this[attrName]; } bool(attrName) { return this[attrName] === 'YES'; } decimalResolution(attrName) { const res = DECIMAL_RESOLUTION_REGEX.exec(this[attrName]); if (res === null) { return undefined; } return { width: parseInt(res[1], 10), height: parseInt(res[2], 10) }; } static parseAttrList(input) { let match; const attrs = {}; const quote = '"'; ATTR_LIST_REGEX.lastIndex = 0; while ((match = ATTR_LIST_REGEX.exec(input)) !== null) { let value = match[2]; if (value.indexOf(quote) === 0 && value.lastIndexOf(quote) === value.length - 1) { value = value.slice(1, -1); } const name = match[1].trim(); attrs[name] = value; } return attrs; } } // Avoid exporting const enum so that these values can be inlined function isDateRangeCueAttribute(attrName) { return attrName !== "ID" && attrName !== "CLASS" && attrName !== "START-DATE" && attrName !== "DURATION" && attrName !== "END-DATE" && attrName !== "END-ON-NEXT"; } function isSCTE35Attribute(attrName) { return attrName === "SCTE35-OUT" || attrName === "SCTE35-IN"; } class DateRange { constructor(dateRangeAttr, dateRangeWithSameId) { this.attr = void 0; this._startDate = void 0; this._endDate = void 0; this._badValueForSameId = void 0; if (dateRangeWithSameId) { const previousAttr = dateRangeWithSameId.attr; for (const key in previousAttr) { if (Object.prototype.hasOwnProperty.call(dateRangeAttr, key) && dateRangeAttr[key] !== previousAttr[key]) { logger.warn(`DATERANGE tag attribute: "${key}" does not match for tags with ID: "${dateRangeAttr.ID}"`); this._badValueForSameId = key; break; } } // Merge DateRange tags with the same ID dateRangeAttr = _extends(new AttrList({}), previousAttr, dateRangeAttr); } this.attr = dateRangeAttr; this._startDate = new Date(dateRangeAttr["START-DATE"]); if ("END-DATE" in this.attr) { const endDate = new Date(this.attr["END-DATE"]); if (isFiniteNumber(endDate.getTime())) { this._endDate = endDate; } } } get id() { return this.attr.ID; } get class() { return this.attr.CLASS; } get startDate() { return this._startDate; } get endDate() { if (this._endDate) { return this._endDate; } const duration = this.duration; if (duration !== null) { return new Date(this._startDate.getTime() + duration * 1000); } return null; } get duration() { if ("DURATION" in this.attr) { const duration = this.attr.decimalFloatingPoint("DURATION"); if (isFiniteNumber(duration)) { return duration; } } else if (this._endDate) { return (this._endDate.getTime() - this._startDate.getTime()) / 1000; } return null; } get plannedDuration() { if ("PLANNED-DURATION" in this.attr) { return this.attr.decimalFloatingPoint("PLANNED-DURATION"); } return null; } get endOnNext() { return this.attr.bool("END-ON-NEXT"); } get isValid() { return !!this.id && !this._badValueForSameId && isFiniteNumber(this.startDate.getTime()) && (this.duration === null || this.duration >= 0) && (!this.endOnNext || !!this.class); } } class LoadStats { constructor() { this.aborted = false; this.loaded = 0; this.retry = 0; this.total = 0; this.chunkCount = 0; this.bwEstimate = 0; this.loading = { start: 0, first: 0, end: 0 }; this.parsing = { start: 0, end: 0 }; this.buffering = { start: 0, first: 0, end: 0 }; } } var ElementaryStreamTypes = { AUDIO: "audio", VIDEO: "video", AUDIOVIDEO: "audiovideo" }; class BaseSegment { // baseurl is the URL to the playlist // relurl is the portion of the URL that comes from inside the playlist. 
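// For example (hypothetical tag): an 'EXT-X-BYTERANGE:1000@2000' attribute handled by
// setByteRange() below yields byteRange [2000, 3000] — 2000 is the byte offset, 1000 the
// length, and the end offset is offset + length. When no '@offset' is given, the range
// starts at the previous segment's byteRangeEndOffset (or 0 if there is no previous segment).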
// Holds the types of data this fragment supports constructor(baseurl) { this._byteRange = null; this._url = null; this.baseurl = void 0; this.relurl = void 0; this.elementaryStreams = { [ElementaryStreamTypes.AUDIO]: null, [ElementaryStreamTypes.VIDEO]: null, [ElementaryStreamTypes.AUDIOVIDEO]: null }; this.baseurl = baseurl; } // setByteRange converts a EXT-X-BYTERANGE attribute into a two element array setByteRange(value, previous) { const params = value.split('@', 2); const byteRange = []; if (params.length === 1) { byteRange[0] = previous ? previous.byteRangeEndOffset : 0; } else { byteRange[0] = parseInt(params[1]); } byteRange[1] = parseInt(params[0]) + byteRange[0]; this._byteRange = byteRange; } get byteRange() { if (!this._byteRange) { return []; } return this._byteRange; } get byteRangeStartOffset() { return this.byteRange[0]; } get byteRangeEndOffset() { return this.byteRange[1]; } get url() { if (!this._url && this.baseurl && this.relurl) { this._url = urlToolkitExports.buildAbsoluteURL(this.baseurl, this.relurl, { alwaysNormalize: true }); } return this._url || ''; } set url(value) { this._url = value; } } /** * Object representing parsed data from an HLS Segment. Found in {@link hls.js#LevelDetails.fragments}. */ class Fragment extends BaseSegment { // EXTINF has to be present for a m3u8 to be considered valid // sn notates the sequence number for a segment, and if set to a string can be 'initSegment' // levelkeys are the EXT-X-KEY tags that apply to this segment for decryption // core difference from the private field _decryptdata is the lack of the initialized IV // _decryptdata will set the IV for this segment based on the segment number in the fragment // A string representing the fragment type // A reference to the loader. Set while the fragment is loading, and removed afterwards. Used to abort fragment loading // A reference to the key loader. Set while the key is loading, and removed afterwards. Used to abort key loading // The level/track index to which the fragment belongs // The continuity counter of the fragment // The starting Presentation Time Stamp (PTS) of the fragment. Set after transmux complete. // The ending Presentation Time Stamp (PTS) of the fragment. Set after transmux complete. // The starting Decode Time Stamp (DTS) of the fragment. Set after transmux complete. // The ending Decode Time Stamp (DTS) of the fragment. Set after transmux complete. // The start time of the fragment, as listed in the manifest. Updated after transmux complete. // Set by `updateFragPTSDTS` in level-helper // The maximum starting Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete. // The minimum ending Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete. 
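// Note (descriptive comment): PTS (presentation) and DTS (decode) timestamps can
// legitimately differ when the stream contains B-frames decoded out of display order;
// for streams without B-frames the two timelines normally coincide.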
// Load/parse timing information // A flag indicating whether the segment was downloaded in order to test bitrate, and was not buffered // #EXTINF segment title // The Media Initialization Section for this segment // Fragment is the last fragment in the media playlist // Fragment is marked by an EXT-X-GAP tag indicating that it does not contain media data and should not be loaded constructor(type, baseurl) { super(baseurl); this._decryptdata = null; this.rawProgramDateTime = null; this.programDateTime = null; this.tagList = []; this.duration = 0; this.sn = 0; this.levelkeys = void 0; this.type = void 0; this.loader = null; this.keyLoader = null; this.level = -1; this.cc = 0; this.startPTS = void 0; this.endPTS = void 0; this.startDTS = void 0; this.endDTS = void 0; this.start = 0; this.deltaPTS = void 0; this.maxStartPTS = void 0; this.minEndPTS = void 0; this.stats = new LoadStats(); this.urlId = 0; this.data = void 0; this.bitrateTest = false; this.title = null; this.initSegment = null; this.endList = void 0; this.gap = void 0; this.type = type; } get decryptdata() { const { levelkeys } = this; if (!levelkeys && !this._decryptdata) { return null; } if (!this._decryptdata && this.levelkeys && !this.levelkeys.NONE) { const key = this.levelkeys.identity; if (key) { this._decryptdata = key.getDecryptData(this.sn); } else { const keyFormats = Object.keys(this.levelkeys); if (keyFormats.length === 1) { return this._decryptdata = this.levelkeys[keyFormats[0]].getDecryptData(this.sn); } } } return this._decryptdata; } get end() { return this.start + this.duration; } get endProgramDateTime() { if (this.programDateTime === null) { return null; } if (!isFiniteNumber(this.programDateTime)) { return null; } const duration = !isFiniteNumber(this.duration) ? 0 : this.duration; return this.programDateTime + duration * 1000; } get encrypted() { var _this$_decryptdata; // At the m3u8-parser level we need to add support for manifest signalled keyformats // when we want the fragment to start reporting that it is encrypted. // Currently, keyFormat will only be set for identity keys if ((_this$_decryptdata = this._decryptdata) != null && _this$_decryptdata.encrypted) { return true; } else if (this.levelkeys) { const keyFormats = Object.keys(this.levelkeys); const len = keyFormats.length; if (len > 1 || len === 1 && this.levelkeys[keyFormats[0]].encrypted) { return true; } } return false; } setKeyFormat(keyFormat) { if (this.levelkeys) { const key = this.levelkeys[keyFormat]; if (key && !this._decryptdata) { this._decryptdata = key.getDecryptData(this.sn); } } } abortRequests() { var _this$loader, _this$keyLoader; (_this$loader = this.loader) == null ? void 0 : _this$loader.abort(); (_this$keyLoader = this.keyLoader) == null ? void 0 : _this$keyLoader.abort(); } setElementaryStreamInfo(type, startPTS, endPTS, startDTS, endDTS, partial = false) { const { elementaryStreams } = this; const info = elementaryStreams[type]; if (!info) { elementaryStreams[type] = { startPTS, endPTS, startDTS, endDTS, partial }; return; } info.startPTS = Math.min(info.startPTS, startPTS); info.endPTS = Math.max(info.endPTS, endPTS); info.startDTS = Math.min(info.startDTS, startDTS); info.endDTS = Math.max(info.endDTS, endDTS); } clearElementaryStreamInfo() { const { elementaryStreams } = this; elementaryStreams[ElementaryStreamTypes.AUDIO] = null; elementaryStreams[ElementaryStreamTypes.VIDEO] = null; elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO] = null; } } /** * Object representing parsed data from an HLS Partial Segment. 
Found in {@link hls.js#LevelDetails.partList}. */ class Part extends BaseSegment { constructor(partAttrs, frag, baseurl, index, previous) { super(baseurl); this.fragOffset = 0; this.duration = 0; this.gap = false; this.independent = false; this.relurl = void 0; this.fragment = void 0; this.index = void 0; this.stats = new LoadStats(); this.duration = partAttrs.decimalFloatingPoint('DURATION'); this.gap = partAttrs.bool('GAP'); this.independent = partAttrs.bool('INDEPENDENT'); this.relurl = partAttrs.enumeratedString('URI'); this.fragment = frag; this.index = index; const byteRange = partAttrs.enumeratedString('BYTERANGE'); if (byteRange) { this.setByteRange(byteRange, previous); } if (previous) { this.fragOffset = previous.fragOffset + previous.duration; } } get start() { return this.fragment.start + this.fragOffset; } get end() { return this.start + this.duration; } get loaded() { const { elementaryStreams } = this; return !!(elementaryStreams.audio || elementaryStreams.video || elementaryStreams.audiovideo); } } const DEFAULT_TARGET_DURATION = 10; /** * Object representing parsed data from an HLS Media Playlist. Found in {@link hls.js#Level.details}. */ class LevelDetails { // Manifest reload synchronization constructor(baseUrl) { this.PTSKnown = false; this.alignedSliding = false; this.averagetargetduration = void 0; this.endCC = 0; this.endSN = 0; this.fragments = void 0; this.fragmentHint = void 0; this.partList = null; this.dateRanges = void 0; this.live = true; this.ageHeader = 0; this.advancedDateTime = void 0; this.updated = true; this.advanced = true; this.availabilityDelay = void 0; this.misses = 0; this.startCC = 0; this.startSN = 0; this.startTimeOffset = null; this.targetduration = 0; this.totalduration = 0; this.type = null; this.url = void 0; this.m3u8 = ''; this.version = null; this.canBlockReload = false; this.canSkipUntil = 0; this.canSkipDateRanges = false; this.skippedSegments = 0; this.recentlyRemovedDateranges = void 0; this.partHoldBack = 0; this.holdBack = 0; this.partTarget = 0; this.preloadHint = void 0; this.renditionReports = void 0; this.tuneInGoal = 0; this.deltaUpdateFailed = void 0; this.driftStartTime = 0; this.driftEndTime = 0; this.driftStart = 0; this.driftEnd = 0; this.encryptedFragments = void 0; this.playlistParsingError = null; this.variableList = null; this.hasVariableRefs = false; this.fragments = []; this.encryptedFragments = []; this.dateRanges = {}; this.url = baseUrl; } reloaded(previous) { if (!previous) { this.advanced = true; this.updated = true; return; } const partSnDiff = this.lastPartSn - previous.lastPartSn; const partIndexDiff = this.lastPartIndex - previous.lastPartIndex; this.updated = this.endSN !== previous.endSN || !!partIndexDiff || !!partSnDiff || !this.live; this.advanced = this.endSN > previous.endSN || partSnDiff > 0 || partSnDiff === 0 && partIndexDiff > 0; if (this.updated || this.advanced) { this.misses = Math.floor(previous.misses * 0.6); } else { this.misses = previous.misses + 1; } this.availabilityDelay = previous.availabilityDelay; } get hasProgramDateTime() { if (this.fragments.length) { return isFiniteNumber(this.fragments[this.fragments.length - 1].programDateTime); } return false; } get levelTargetDuration() { return this.averagetargetduration || this.targetduration || DEFAULT_TARGET_DURATION; } get drift() { const runTime = this.driftEndTime - this.driftStartTime; if (runTime > 0) { const runDuration = this.driftEnd - this.driftStart; return runDuration * 1000 / runTime; } return 1; } get edge() { return 
this.partEnd || this.fragmentEnd; } get partEnd() { var _this$partList; if ((_this$partList = this.partList) != null && _this$partList.length) { return this.partList[this.partList.length - 1].end; } return this.fragmentEnd; } get fragmentEnd() { var _this$fragments; if ((_this$fragments = this.fragments) != null && _this$fragments.length) { return this.fragments[this.fragments.length - 1].end; } return 0; } get age() { if (this.advancedDateTime) { return Math.max(Date.now() - this.advancedDateTime, 0) / 1000; } return 0; } get lastPartIndex() { var _this$partList2; if ((_this$partList2 = this.partList) != null && _this$partList2.length) { return this.partList[this.partList.length - 1].index; } return -1; } get lastPartSn() { var _this$partList3; if ((_this$partList3 = this.partList) != null && _this$partList3.length) { return this.partList[this.partList.length - 1].fragment.sn; } return this.endSN; } } function base64Decode(base64encodedStr) { return Uint8Array.from(atob(base64encodedStr), c => c.charCodeAt(0)); } function getKeyIdBytes(str) { const keyIdbytes = strToUtf8array(str).subarray(0, 16); const paddedkeyIdbytes = new Uint8Array(16); paddedkeyIdbytes.set(keyIdbytes, 16 - keyIdbytes.length); return paddedkeyIdbytes; } function changeEndianness(keyId) { const swap = function swap(array, from, to) { const cur = array[from]; array[from] = array[to]; array[to] = cur; }; swap(keyId, 0, 3); swap(keyId, 1, 2); swap(keyId, 4, 5); swap(keyId, 6, 7); } function convertDataUriToArrayBytes(uri) { // data:[ const colonsplit = uri.split(':'); let keydata = null; if (colonsplit[0] === 'data' && colonsplit.length === 2) { const semicolonsplit = colonsplit[1].split(';'); const commasplit = semicolonsplit[semicolonsplit.length - 1].split(','); if (commasplit.length === 2) { const isbase64 = commasplit[0] === 'base64'; const data = commasplit[1]; if (isbase64) { semicolonsplit.splice(-1, 1); // remove from processing keydata = base64Decode(data); } else { keydata = getKeyIdBytes(data); } } } return keydata; } function strToUtf8array(str) { return Uint8Array.from(unescape(encodeURIComponent(str)), c => c.charCodeAt(0)); } /** * @see https://developer.mozilla.org/en-US/docs/Web/API/Navigator/requestMediaKeySystemAccess */ var KeySystems = { CLEARKEY: "org.w3.clearkey", FAIRPLAY: "com.apple.fps", PLAYREADY: "com.microsoft.playready", WIDEVINE: "com.widevine.alpha" }; // Playlist #EXT-X-KEY KEYFORMAT values var KeySystemFormats = { CLEARKEY: "org.w3.clearkey", FAIRPLAY: "com.apple.streamingkeydelivery", PLAYREADY: "com.microsoft.playready", WIDEVINE: "urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed" }; function keySystemFormatToKeySystemDomain(format) { switch (format) { case KeySystemFormats.FAIRPLAY: return KeySystems.FAIRPLAY; case KeySystemFormats.PLAYREADY: return KeySystems.PLAYREADY; case KeySystemFormats.WIDEVINE: return KeySystems.WIDEVINE; case KeySystemFormats.CLEARKEY: return KeySystems.CLEARKEY; } } // System IDs for which we can extract a key ID from "encrypted" event PSSH var KeySystemIds = { WIDEVINE: "edef8ba979d64acea3c827dcd51d21ed" }; function keySystemIdToKeySystemDomain(systemId) { if (systemId === KeySystemIds.WIDEVINE) { return KeySystems.WIDEVINE; // } else if (systemId === KeySystemIds.PLAYREADY) { // return KeySystems.PLAYREADY; // } else if (systemId === KeySystemIds.CENC || systemId === KeySystemIds.CLEARKEY) { // return KeySystems.CLEARKEY; } } function keySystemDomainToKeySystemFormat(keySystem) { switch (keySystem) { case KeySystems.FAIRPLAY: return 
KeySystemFormats.FAIRPLAY; case KeySystems.PLAYREADY: return KeySystemFormats.PLAYREADY; case KeySystems.WIDEVINE: return KeySystemFormats.WIDEVINE; case KeySystems.CLEARKEY: return KeySystemFormats.CLEARKEY; } } function getKeySystemsForConfig(config) { const { drmSystems, widevineLicenseUrl } = config; const keySystemsToAttempt = drmSystems ? [KeySystems.FAIRPLAY, KeySystems.WIDEVINE, KeySystems.PLAYREADY, KeySystems.CLEARKEY].filter(keySystem => !!drmSystems[keySystem]) : []; if (!keySystemsToAttempt[KeySystems.WIDEVINE] && widevineLicenseUrl) { keySystemsToAttempt.push(KeySystems.WIDEVINE); } return keySystemsToAttempt; } const requestMediaKeySystemAccess = function () { if (typeof self !== 'undefined' && self.navigator && self.navigator.requestMediaKeySystemAccess) { return self.navigator.requestMediaKeySystemAccess.bind(self.navigator); } else { return null; } }(); /** * @see https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemConfiguration */ function getSupportedMediaKeySystemConfigurations(keySystem, audioCodecs, videoCodecs, drmSystemOptions) { let initDataTypes; switch (keySystem) { case KeySystems.FAIRPLAY: initDataTypes = ['cenc', 'sinf']; break; case KeySystems.WIDEVINE: case KeySystems.PLAYREADY: initDataTypes = ['cenc']; break; case KeySystems.CLEARKEY: initDataTypes = ['cenc', 'keyids']; break; default: throw new Error(`Unknown key-system: ${keySystem}`); } return createMediaKeySystemConfigurations(initDataTypes, audioCodecs, videoCodecs, drmSystemOptions); } function createMediaKeySystemConfigurations(initDataTypes, audioCodecs, videoCodecs, drmSystemOptions) { const baseConfig = { initDataTypes: initDataTypes, persistentState: drmSystemOptions.persistentState || 'not-allowed', distinctiveIdentifier: drmSystemOptions.distinctiveIdentifier || 'not-allowed', sessionTypes: drmSystemOptions.sessionTypes || [drmSystemOptions.sessionType || 'temporary'], audioCapabilities: audioCodecs.map(codec => ({ contentType: `audio/mp4; codecs="${codec}"`, robustness: drmSystemOptions.audioRobustness || '', encryptionScheme: drmSystemOptions.audioEncryptionScheme || null })), videoCapabilities: videoCodecs.map(codec => ({ contentType: `video/mp4; codecs="${codec}"`, robustness: drmSystemOptions.videoRobustness || '', encryptionScheme: drmSystemOptions.videoEncryptionScheme || null })) }; return [baseConfig]; } function sliceUint8(array, start, end) { // @ts-expect-error This polyfills IE11 usage of Uint8Array slice. // It always exists in the TypeScript definition so fails, but it fails at runtime on IE11. return Uint8Array.prototype.slice ? array.slice(start, end) : new Uint8Array(Array.prototype.slice.call(array, start, end)); } // breaking up those two types in order to clarify what is happening in the decoding path. 
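// ---------------------------------------------------------------------------
// Editor sketch (illustrative only; not part of the original jessibuca/hls.js
// bundle and never invoked anywhere): shows how the EME helpers defined above
// — KeySystems, requestMediaKeySystemAccess and
// getSupportedMediaKeySystemConfigurations — could be combined to probe the
// browser for Widevine support. The codec strings and the empty
// drm-system-options object are assumptions made for this example only, not
// values used by the player itself.
function __editorSketch_probeWidevineSupport() {
  if (!requestMediaKeySystemAccess) {
    // EME is unavailable (e.g. insecure context or a very old browser).
    return Promise.reject(new Error('requestMediaKeySystemAccess is not available'));
  }
  const keySystem = KeySystems.WIDEVINE; // 'com.widevine.alpha'
  const configurations = getSupportedMediaKeySystemConfigurations(
    keySystem,
    ['mp4a.40.2'],   // assumed audio codec (AAC-LC)
    ['avc1.42e01e'], // assumed video codec (H.264 Baseline)
    {}               // assumed empty options => 'not-allowed' / 'temporary' defaults
  );
  // Resolves with a MediaKeySystemAccess object if the UA supports one of the
  // generated configurations, rejects otherwise.
  return requestMediaKeySystemAccess(keySystem, configurations);
}
// ---------------------------------------------------------------------------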
/** * Returns true if an ID3 header can be found at offset in data * @param data - The data to search * @param offset - The offset at which to start searching */ const isHeader$2 = (data, offset) => { /* * http://id3.org/id3v2.3.0 * [0] = 'I' * [1] = 'D' * [2] = '3' * [3,4] = {Version} * [5] = {Flags} * [6-9] = {ID3 Size} * * An ID3v2 tag can be detected with the following pattern: * $49 44 33 yy yy xx zz zz zz zz * Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80 */ if (offset + 10 <= data.length) { // look for 'ID3' identifier if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) { // check version is within range if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) { // check size is within range if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) { return true; } } } } return false; }; /** * Returns true if an ID3 footer can be found at offset in data * @param data - The data to search * @param offset - The offset at which to start searching */ const isFooter = (data, offset) => { /* * The footer is a copy of the header, but with a different identifier */ if (offset + 10 <= data.length) { // look for '3DI' identifier if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) { // check version is within range if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) { // check size is within range if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) { return true; } } } } return false; }; /** * Returns any adjacent ID3 tags found in data starting at offset, as one block of data * @param data - The data to search in * @param offset - The offset at which to start searching * @returns the block of data containing any ID3 tags found * or *undefined* if no header is found at the starting offset */ const getID3Data = (data, offset) => { const front = offset; let length = 0; while (isHeader$2(data, offset)) { // ID3 header is 10 bytes length += 10; const size = readSize(data, offset + 6); length += size; if (isFooter(data, offset + 10)) { // ID3 footer is 10 bytes length += 10; } offset += length; } if (length > 0) { return data.subarray(front, front + length); } return undefined; }; const readSize = (data, offset) => { let size = 0; size = (data[offset] & 0x7f) << 21; size |= (data[offset + 1] & 0x7f) << 14; size |= (data[offset + 2] & 0x7f) << 7; size |= data[offset + 3] & 0x7f; return size; }; const canParse$2 = (data, offset) => { return isHeader$2(data, offset) && readSize(data, offset + 6) + 10 <= data.length - offset; }; /** * Searches for the Elementary Stream timestamp found in the ID3 data chunk * @param data - Block of data containing one or more ID3 tags */ const getTimeStamp = data => { const frames = getID3Frames(data); for (let i = 0; i < frames.length; i++) { const frame = frames[i]; if (isTimeStampFrame(frame)) { return readTimeStamp(frame); } } return undefined; }; /** * Returns true if the ID3 frame is an Elementary Stream timestamp frame */ const isTimeStampFrame = frame => { return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp'; }; const getFrameData = data => { /* Frame ID $xx xx xx xx (four characters) Size $xx xx xx xx Flags $xx xx */ const type = String.fromCharCode(data[0], data[1], data[2], data[3]); const size = readSize(data, 4); // skip frame id, size, and flags const offset = 10; return { type, size, data: 
data.subarray(offset, offset + size) }; }; /** * Returns an array of ID3 frames found in all the ID3 tags in the id3Data * @param id3Data - The ID3 data containing one or more ID3 tags */ const getID3Frames = id3Data => { let offset = 0; const frames = []; while (isHeader$2(id3Data, offset)) { const size = readSize(id3Data, offset + 6); // skip past ID3 header offset += 10; const end = offset + size; // loop through frames in the ID3 tag while (offset + 8 < end) { const frameData = getFrameData(id3Data.subarray(offset)); const frame = decodeFrame(frameData); if (frame) { frames.push(frame); } // skip frame header and frame data offset += frameData.size + 10; } if (isFooter(id3Data, offset)) { offset += 10; } } return frames; }; const decodeFrame = frame => { if (frame.type === 'PRIV') { return decodePrivFrame(frame); } else if (frame.type[0] === 'W') { return decodeURLFrame(frame); } return decodeTextFrame(frame); }; const decodePrivFrame = frame => { /* Format: \0 */ if (frame.size < 2) { return undefined; } const owner = utf8ArrayToStr(frame.data, true); const privateData = new Uint8Array(frame.data.subarray(owner.length + 1)); return { key: frame.type, info: owner, data: privateData.buffer }; }; const decodeTextFrame = frame => { if (frame.size < 2) { return undefined; } if (frame.type === 'TXXX') { /* Format: [0] = {Text Encoding} [1-?] = {Description}\0{Value} */ let index = 1; const description = utf8ArrayToStr(frame.data.subarray(index), true); index += description.length + 1; const value = utf8ArrayToStr(frame.data.subarray(index)); return { key: frame.type, info: description, data: value }; } /* Format: [0] = {Text Encoding} [1-?] = {Value} */ const text = utf8ArrayToStr(frame.data.subarray(1)); return { key: frame.type, data: text }; }; const decodeURLFrame = frame => { if (frame.type === 'WXXX') { /* Format: [0] = {Text Encoding} [1-?] = {Description}\0{URL} */ if (frame.size < 2) { return undefined; } let index = 1; const description = utf8ArrayToStr(frame.data.subarray(index), true); index += description.length + 1; const value = utf8ArrayToStr(frame.data.subarray(index)); return { key: frame.type, info: description, data: value }; } /* Format: [0-?] = {URL} */ const url = utf8ArrayToStr(frame.data); return { key: frame.type, data: url }; }; const readTimeStamp = timeStampFrame => { if (timeStampFrame.data.byteLength === 8) { const data = new Uint8Array(timeStampFrame.data); // timestamp is 33 bit expressed as a big-endian eight-octet number, // with the upper 31 bits set to zero. const pts33Bit = data[3] & 0x1; let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7]; timestamp /= 45; if (pts33Bit) { timestamp += 47721858.84; } // 2^32 / 90 return Math.round(timestamp); } return undefined; }; // http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197 // http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt /* utf.js - UTF-8 <=> UTF-16 convertion * * Copyright (C) 1999 Masanao Izumo * Version: 1.0 * LastModified: Dec 25 1999 * This library is free. You can redistribute it and/or modify it. */ const utf8ArrayToStr = (array, exitOnNull = false) => { const decoder = getTextDecoder(); if (decoder) { const decoded = decoder.decode(array); if (exitOnNull) { // grab up to the first null const idx = decoded.indexOf('\0'); return idx !== -1 ? 
decoded.substring(0, idx) : decoded; } // remove any null characters return decoded.replace(/\0/g, ''); } const len = array.length; let c; let char2; let char3; let out = ''; let i = 0; while (i < len) { c = array[i++]; if (c === 0x00 && exitOnNull) { return out; } else if (c === 0x00 || c === 0x03) { // If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it continue; } switch (c >> 4) { case 0: case 1: case 2: case 3: case 4: case 5: case 6: case 7: // 0xxxxxxx out += String.fromCharCode(c); break; case 12: case 13: // 110x xxxx 10xx xxxx char2 = array[i++]; out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f); break; case 14: // 1110 xxxx 10xx xxxx 10xx xxxx char2 = array[i++]; char3 = array[i++]; out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0); break; } } return out; }; let decoder; function getTextDecoder() { if (!decoder && typeof self.TextDecoder !== 'undefined') { decoder = new self.TextDecoder('utf-8'); } return decoder; } /** * hex dump helper class */ const Hex = { hexDump: function (array) { let str = ''; for (let i = 0; i < array.length; i++) { let h = array[i].toString(16); if (h.length < 2) { h = '0' + h; } str += h; } return str; } }; const UINT32_MAX$1 = Math.pow(2, 32) - 1; const push = [].push; // We are using fixed track IDs for driving the MP4 remuxer // instead of following the TS PIDs. // There is no reason not to do this and some browsers/SourceBuffer-demuxers // may not like if there are TrackID "switches" // See https://github.com/video-dev/hls.js/issues/1331 // Here we are mapping our internal track types to constant MP4 track IDs // With MSE currently one can only have one track of each, and we are muxing // whatever video/audio rendition in them. const RemuxerTrackIdConfig = { video: 1, audio: 2, id3: 3, text: 4 }; function bin2str(data) { return String.fromCharCode.apply(null, data); } function readUint16(buffer, offset) { const val = buffer[offset] << 8 | buffer[offset + 1]; return val < 0 ? 65536 + val : val; } function readUint32(buffer, offset) { const val = readSint32(buffer, offset); return val < 0 ? 4294967296 + val : val; } function readSint32(buffer, offset) { return buffer[offset] << 24 | buffer[offset + 1] << 16 | buffer[offset + 2] << 8 | buffer[offset + 3]; } function writeUint32(buffer, offset, value) { buffer[offset] = value >> 24; buffer[offset + 1] = value >> 16 & 0xff; buffer[offset + 2] = value >> 8 & 0xff; buffer[offset + 3] = value & 0xff; } // Find the data for a box specified by its path function findBox(data, path) { const results = []; if (!path.length) { // short-circuit the search for empty paths return results; } const end = data.byteLength; for (let i = 0; i < end;) { const size = readUint32(data, i); const type = bin2str(data.subarray(i + 4, i + 8)); const endbox = size > 1 ? 
i + size : end; if (type === path[0]) { if (path.length === 1) { // this is the end of the path and we've found the box we were // looking for results.push(data.subarray(i + 8, endbox)); } else { // recursively search for the next box along the path const subresults = findBox(data.subarray(i + 8, endbox), path.slice(1)); if (subresults.length) { push.apply(results, subresults); } } } i = endbox; } // we've finished searching all of data return results; } function parseSegmentIndex(sidx) { const references = []; const version = sidx[0]; // set initial offset, we skip the reference ID (not needed) let index = 8; const timescale = readUint32(sidx, index); index += 4; // TODO: parse earliestPresentationTime and firstOffset // usually zero in our case const earliestPresentationTime = 0; const firstOffset = 0; if (version === 0) { index += 8; } else { index += 16; } // skip reserved index += 2; let startByte = sidx.length + firstOffset; const referencesCount = readUint16(sidx, index); index += 2; for (let i = 0; i < referencesCount; i++) { let referenceIndex = index; const referenceInfo = readUint32(sidx, referenceIndex); referenceIndex += 4; const referenceSize = referenceInfo & 0x7fffffff; const referenceType = (referenceInfo & 0x80000000) >>> 31; if (referenceType === 1) { logger.warn('SIDX has hierarchical references (not supported)'); return null; } const subsegmentDuration = readUint32(sidx, referenceIndex); referenceIndex += 4; references.push({ referenceSize, subsegmentDuration, // unscaled info: { duration: subsegmentDuration / timescale, start: startByte, end: startByte + referenceSize - 1 } }); startByte += referenceSize; // Skipping 1 bit for |startsWithSap|, 3 bits for |sapType|, and 28 bits // for |sapDelta|. referenceIndex += 4; // skip to next ref index = referenceIndex; } return { earliestPresentationTime, timescale, version, referencesCount, references }; } /** * Parses an MP4 initialization segment and extracts stream type and * timescale values for any declared tracks. Timescale values indicate the * number of clock ticks per second to assume for time-based values * elsewhere in the MP4. * * To determine the start time of an MP4, you need two pieces of * information: the timescale unit and the earliest base media decode * time. Multiple timescales can be specified within an MP4 but the * base media decode time is always expressed in the timescale from * the media header box for the track: * ``` * moov > trak > mdia > mdhd.timescale * moov > trak > mdia > hdlr * ``` * @param initSegment the bytes of the init segment * @returns a hash of track type to timescale values or null if * the init segment is malformed. */ function parseInitSegment(initSegment) { const result = []; const traks = findBox(initSegment, ['moov', 'trak']); for (let i = 0; i < traks.length; i++) { const trak = traks[i]; const tkhd = findBox(trak, ['tkhd'])[0]; if (tkhd) { let version = tkhd[0]; let index = version === 0 ? 12 : 20; const trackId = readUint32(tkhd, index); const mdhd = findBox(trak, ['mdia', 'mdhd'])[0]; if (mdhd) { version = mdhd[0]; index = version === 0 ? 
12 : 20; const timescale = readUint32(mdhd, index); const hdlr = findBox(trak, ['mdia', 'hdlr'])[0]; if (hdlr) { const hdlrType = bin2str(hdlr.subarray(8, 12)); const type = { soun: ElementaryStreamTypes.AUDIO, vide: ElementaryStreamTypes.VIDEO }[hdlrType]; if (type) { // Parse codec details const stsd = findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0]; let codec; if (stsd) { codec = bin2str(stsd.subarray(12, 16)); // TODO: Parse codec details to be able to build MIME type. // stsd.start += 8; // const codecBox = findBox(stsd, [codec])[0]; // if (codecBox) { // TODO: Codec parsing support for avc1, mp4a, hevc, av01... // } } result[trackId] = { timescale, type }; result[type] = { timescale, id: trackId, codec }; } } } } } const trex = findBox(initSegment, ['moov', 'mvex', 'trex']); trex.forEach(trex => { const trackId = readUint32(trex, 4); const track = result[trackId]; if (track) { track.default = { duration: readUint32(trex, 12), flags: readUint32(trex, 20) }; } }); return result; } function patchEncyptionData(initSegment, decryptdata) { if (!initSegment || !decryptdata) { return initSegment; } const keyId = decryptdata.keyId; if (keyId && decryptdata.isCommonEncryption) { const traks = findBox(initSegment, ['moov', 'trak']); traks.forEach(trak => { const stsd = findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0]; // skip the sample entry count const sampleEntries = stsd.subarray(8); let encBoxes = findBox(sampleEntries, ['enca']); const isAudio = encBoxes.length > 0; if (!isAudio) { encBoxes = findBox(sampleEntries, ['encv']); } encBoxes.forEach(enc => { const encBoxChildren = isAudio ? enc.subarray(28) : enc.subarray(78); const sinfBoxes = findBox(encBoxChildren, ['sinf']); sinfBoxes.forEach(sinf => { const tenc = parseSinf(sinf); if (tenc) { // Look for default key id (keyID offset is always 8 within the tenc box): const tencKeyId = tenc.subarray(8, 24); if (!tencKeyId.some(b => b !== 0)) { logger.log(`[eme] Patching keyId in 'enc${isAudio ? 'a' : 'v'}>sinf>>tenc' box: ${Hex.hexDump(tencKeyId)} -> ${Hex.hexDump(keyId)}`); tenc.set(keyId, 8); } } }); }); }); } return initSegment; } function parseSinf(sinf) { const schm = findBox(sinf, ['schm'])[0]; if (schm) { const scheme = bin2str(schm.subarray(4, 8)); if (scheme === 'cbcs' || scheme === 'cenc') { return findBox(sinf, ['schi', 'tenc'])[0]; } } logger.error(`[eme] missing 'schm' box`); return null; } /** * Determine the base media decode start time, in seconds, for an MP4 * fragment. If multiple fragments are specified, the earliest time is * returned. * * The base media decode time can be parsed from track fragment * metadata: * ``` * moof > traf > tfdt.baseMediaDecodeTime * ``` * It requires the timescale value from the mdhd to interpret. * * @param initData - a hash of track type to timescale values * @param fmp4 - the bytes of the mp4 fragment * @returns the earliest base media decode start time for the * fragment, in seconds */ function getStartDTS(initData, fmp4) { // we need info from two children of each track fragment box return findBox(fmp4, ['moof', 'traf']).reduce((result, traf) => { const tfdt = findBox(traf, ['tfdt'])[0]; const version = tfdt[0]; const start = findBox(traf, ['tfhd']).reduce((result, tfhd) => { // get the track id from the tfhd const id = readUint32(tfhd, 4); const track = initData[id]; if (track) { let baseTime = readUint32(tfdt, 4); if (version === 1) { // If value is too large, assume signed 64-bit. Negative track fragment decode times are invalid, but they exist in the wild. 
// This prevents large values from being used for initPTS, which can cause playlist sync issues. // https://github.com/video-dev/hls.js/issues/5303 if (baseTime === UINT32_MAX$1) { logger.warn(`[mp4-demuxer]: Ignoring assumed invalid signed 64-bit track fragment decode time`); return result; } baseTime *= UINT32_MAX$1 + 1; baseTime += readUint32(tfdt, 8); } // assume a 90kHz clock if no timescale was specified const scale = track.timescale || 90e3; // convert base time to seconds const startTime = baseTime / scale; if (isFinite(startTime) && (result === null || startTime < result)) { return startTime; } } return result; }, null); if (start !== null && isFinite(start) && (result === null || start < result)) { return start; } return result; }, null); } /* For Reference: aligned(8) class TrackFragmentHeaderBox extends FullBox(‘tfhd’, 0, tf_flags){ unsigned int(32) track_ID; // all the following are optional fields unsigned int(64) base_data_offset; unsigned int(32) sample_description_index; unsigned int(32) default_sample_duration; unsigned int(32) default_sample_size; unsigned int(32) default_sample_flags } */ function getDuration(data, initData) { let rawDuration = 0; let videoDuration = 0; let audioDuration = 0; const trafs = findBox(data, ['moof', 'traf']); for (let i = 0; i < trafs.length; i++) { const traf = trafs[i]; // There is only one tfhd & trun per traf // This is true for CMAF style content, and we should perhaps check the ftyp // and only look for a single trun then, but for ISOBMFF we should check // for multiple track runs. const tfhd = findBox(traf, ['tfhd'])[0]; // get the track id from the tfhd const id = readUint32(tfhd, 4); const track = initData[id]; if (!track) { continue; } const trackDefault = track.default; const tfhdFlags = readUint32(tfhd, 0) | (trackDefault == null ? void 0 : trackDefault.flags); let sampleDuration = trackDefault == null ? 
void 0 : trackDefault.duration; if (tfhdFlags & 0x000008) { // 0x000008 indicates the presence of the default_sample_duration field if (tfhdFlags & 0x000002) { // 0x000002 indicates the presence of the sample_description_index field, which precedes default_sample_duration // If present, the default_sample_duration exists at byte offset 12 sampleDuration = readUint32(tfhd, 12); } else { // Otherwise, the duration is at byte offset 8 sampleDuration = readUint32(tfhd, 8); } } // assume a 90kHz clock if no timescale was specified const timescale = track.timescale || 90e3; const truns = findBox(traf, ['trun']); for (let j = 0; j < truns.length; j++) { rawDuration = computeRawDurationFromSamples(truns[j]); if (!rawDuration && sampleDuration) { const sampleCount = readUint32(truns[j], 4); rawDuration = sampleDuration * sampleCount; } if (track.type === ElementaryStreamTypes.VIDEO) { videoDuration += rawDuration / timescale; } else if (track.type === ElementaryStreamTypes.AUDIO) { audioDuration += rawDuration / timescale; } } } if (videoDuration === 0 && audioDuration === 0) { // If duration samples are not available in the traf use sidx subsegment_duration let sidxDuration = 0; const sidxs = findBox(data, ['sidx']); for (let i = 0; i < sidxs.length; i++) { const sidx = parseSegmentIndex(sidxs[i]); if (sidx != null && sidx.references) { sidxDuration += sidx.references.reduce((dur, ref) => dur + ref.info.duration || 0, 0); } } return sidxDuration; } if (videoDuration) { return videoDuration; } return audioDuration; } /* For Reference: aligned(8) class TrackRunBox extends FullBox(‘trun’, version, tr_flags) { unsigned int(32) sample_count; // the following are optional fields signed int(32) data_offset; unsigned int(32) first_sample_flags; // all fields in the following array are optional { unsigned int(32) sample_duration; unsigned int(32) sample_size; unsigned int(32) sample_flags if (version == 0) { unsigned int(32) else { signed int(32) }[ sample_count ] } */ function computeRawDurationFromSamples(trun) { const flags = readUint32(trun, 0); // Flags are at offset 0, non-optional sample_count is at offset 4. Therefore we start 8 bytes in. 
// Each field is an int32, which is 4 bytes let offset = 8; // data-offset-present flag if (flags & 0x000001) { offset += 4; } // first-sample-flags-present flag if (flags & 0x000004) { offset += 4; } let duration = 0; const sampleCount = readUint32(trun, 4); for (let i = 0; i < sampleCount; i++) { // sample-duration-present flag if (flags & 0x000100) { const sampleDuration = readUint32(trun, offset); duration += sampleDuration; offset += 4; } // sample-size-present flag if (flags & 0x000200) { offset += 4; } // sample-flags-present flag if (flags & 0x000400) { offset += 4; } // sample-composition-time-offsets-present flag if (flags & 0x000800) { offset += 4; } } return duration; } function offsetStartDTS(initData, fmp4, timeOffset) { findBox(fmp4, ['moof', 'traf']).forEach(traf => { findBox(traf, ['tfhd']).forEach(tfhd => { // get the track id from the tfhd const id = readUint32(tfhd, 4); const track = initData[id]; if (!track) { return; } // assume a 90kHz clock if no timescale was specified const timescale = track.timescale || 90e3; // get the base media decode time from the tfdt findBox(traf, ['tfdt']).forEach(tfdt => { const version = tfdt[0]; let baseMediaDecodeTime = readUint32(tfdt, 4); if (version === 0) { baseMediaDecodeTime -= timeOffset * timescale; baseMediaDecodeTime = Math.max(baseMediaDecodeTime, 0); writeUint32(tfdt, 4, baseMediaDecodeTime); } else { baseMediaDecodeTime *= Math.pow(2, 32); baseMediaDecodeTime += readUint32(tfdt, 8); baseMediaDecodeTime -= timeOffset * timescale; baseMediaDecodeTime = Math.max(baseMediaDecodeTime, 0); const upper = Math.floor(baseMediaDecodeTime / (UINT32_MAX$1 + 1)); const lower = Math.floor(baseMediaDecodeTime % (UINT32_MAX$1 + 1)); writeUint32(tfdt, 4, upper); writeUint32(tfdt, 8, lower); } }); }); }); } // TODO: Check if the last moof+mdat pair is part of the valid range function segmentValidRange(data) { const segmentedRange = { valid: null, remainder: null }; const moofs = findBox(data, ['moof']); if (!moofs) { return segmentedRange; } else if (moofs.length < 2) { segmentedRange.remainder = data; return segmentedRange; } const last = moofs[moofs.length - 1]; // Offset by 8 bytes; findBox offsets the start by as much segmentedRange.valid = sliceUint8(data, 0, last.byteOffset - 8); segmentedRange.remainder = sliceUint8(data, last.byteOffset - 8); return segmentedRange; } function appendUint8Array(data1, data2) { const temp = new Uint8Array(data1.length + data2.length); temp.set(data1); temp.set(data2, data1.length); return temp; } function parseSamples(timeOffset, track) { const seiSamples = []; const videoData = track.samples; const timescale = track.timescale; const trackId = track.id; let isHEVCFlavor = false; const moofs = findBox(videoData, ['moof']); moofs.map(moof => { const moofOffset = moof.byteOffset - 8; const trafs = findBox(moof, ['traf']); trafs.map(traf => { // get the base media decode time from the tfdt const baseTime = findBox(traf, ['tfdt']).map(tfdt => { const version = tfdt[0]; let result = readUint32(tfdt, 4); if (version === 1) { result *= Math.pow(2, 32); result += readUint32(tfdt, 8); } return result / timescale; })[0]; if (baseTime !== undefined) { timeOffset = baseTime; } return findBox(traf, ['tfhd']).map(tfhd => { const id = readUint32(tfhd, 4); const tfhdFlags = readUint32(tfhd, 0) & 0xffffff; const baseDataOffsetPresent = (tfhdFlags & 0x000001) !== 0; const sampleDescriptionIndexPresent = (tfhdFlags & 0x000002) !== 0; const defaultSampleDurationPresent = (tfhdFlags & 0x000008) !== 0; let 
defaultSampleDuration = 0; const defaultSampleSizePresent = (tfhdFlags & 0x000010) !== 0; let defaultSampleSize = 0; const defaultSampleFlagsPresent = (tfhdFlags & 0x000020) !== 0; let tfhdOffset = 8; if (id === trackId) { if (baseDataOffsetPresent) { tfhdOffset += 8; } if (sampleDescriptionIndexPresent) { tfhdOffset += 4; } if (defaultSampleDurationPresent) { defaultSampleDuration = readUint32(tfhd, tfhdOffset); tfhdOffset += 4; } if (defaultSampleSizePresent) { defaultSampleSize = readUint32(tfhd, tfhdOffset); tfhdOffset += 4; } if (defaultSampleFlagsPresent) { tfhdOffset += 4; } if (track.type === 'video') { isHEVCFlavor = isHEVC(track.codec); } findBox(traf, ['trun']).map(trun => { const version = trun[0]; const flags = readUint32(trun, 0) & 0xffffff; const dataOffsetPresent = (flags & 0x000001) !== 0; let dataOffset = 0; const firstSampleFlagsPresent = (flags & 0x000004) !== 0; const sampleDurationPresent = (flags & 0x000100) !== 0; let sampleDuration = 0; const sampleSizePresent = (flags & 0x000200) !== 0; let sampleSize = 0; const sampleFlagsPresent = (flags & 0x000400) !== 0; const sampleCompositionOffsetsPresent = (flags & 0x000800) !== 0; let compositionOffset = 0; const sampleCount = readUint32(trun, 4); let trunOffset = 8; // past version, flags, and sample count if (dataOffsetPresent) { dataOffset = readUint32(trun, trunOffset); trunOffset += 4; } if (firstSampleFlagsPresent) { trunOffset += 4; } let sampleOffset = dataOffset + moofOffset; for (let ix = 0; ix < sampleCount; ix++) { if (sampleDurationPresent) { sampleDuration = readUint32(trun, trunOffset); trunOffset += 4; } else { sampleDuration = defaultSampleDuration; } if (sampleSizePresent) { sampleSize = readUint32(trun, trunOffset); trunOffset += 4; } else { sampleSize = defaultSampleSize; } if (sampleFlagsPresent) { trunOffset += 4; } if (sampleCompositionOffsetsPresent) { if (version === 0) { compositionOffset = readUint32(trun, trunOffset); } else { compositionOffset = readSint32(trun, trunOffset); } trunOffset += 4; } if (track.type === ElementaryStreamTypes.VIDEO) { let naluTotalSize = 0; while (naluTotalSize < sampleSize) { const naluSize = readUint32(videoData, sampleOffset); sampleOffset += 4; if (isSEIMessage(isHEVCFlavor, videoData[sampleOffset])) { const data = videoData.subarray(sampleOffset, sampleOffset + naluSize); parseSEIMessageFromNALu(data, isHEVCFlavor ? 2 : 1, timeOffset + compositionOffset / timescale, seiSamples); } sampleOffset += naluSize; naluTotalSize += naluSize + 4; } } timeOffset += sampleDuration / timescale; } }); } }); }); }); return seiSamples; } function isHEVC(codec) { if (!codec) { return false; } const delimit = codec.indexOf('.'); const baseCodec = delimit < 0 ? codec : codec.substring(0, delimit); return baseCodec === 'hvc1' || baseCodec === 'hev1' || // Dolby Vision baseCodec === 'dvh1' || baseCodec === 'dvhe'; } function isSEIMessage(isHEVCFlavor, naluHeader) { if (isHEVCFlavor) { const naluType = naluHeader >> 1 & 0x3f; return naluType === 39 || naluType === 40; } else { const naluType = naluHeader & 0x1f; return naluType === 6; } } function parseSEIMessageFromNALu(unescapedData, headerSize, pts, samples) { const data = discardEPB(unescapedData); let seiPtr = 0; // skip nal header seiPtr += headerSize; let payloadType = 0; let payloadSize = 0; let endOfCaptions = false; let b = 0; while (seiPtr < data.length) { payloadType = 0; do { if (seiPtr >= data.length) { break; } b = data[seiPtr++]; payloadType += b; } while (b === 0xff); // Parse payload size. 
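// Editor note (descriptive comment, added): both payloadType above and payloadSize below
// use the SEI variable-length coding from H.264/H.265 — every 0xFF byte contributes 255
// and the first byte smaller than 0xFF terminates the value (e.g. 0xFF 0x04 => 259).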
payloadSize = 0; do { if (seiPtr >= data.length) { break; } b = data[seiPtr++]; payloadSize += b; } while (b === 0xff); const leftOver = data.length - seiPtr; if (!endOfCaptions && payloadType === 4 && seiPtr < data.length) { endOfCaptions = true; const countryCode = data[seiPtr++]; if (countryCode === 181) { const providerCode = readUint16(data, seiPtr); seiPtr += 2; if (providerCode === 49) { const userStructure = readUint32(data, seiPtr); seiPtr += 4; if (userStructure === 0x47413934) { const userDataType = data[seiPtr++]; // Raw CEA-608 bytes wrapped in CEA-708 packet if (userDataType === 3) { const firstByte = data[seiPtr++]; const totalCCs = 0x1f & firstByte; const enabled = 0x40 & firstByte; const totalBytes = enabled ? 2 + totalCCs * 3 : 0; const byteArray = new Uint8Array(totalBytes); if (enabled) { byteArray[0] = firstByte; for (let i = 1; i < totalBytes; i++) { byteArray[i] = data[seiPtr++]; } } samples.push({ type: userDataType, payloadType, pts, bytes: byteArray }); } } } } } else if (payloadType === 5 && payloadSize < leftOver) { endOfCaptions = true; if (payloadSize > 16) { const uuidStrArray = []; for (let i = 0; i < 16; i++) { const _b = data[seiPtr++].toString(16); uuidStrArray.push(_b.length == 1 ? '0' + _b : _b); if (i === 3 || i === 5 || i === 7 || i === 9) { uuidStrArray.push('-'); } } const length = payloadSize - 16; const userDataBytes = new Uint8Array(length); for (let i = 0; i < length; i++) { userDataBytes[i] = data[seiPtr++]; } samples.push({ payloadType, pts, uuid: uuidStrArray.join(''), userData: utf8ArrayToStr(userDataBytes), userDataBytes }); } } else if (payloadSize < leftOver) { seiPtr += payloadSize; } else if (payloadSize > leftOver) { break; } } } /** * remove Emulation Prevention bytes from a RBSP */ function discardEPB(data) { const length = data.byteLength; const EPBPositions = []; let i = 1; // Find all `Emulation Prevention Bytes` while (i < length - 2) { if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) { EPBPositions.push(i + 2); i += 2; } else { i++; } } // If no Emulation Prevention Bytes were found just return the original // array if (EPBPositions.length === 0) { return data; } // Create a new array to hold the NAL unit data const newLength = length - EPBPositions.length; const newData = new Uint8Array(newLength); let sourceIndex = 0; for (i = 0; i < newLength; sourceIndex++, i++) { if (sourceIndex === EPBPositions[0]) { // Skip this byte sourceIndex++; // Remove this position index EPBPositions.shift(); } newData[i] = data[sourceIndex]; } return newData; } function parseEmsg(data) { const version = data[0]; let schemeIdUri = ''; let value = ''; let timeScale = 0; let presentationTimeDelta = 0; let presentationTime = 0; let eventDuration = 0; let id = 0; let offset = 0; if (version === 0) { while (bin2str(data.subarray(offset, offset + 1)) !== '\0') { schemeIdUri += bin2str(data.subarray(offset, offset + 1)); offset += 1; } schemeIdUri += bin2str(data.subarray(offset, offset + 1)); offset += 1; while (bin2str(data.subarray(offset, offset + 1)) !== '\0') { value += bin2str(data.subarray(offset, offset + 1)); offset += 1; } value += bin2str(data.subarray(offset, offset + 1)); offset += 1; timeScale = readUint32(data, 12); presentationTimeDelta = readUint32(data, 16); eventDuration = readUint32(data, 20); id = readUint32(data, 24); offset = 28; } else if (version === 1) { offset += 4; timeScale = readUint32(data, offset); offset += 4; const leftPresentationTime = readUint32(data, offset); offset += 4; const rightPresentationTime = 
readUint32(data, offset); offset += 4; presentationTime = 2 ** 32 * leftPresentationTime + rightPresentationTime; if (!Number.isSafeInteger(presentationTime)) { presentationTime = Number.MAX_SAFE_INTEGER; logger.warn('Presentation time exceeds safe integer limit and wrapped to max safe integer in parsing emsg box'); } eventDuration = readUint32(data, offset); offset += 4; id = readUint32(data, offset); offset += 4; while (bin2str(data.subarray(offset, offset + 1)) !== '\0') { schemeIdUri += bin2str(data.subarray(offset, offset + 1)); offset += 1; } schemeIdUri += bin2str(data.subarray(offset, offset + 1)); offset += 1; while (bin2str(data.subarray(offset, offset + 1)) !== '\0') { value += bin2str(data.subarray(offset, offset + 1)); offset += 1; } value += bin2str(data.subarray(offset, offset + 1)); offset += 1; } const payload = data.subarray(offset, data.byteLength); return { schemeIdUri, value, timeScale, presentationTime, presentationTimeDelta, eventDuration, id, payload }; } function mp4Box(type, ...payload) { const len = payload.length; let size = 8; let i = len; while (i--) { size += payload[i].byteLength; } const result = new Uint8Array(size); result[0] = size >> 24 & 0xff; result[1] = size >> 16 & 0xff; result[2] = size >> 8 & 0xff; result[3] = size & 0xff; result.set(type, 4); for (i = 0, size = 8; i < len; i++) { result.set(payload[i], size); size += payload[i].byteLength; } return result; } function mp4pssh(systemId, keyids, data) { if (systemId.byteLength !== 16) { throw new RangeError('Invalid system id'); } let version; let kids; if (keyids) { version = 1; kids = new Uint8Array(keyids.length * 16); for (let ix = 0; ix < keyids.length; ix++) { const k = keyids[ix]; // uint8array if (k.byteLength !== 16) { throw new RangeError('Invalid key'); } kids.set(k, ix * 16); } } else { version = 0; kids = new Uint8Array(); } let kidCount; if (version > 0) { kidCount = new Uint8Array(4); if (keyids.length > 0) { new DataView(kidCount.buffer).setUint32(0, keyids.length, false); } } else { kidCount = new Uint8Array(); } const dataSize = new Uint8Array(4); if (data && data.byteLength > 0) { new DataView(dataSize.buffer).setUint32(0, data.byteLength, false); } return mp4Box([112, 115, 115, 104], new Uint8Array([version, 0x00, 0x00, 0x00 // Flags ]), systemId, // 16 bytes kidCount, kids, dataSize, data || new Uint8Array()); } function parsePssh(initData) { if (!(initData instanceof ArrayBuffer) || initData.byteLength < 32) { return null; } const result = { version: 0, systemId: '', kids: null, data: null }; const view = new DataView(initData); const boxSize = view.getUint32(0); if (initData.byteLength !== boxSize && boxSize > 44) { return null; } const type = view.getUint32(4); if (type !== 0x70737368) { return null; } result.version = view.getUint32(8) >>> 24; if (result.version > 1) { return null; } result.systemId = Hex.hexDump(new Uint8Array(initData, 12, 16)); const dataSizeOrKidCount = view.getUint32(28); if (result.version === 0) { if (boxSize - 32 < dataSizeOrKidCount) { return null; } result.data = new Uint8Array(initData, 32, dataSizeOrKidCount); } else if (result.version === 1) { result.kids = []; for (let i = 0; i < dataSizeOrKidCount; i++) { result.kids.push(new Uint8Array(initData, 32 + i * 16, 16)); } } return result; } let keyUriToKeyIdMap = {}; class LevelKey { static clearKeyUriToKeyIdMap() { keyUriToKeyIdMap = {}; } constructor(method, uri, format, formatversions = [1], iv = null) { this.uri = void 0; this.method = void 0; this.keyFormat = void 0; this.keyFormatVersions = 
void 0; this.encrypted = void 0; this.isCommonEncryption = void 0; this.iv = null; this.key = null; this.keyId = null; this.pssh = null; this.method = method; this.uri = uri; this.keyFormat = format; this.keyFormatVersions = formatversions; this.iv = iv; this.encrypted = method ? method !== 'NONE' : false; this.isCommonEncryption = this.encrypted && method !== 'AES-128'; } isSupported() { // If it's Segment encryption or No encryption, just select that key system if (this.method) { if (this.method === 'AES-128' || this.method === 'NONE') { return true; } if (this.keyFormat === 'identity') { // Maintain support for clear SAMPLE-AES with MPEG-3 TS return this.method === 'SAMPLE-AES'; } else { switch (this.keyFormat) { case KeySystemFormats.FAIRPLAY: case KeySystemFormats.WIDEVINE: case KeySystemFormats.PLAYREADY: case KeySystemFormats.CLEARKEY: return ['ISO-23001-7', 'SAMPLE-AES', 'SAMPLE-AES-CENC', 'SAMPLE-AES-CTR'].indexOf(this.method) !== -1; } } } return false; } getDecryptData(sn) { if (!this.encrypted || !this.uri) { return null; } if (this.method === 'AES-128' && this.uri && !this.iv) { if (typeof sn !== 'number') { // We are fetching decryption data for a initialization segment // If the segment was encrypted with AES-128 // It must have an IV defined. We cannot substitute the Segment Number in. if (this.method === 'AES-128' && !this.iv) { logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`); } // Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation. sn = 0; } const iv = createInitializationVector(sn); const decryptdata = new LevelKey(this.method, this.uri, 'identity', this.keyFormatVersions, iv); return decryptdata; } // Initialize keyId if possible const keyBytes = convertDataUriToArrayBytes(this.uri); if (keyBytes) { switch (this.keyFormat) { case KeySystemFormats.WIDEVINE: this.pssh = keyBytes; // In case of widevine keyID is embedded in PSSH box. Read Key ID. if (keyBytes.length >= 22) { this.keyId = keyBytes.subarray(keyBytes.length - 22, keyBytes.length - 6); } break; case KeySystemFormats.PLAYREADY: { const PlayReadyKeySystemUUID = new Uint8Array([0x9a, 0x04, 0xf0, 0x79, 0x98, 0x40, 0x42, 0x86, 0xab, 0x92, 0xe6, 0x5b, 0xe0, 0x88, 0x5f, 0x95]); this.pssh = mp4pssh(PlayReadyKeySystemUUID, null, keyBytes); const keyBytesUtf16 = new Uint16Array(keyBytes.buffer, keyBytes.byteOffset, keyBytes.byteLength / 2); const keyByteStr = String.fromCharCode.apply(null, Array.from(keyBytesUtf16)); // Parse Playready WRMHeader XML const xmlKeyBytes = keyByteStr.substring(keyByteStr.indexOf('<'), keyByteStr.length); const parser = new DOMParser(); const xmlDoc = parser.parseFromString(xmlKeyBytes, 'text/xml'); const keyData = xmlDoc.getElementsByTagName('KID')[0]; if (keyData) { const keyId = keyData.childNodes[0] ? 
keyData.childNodes[0].nodeValue : keyData.getAttribute('VALUE'); if (keyId) { const keyIdArray = base64Decode(keyId).subarray(0, 16); // KID value in PRO is a base64-encoded little endian GUID interpretation of UUID // KID value in ‘tenc’ is a big endian UUID GUID interpretation of UUID changeEndianness(keyIdArray); this.keyId = keyIdArray; } } break; } default: { let keydata = keyBytes.subarray(0, 16); if (keydata.length !== 16) { const padded = new Uint8Array(16); padded.set(keydata, 16 - keydata.length); keydata = padded; } this.keyId = keydata; break; } } } // Default behavior: assign a new keyId for each uri if (!this.keyId || this.keyId.byteLength !== 16) { let keyId = keyUriToKeyIdMap[this.uri]; if (!keyId) { const val = Object.keys(keyUriToKeyIdMap).length % Number.MAX_SAFE_INTEGER; keyId = new Uint8Array(16); const dv = new DataView(keyId.buffer, 12, 4); // Just set the last 4 bytes dv.setUint32(0, val); keyUriToKeyIdMap[this.uri] = keyId; } this.keyId = keyId; } return this; } } function createInitializationVector(segmentNumber) { const uint8View = new Uint8Array(16); for (let i = 12; i < 16; i++) { uint8View[i] = segmentNumber >> 8 * (15 - i) & 0xff; } return uint8View; } const VARIABLE_REPLACEMENT_REGEX = /\{\$([a-zA-Z0-9-_]+)\}/g; function hasVariableReferences(str) { return VARIABLE_REPLACEMENT_REGEX.test(str); } function substituteVariablesInAttributes(parsed, attr, attributeNames) { if (parsed.variableList !== null || parsed.hasVariableRefs) { for (let i = attributeNames.length; i--;) { const name = attributeNames[i]; const value = attr[name]; if (value) { attr[name] = substituteVariables(parsed, value); } } } } function substituteVariables(parsed, value) { if (parsed.variableList !== null || parsed.hasVariableRefs) { const variableList = parsed.variableList; return value.replace(VARIABLE_REPLACEMENT_REGEX, variableReference => { const variableName = variableReference.substring(2, variableReference.length - 1); const variableValue = variableList == null ? 
void 0 : variableList[variableName]; if (variableValue === undefined) { parsed.playlistParsingError || (parsed.playlistParsingError = new Error(`Missing preceding EXT-X-DEFINE tag for Variable Reference: "${variableName}"`)); return variableReference; } return variableValue; }); } return value; } function addVariableDefinition(parsed, attr, parentUrl) { let variableList = parsed.variableList; if (!variableList) { parsed.variableList = variableList = {}; } let NAME; let VALUE; if ('QUERYPARAM' in attr) { NAME = attr.QUERYPARAM; try { const searchParams = new self.URL(parentUrl).searchParams; if (searchParams.has(NAME)) { VALUE = searchParams.get(NAME); } else { throw new Error(`"${NAME}" does not match any query parameter in URI: "${parentUrl}"`); } } catch (error) { parsed.playlistParsingError || (parsed.playlistParsingError = new Error(`EXT-X-DEFINE QUERYPARAM: ${error.message}`)); } } else { NAME = attr.NAME; VALUE = attr.VALUE; } if (NAME in variableList) { parsed.playlistParsingError || (parsed.playlistParsingError = new Error(`EXT-X-DEFINE duplicate Variable Name declarations: "${NAME}"`)); } else { variableList[NAME] = VALUE || ''; } } function importVariableDefinition(parsed, attr, sourceVariableList) { const IMPORT = attr.IMPORT; if (sourceVariableList && IMPORT in sourceVariableList) { let variableList = parsed.variableList; if (!variableList) { parsed.variableList = variableList = {}; } variableList[IMPORT] = sourceVariableList[IMPORT]; } else { parsed.playlistParsingError || (parsed.playlistParsingError = new Error(`EXT-X-DEFINE IMPORT attribute not found in Multivariant Playlist: "${IMPORT}"`)); } } /** * MediaSource helper */ function getMediaSource() { if (typeof self === 'undefined') return undefined; return self.MediaSource || self.WebKitMediaSource; } // from http://mp4ra.org/codecs.html const sampleEntryCodesISO = { audio: { a3ds: true, 'ac-3': true, 'ac-4': true, alac: true, alaw: true, dra1: true, 'dts+': true, 'dts-': true, dtsc: true, dtse: true, dtsh: true, 'ec-3': true, enca: true, g719: true, g726: true, m4ae: true, mha1: true, mha2: true, mhm1: true, mhm2: true, mlpa: true, mp4a: true, 'raw ': true, Opus: true, opus: true, // browsers expect this to be lowercase despite MP4RA says 'Opus' samr: true, sawb: true, sawp: true, sevc: true, sqcp: true, ssmv: true, twos: true, ulaw: true }, video: { avc1: true, avc2: true, avc3: true, avc4: true, avcp: true, av01: true, drac: true, dva1: true, dvav: true, dvh1: true, dvhe: true, encv: true, hev1: true, hvc1: true, mjp2: true, mp4v: true, mvc1: true, mvc2: true, mvc3: true, mvc4: true, resv: true, rv60: true, s263: true, svc1: true, svc2: true, 'vc-1': true, vp08: true, vp09: true }, text: { stpp: true, wvtt: true } }; const MediaSource$2 = getMediaSource(); function isCodecType(codec, type) { const typeCodes = sampleEntryCodesISO[type]; return !!typeCodes && typeCodes[codec.slice(0, 4)] === true; } function isCodecSupportedInMp4(codec, type) { var _MediaSource$isTypeSu; return (_MediaSource$isTypeSu = MediaSource$2 == null ? void 0 : MediaSource$2.isTypeSupported(`${type || 'video'}/mp4;codecs="${codec}"`)) != null ? 
_MediaSource$isTypeSu : false; } const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g; const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g; const IS_MEDIA_PLAYLIST = /^#EXT(?:INF|-X-TARGETDURATION):/m; // Handle empty Media Playlist (first EXTINF not signaled, but TARGETDURATION present) const LEVEL_PLAYLIST_REGEX_FAST = new RegExp([/#EXTINF:\s*(\d*(?:\.\d+)?)(?:,(.*)\s+)?/.source, // duration (#EXTINF:,), group 1 => duration, group 2 => title /(?!#) *(\S[\S ]*)/.source, // segment URI, group 3 => the URI (note newline is not eaten) /#EXT-X-BYTERANGE:*(.+)/.source, // next segment's byterange, group 4 => range spec (x@y) /#EXT-X-PROGRAM-DATE-TIME:(.+)/.source, // next segment's program date/time group 5 => the datetime spec /#.*/.source // All other non-segment oriented tags will match with all groups empty ].join('|'), 'g'); const LEVEL_PLAYLIST_REGEX_SLOW = new RegExp([/#(EXTM3U)/.source, /#EXT-X-(DATERANGE|DEFINE|KEY|MAP|PART|PART-INF|PLAYLIST-TYPE|PRELOAD-HINT|RENDITION-REPORT|SERVER-CONTROL|SKIP|START):(.+)/.source, /#EXT-X-(BITRATE|DISCONTINUITY-SEQUENCE|MEDIA-SEQUENCE|TARGETDURATION|VERSION): *(\d+)/.source, /#EXT-X-(DISCONTINUITY|ENDLIST|GAP)/.source, /(#)([^:]*):(.*)/.source, /(#)(.*)(?:.*)\r?\n?/.source].join('|')); class M3U8Parser$1 { static findGroup(groups, mediaGroupId) { for (let i = 0; i < groups.length; i++) { const group = groups[i]; if (group.id === mediaGroupId) { return group; } } } static convertAVC1ToAVCOTI(codec) { // Convert avc1 codec string from RFC-4281 to RFC-6381 for MediaSource.isTypeSupported const avcdata = codec.split('.'); if (avcdata.length > 2) { let result = avcdata.shift() + '.'; result += parseInt(avcdata.shift()).toString(16); result += ('000' + parseInt(avcdata.shift()).toString(16)).slice(-4); return result; } return codec; } static resolve(url, baseUrl) { return urlToolkitExports.buildAbsoluteURL(baseUrl, url, { alwaysNormalize: true }); } static isMediaPlaylist(str) { return IS_MEDIA_PLAYLIST.test(str); } static parseMasterPlaylist(string, baseurl) { const hasVariableRefs = hasVariableReferences(string) ; const parsed = { contentSteering: null, levels: [], playlistParsingError: null, sessionData: null, sessionKeys: null, startTimeOffset: null, variableList: null, hasVariableRefs }; const levelsWithKnownCodecs = []; MASTER_PLAYLIST_REGEX.lastIndex = 0; let result; while ((result = MASTER_PLAYLIST_REGEX.exec(string)) != null) { if (result[1]) { var _level$unknownCodecs; // '#EXT-X-STREAM-INF' is found, parse level tag in group 1 const attrs = new AttrList(result[1]); { substituteVariablesInAttributes(parsed, attrs, ['CODECS', 'SUPPLEMENTAL-CODECS', 'ALLOWED-CPC', 'PATHWAY-ID', 'STABLE-VARIANT-ID', 'AUDIO', 'VIDEO', 'SUBTITLES', 'CLOSED-CAPTIONS', 'NAME']); } const uri = substituteVariables(parsed, result[2]) ; const level = { attrs, bitrate: attrs.decimalInteger('AVERAGE-BANDWIDTH') || attrs.decimalInteger('BANDWIDTH'), name: attrs.NAME, url: M3U8Parser$1.resolve(uri, baseurl) }; const resolution = attrs.decimalResolution('RESOLUTION'); if (resolution) { level.width = resolution.width; level.height = resolution.height; } setCodecs((attrs.CODECS || '').split(/[ ,]+/).filter(c => c), level); if (level.videoCodec && level.videoCodec.indexOf('avc1') !== -1) { level.videoCodec = M3U8Parser$1.convertAVC1ToAVCOTI(level.videoCodec); } if (!((_level$unknownCodecs = level.unknownCodecs) != null && _level$unknownCodecs.length)) 
{ levelsWithKnownCodecs.push(level); } parsed.levels.push(level); } else if (result[3]) { const tag = result[3]; const attributes = result[4]; switch (tag) { case 'SESSION-DATA': { // #EXT-X-SESSION-DATA const sessionAttrs = new AttrList(attributes); { substituteVariablesInAttributes(parsed, sessionAttrs, ['DATA-ID', 'LANGUAGE', 'VALUE', 'URI']); } const dataId = sessionAttrs['DATA-ID']; if (dataId) { if (parsed.sessionData === null) { parsed.sessionData = {}; } parsed.sessionData[dataId] = sessionAttrs; } break; } case 'SESSION-KEY': { // #EXT-X-SESSION-KEY const sessionKey = parseKey(attributes, baseurl, parsed); if (sessionKey.encrypted && sessionKey.isSupported()) { if (parsed.sessionKeys === null) { parsed.sessionKeys = []; } parsed.sessionKeys.push(sessionKey); } else { logger.warn(`[Keys] Ignoring invalid EXT-X-SESSION-KEY tag: "${attributes}"`); } break; } case 'DEFINE': { // #EXT-X-DEFINE { const variableAttributes = new AttrList(attributes); substituteVariablesInAttributes(parsed, variableAttributes, ['NAME', 'VALUE', 'QUERYPARAM']); addVariableDefinition(parsed, variableAttributes, baseurl); } break; } case 'CONTENT-STEERING': { // #EXT-X-CONTENT-STEERING const contentSteeringAttributes = new AttrList(attributes); { substituteVariablesInAttributes(parsed, contentSteeringAttributes, ['SERVER-URI', 'PATHWAY-ID']); } parsed.contentSteering = { uri: M3U8Parser$1.resolve(contentSteeringAttributes['SERVER-URI'], baseurl), pathwayId: contentSteeringAttributes['PATHWAY-ID'] || '.' }; break; } case 'START': { // #EXT-X-START parsed.startTimeOffset = parseStartTimeOffset(attributes); break; } } } } // Filter out levels with unknown codecs if it does not remove all levels const stripUnknownCodecLevels = levelsWithKnownCodecs.length > 0 && levelsWithKnownCodecs.length < parsed.levels.length; parsed.levels = stripUnknownCodecLevels ? levelsWithKnownCodecs : parsed.levels; if (parsed.levels.length === 0) { parsed.playlistParsingError = new Error('no levels found in manifest'); } return parsed; } static parseMasterPlaylistMedia(string, baseurl, parsed) { let result; const results = {}; const levels = parsed.levels; const groupsByType = { AUDIO: levels.map(level => ({ id: level.attrs.AUDIO, audioCodec: level.audioCodec })), SUBTITLES: levels.map(level => ({ id: level.attrs.SUBTITLES, textCodec: level.textCodec })), 'CLOSED-CAPTIONS': [] }; let id = 0; MASTER_PLAYLIST_MEDIA_REGEX.lastIndex = 0; while ((result = MASTER_PLAYLIST_MEDIA_REGEX.exec(string)) !== null) { const attrs = new AttrList(result[1]); const type = attrs.TYPE; if (type) { const groups = groupsByType[type]; const medias = results[type] || []; results[type] = medias; { substituteVariablesInAttributes(parsed, attrs, ['URI', 'GROUP-ID', 'LANGUAGE', 'ASSOC-LANGUAGE', 'STABLE-RENDITION-ID', 'NAME', 'INSTREAM-ID', 'CHARACTERISTICS', 'CHANNELS']); } const media = { attrs, bitrate: 0, id: id++, groupId: attrs['GROUP-ID'] || '', instreamId: attrs['INSTREAM-ID'], name: attrs.NAME || attrs.LANGUAGE || '', type, default: attrs.bool('DEFAULT'), autoselect: attrs.bool('AUTOSELECT'), forced: attrs.bool('FORCED'), lang: attrs.LANGUAGE, url: attrs.URI ? 
M3U8Parser$1.resolve(attrs.URI, baseurl) : '' }; if (groups != null && groups.length) { // If there are audio or text groups signalled in the manifest, let's look for a matching codec string for this track // If we don't find the track signalled, lets use the first audio groups codec we have // Acting as a best guess const groupCodec = M3U8Parser$1.findGroup(groups, media.groupId) || groups[0]; assignCodec(media, groupCodec, 'audioCodec'); assignCodec(media, groupCodec, 'textCodec'); } medias.push(media); } } return results; } static parseLevelPlaylist(string, baseurl, id, type, levelUrlId, multivariantVariableList) { const level = new LevelDetails(baseurl); const fragments = level.fragments; // The most recent init segment seen (applies to all subsequent segments) let currentInitSegment = null; let currentSN = 0; let currentPart = 0; let totalduration = 0; let discontinuityCounter = 0; let prevFrag = null; let frag = new Fragment(type, baseurl); let result; let i; let levelkeys; let firstPdtIndex = -1; let createNextFrag = false; LEVEL_PLAYLIST_REGEX_FAST.lastIndex = 0; level.m3u8 = string; level.hasVariableRefs = hasVariableReferences(string) ; while ((result = LEVEL_PLAYLIST_REGEX_FAST.exec(string)) !== null) { if (createNextFrag) { createNextFrag = false; frag = new Fragment(type, baseurl); // setup the next fragment for part loading frag.start = totalduration; frag.sn = currentSN; frag.cc = discontinuityCounter; frag.level = id; if (currentInitSegment) { frag.initSegment = currentInitSegment; frag.rawProgramDateTime = currentInitSegment.rawProgramDateTime; currentInitSegment.rawProgramDateTime = null; } } const duration = result[1]; if (duration) { // INF frag.duration = parseFloat(duration); // avoid sliced strings https://github.com/video-dev/hls.js/issues/939 const title = (' ' + result[2]).slice(1); frag.title = title || null; frag.tagList.push(title ? ['INF', duration, title] : ['INF', duration]); } else if (result[3]) { // url if (isFiniteNumber(frag.duration)) { frag.start = totalduration; if (levelkeys) { setFragLevelKeys(frag, levelkeys, level); } frag.sn = currentSN; frag.level = id; frag.cc = discontinuityCounter; frag.urlId = levelUrlId; fragments.push(frag); // avoid sliced strings https://github.com/video-dev/hls.js/issues/939 const uri = (' ' + result[3]).slice(1); frag.relurl = substituteVariables(level, uri) ; assignProgramDateTime(frag, prevFrag); prevFrag = frag; totalduration += frag.duration; currentSN++; currentPart = 0; createNextFrag = true; } } else if (result[4]) { // X-BYTERANGE const data = (' ' + result[4]).slice(1); if (prevFrag) { frag.setByteRange(data, prevFrag); } else { frag.setByteRange(data); } } else if (result[5]) { // PROGRAM-DATE-TIME // avoid sliced strings https://github.com/video-dev/hls.js/issues/939 frag.rawProgramDateTime = (' ' + result[5]).slice(1); frag.tagList.push(['PROGRAM-DATE-TIME', frag.rawProgramDateTime]); if (firstPdtIndex === -1) { firstPdtIndex = fragments.length; } } else { result = result[0].match(LEVEL_PLAYLIST_REGEX_SLOW); if (!result) { logger.warn('No matches on slow regex match for level playlist!'); continue; } for (i = 1; i < result.length; i++) { if (typeof result[i] !== 'undefined') { break; } } // avoid sliced strings https://github.com/video-dev/hls.js/issues/939 const tag = (' ' + result[i]).slice(1); const value1 = (' ' + result[i + 1]).slice(1); const value2 = result[i + 2] ? 
(' ' + result[i + 2]).slice(1) : ''; switch (tag) { case 'PLAYLIST-TYPE': level.type = value1.toUpperCase(); break; case 'MEDIA-SEQUENCE': currentSN = level.startSN = parseInt(value1); break; case 'SKIP': { const skipAttrs = new AttrList(value1); { substituteVariablesInAttributes(level, skipAttrs, ['RECENTLY-REMOVED-DATERANGES']); } const skippedSegments = skipAttrs.decimalInteger('SKIPPED-SEGMENTS'); if (isFiniteNumber(skippedSegments)) { level.skippedSegments = skippedSegments; // This will result in fragments[] containing undefined values, which we will fill in with `mergeDetails` for (let _i = skippedSegments; _i--;) { fragments.unshift(null); } currentSN += skippedSegments; } const recentlyRemovedDateranges = skipAttrs.enumeratedString('RECENTLY-REMOVED-DATERANGES'); if (recentlyRemovedDateranges) { level.recentlyRemovedDateranges = recentlyRemovedDateranges.split('\t'); } break; } case 'TARGETDURATION': level.targetduration = Math.max(parseInt(value1), 1); break; case 'VERSION': level.version = parseInt(value1); break; case 'EXTM3U': break; case 'ENDLIST': level.live = false; break; case '#': if (value1 || value2) { frag.tagList.push(value2 ? [value1, value2] : [value1]); } break; case 'DISCONTINUITY': discontinuityCounter++; frag.tagList.push(['DIS']); break; case 'GAP': frag.gap = true; frag.tagList.push([tag]); break; case 'BITRATE': frag.tagList.push([tag, value1]); break; case 'DATERANGE': { const dateRangeAttr = new AttrList(value1); { substituteVariablesInAttributes(level, dateRangeAttr, ['ID', 'CLASS', 'START-DATE', 'END-DATE', 'SCTE35-CMD', 'SCTE35-OUT', 'SCTE35-IN']); substituteVariablesInAttributes(level, dateRangeAttr, dateRangeAttr.clientAttrs); } const dateRange = new DateRange(dateRangeAttr, level.dateRanges[dateRangeAttr.ID]); if (dateRange.isValid || level.skippedSegments) { level.dateRanges[dateRange.id] = dateRange; } else { logger.warn(`Ignoring invalid DATERANGE tag: "${value1}"`); } // Add to fragment tag list for backwards compatibility (< v1.2.0) frag.tagList.push(['EXT-X-DATERANGE', value1]); break; } case 'DEFINE': { { const variableAttributes = new AttrList(value1); substituteVariablesInAttributes(level, variableAttributes, ['NAME', 'VALUE', 'IMPORT', 'QUERYPARAM']); if ('IMPORT' in variableAttributes) { importVariableDefinition(level, variableAttributes, multivariantVariableList); } else { addVariableDefinition(level, variableAttributes, baseurl); } } break; } case 'DISCONTINUITY-SEQUENCE': discontinuityCounter = parseInt(value1); break; case 'KEY': { const levelKey = parseKey(value1, baseurl, level); if (levelKey.isSupported()) { if (levelKey.method === 'NONE') { levelkeys = undefined; break; } if (!levelkeys) { levelkeys = {}; } if (levelkeys[levelKey.keyFormat]) { levelkeys = _extends({}, levelkeys); } levelkeys[levelKey.keyFormat] = levelKey; } else { logger.warn(`[Keys] Ignoring invalid EXT-X-KEY tag: "${value1}"`); } break; } case 'START': level.startTimeOffset = parseStartTimeOffset(value1); break; case 'MAP': { const mapAttrs = new AttrList(value1); { substituteVariablesInAttributes(level, mapAttrs, ['BYTERANGE', 'URI']); } if (frag.duration) { // Initial segment tag is after segment duration tag. 
// #EXTINF: 6.0 // #EXT-X-MAP:URI="init.mp4 const init = new Fragment(type, baseurl); setInitSegment(init, mapAttrs, id, levelkeys); currentInitSegment = init; frag.initSegment = currentInitSegment; if (currentInitSegment.rawProgramDateTime && !frag.rawProgramDateTime) { frag.rawProgramDateTime = currentInitSegment.rawProgramDateTime; } } else { // Initial segment tag is before segment duration tag setInitSegment(frag, mapAttrs, id, levelkeys); currentInitSegment = frag; createNextFrag = true; } break; } case 'SERVER-CONTROL': { const serverControlAttrs = new AttrList(value1); level.canBlockReload = serverControlAttrs.bool('CAN-BLOCK-RELOAD'); level.canSkipUntil = serverControlAttrs.optionalFloat('CAN-SKIP-UNTIL', 0); level.canSkipDateRanges = level.canSkipUntil > 0 && serverControlAttrs.bool('CAN-SKIP-DATERANGES'); level.partHoldBack = serverControlAttrs.optionalFloat('PART-HOLD-BACK', 0); level.holdBack = serverControlAttrs.optionalFloat('HOLD-BACK', 0); break; } case 'PART-INF': { const partInfAttrs = new AttrList(value1); level.partTarget = partInfAttrs.decimalFloatingPoint('PART-TARGET'); break; } case 'PART': { let partList = level.partList; if (!partList) { partList = level.partList = []; } const previousFragmentPart = currentPart > 0 ? partList[partList.length - 1] : undefined; const index = currentPart++; const partAttrs = new AttrList(value1); { substituteVariablesInAttributes(level, partAttrs, ['BYTERANGE', 'URI']); } const part = new Part(partAttrs, frag, baseurl, index, previousFragmentPart); partList.push(part); frag.duration += part.duration; break; } case 'PRELOAD-HINT': { const preloadHintAttrs = new AttrList(value1); { substituteVariablesInAttributes(level, preloadHintAttrs, ['URI']); } level.preloadHint = preloadHintAttrs; break; } case 'RENDITION-REPORT': { const renditionReportAttrs = new AttrList(value1); { substituteVariablesInAttributes(level, renditionReportAttrs, ['URI']); } level.renditionReports = level.renditionReports || []; level.renditionReports.push(renditionReportAttrs); break; } default: logger.warn(`line parsed but not handled: ${result}`); break; } } } if (prevFrag && !prevFrag.relurl) { fragments.pop(); totalduration -= prevFrag.duration; if (level.partList) { level.fragmentHint = prevFrag; } } else if (level.partList) { assignProgramDateTime(frag, prevFrag); frag.cc = discontinuityCounter; level.fragmentHint = frag; if (levelkeys) { setFragLevelKeys(frag, levelkeys, level); } } const fragmentLength = fragments.length; const firstFragment = fragments[0]; const lastFragment = fragments[fragmentLength - 1]; totalduration += level.skippedSegments * level.targetduration; if (totalduration > 0 && fragmentLength && lastFragment) { level.averagetargetduration = totalduration / fragmentLength; const lastSn = lastFragment.sn; level.endSN = lastSn !== 'initSegment' ? lastSn : 0; if (!level.live) { lastFragment.endList = true; } if (firstFragment) { level.startCC = firstFragment.cc; } } else { level.endSN = 0; level.startCC = 0; } if (level.fragmentHint) { totalduration += level.fragmentHint.duration; } level.totalduration = totalduration; level.endCC = discontinuityCounter; /** * Backfill any missing PDT values * "If the first EXT-X-PROGRAM-DATE-TIME tag in a Playlist appears after * one or more Media Segment URIs, the client SHOULD extrapolate * backward from that tag (using EXTINF durations and/or media * timestamps) to associate dates with those segments." 
* We have already extrapolated forward, but all fragments up to the first instance of PDT do not have their PDTs * computed. */ if (firstPdtIndex > 0) { backfillProgramDateTimes(fragments, firstPdtIndex); } return level; } } function parseKey(keyTagAttributes, baseurl, parsed) { var _keyAttrs$METHOD, _keyAttrs$KEYFORMAT; // https://tools.ietf.org/html/rfc8216#section-4.3.2.4 const keyAttrs = new AttrList(keyTagAttributes); { substituteVariablesInAttributes(parsed, keyAttrs, ['KEYFORMAT', 'KEYFORMATVERSIONS', 'URI', 'IV', 'URI']); } const decryptmethod = (_keyAttrs$METHOD = keyAttrs.METHOD) != null ? _keyAttrs$METHOD : ''; const decrypturi = keyAttrs.URI; const decryptiv = keyAttrs.hexadecimalInteger('IV'); const decryptkeyformatversions = keyAttrs.KEYFORMATVERSIONS; // From RFC: This attribute is OPTIONAL; its absence indicates an implicit value of "identity". const decryptkeyformat = (_keyAttrs$KEYFORMAT = keyAttrs.KEYFORMAT) != null ? _keyAttrs$KEYFORMAT : 'identity'; if (decrypturi && keyAttrs.IV && !decryptiv) { logger.error(`Invalid IV: ${keyAttrs.IV}`); } // If decrypturi is a URI with a scheme, then baseurl will be ignored // No uri is allowed when METHOD is NONE const resolvedUri = decrypturi ? M3U8Parser$1.resolve(decrypturi, baseurl) : ''; const keyFormatVersions = (decryptkeyformatversions ? decryptkeyformatversions : '1').split('/').map(Number).filter(Number.isFinite); return new LevelKey(decryptmethod, resolvedUri, decryptkeyformat, keyFormatVersions, decryptiv); } function parseStartTimeOffset(startAttributes) { const startAttrs = new AttrList(startAttributes); const startTimeOffset = startAttrs.decimalFloatingPoint('TIME-OFFSET'); if (isFiniteNumber(startTimeOffset)) { return startTimeOffset; } return null; } function setCodecs(codecs, level) { ['video', 'audio', 'text'].forEach(type => { const filtered = codecs.filter(codec => isCodecType(codec, type)); if (filtered.length) { const preferred = filtered.filter(codec => { return codec.lastIndexOf('avc1', 0) === 0 || codec.lastIndexOf('mp4a', 0) === 0; }); level[`${type}Codec`] = preferred.length > 0 ? 
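// Codec selection: prefer AVC ('avc1...') / AAC ('mp4a...') entries when several codecs of the
// same kind are signalled; anything that cannot be classified ends up in level.unknownCodecs.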
preferred[0] : filtered[0]; // remove from list codecs = codecs.filter(codec => filtered.indexOf(codec) === -1); } }); level.unknownCodecs = codecs; } function assignCodec(media, groupItem, codecProperty) { const codecValue = groupItem[codecProperty]; if (codecValue) { media[codecProperty] = codecValue; } } function backfillProgramDateTimes(fragments, firstPdtIndex) { let fragPrev = fragments[firstPdtIndex]; for (let i = firstPdtIndex; i--;) { const frag = fragments[i]; // Exit on delta-playlist skipped segments if (!frag) { return; } frag.programDateTime = fragPrev.programDateTime - frag.duration * 1000; fragPrev = frag; } } function assignProgramDateTime(frag, prevFrag) { if (frag.rawProgramDateTime) { frag.programDateTime = Date.parse(frag.rawProgramDateTime); } else if (prevFrag != null && prevFrag.programDateTime) { frag.programDateTime = prevFrag.endProgramDateTime; } if (!isFiniteNumber(frag.programDateTime)) { frag.programDateTime = null; frag.rawProgramDateTime = null; } } function setInitSegment(frag, mapAttrs, id, levelkeys) { frag.relurl = mapAttrs.URI; if (mapAttrs.BYTERANGE) { frag.setByteRange(mapAttrs.BYTERANGE); } frag.level = id; frag.sn = 'initSegment'; if (levelkeys) { frag.levelkeys = levelkeys; } frag.initSegment = null; } function setFragLevelKeys(frag, levelkeys, level) { frag.levelkeys = levelkeys; const { encryptedFragments } = level; if ((!encryptedFragments.length || encryptedFragments[encryptedFragments.length - 1].levelkeys !== levelkeys) && Object.keys(levelkeys).some(format => levelkeys[format].isCommonEncryption)) { encryptedFragments.push(frag); } } var PlaylistContextType = { MANIFEST: "manifest", LEVEL: "level", AUDIO_TRACK: "audioTrack", SUBTITLE_TRACK: "subtitleTrack" }; var PlaylistLevelType = { MAIN: "main", AUDIO: "audio", SUBTITLE: "subtitle" }; function mapContextToLevelType(context) { const { type } = context; switch (type) { case PlaylistContextType.AUDIO_TRACK: return PlaylistLevelType.AUDIO; case PlaylistContextType.SUBTITLE_TRACK: return PlaylistLevelType.SUBTITLE; default: return PlaylistLevelType.MAIN; } } function getResponseUrl(response, context) { let url = response.url; // responseURL not supported on some browsers (it is used to detect URL redirection) // data-uri mode also not supported (but no need to detect redirection) if (url === undefined || url.indexOf('data:') === 0) { // fallback to initial URL url = context.url; } return url; } class PlaylistLoader { constructor(hls) { this.hls = void 0; this.loaders = Object.create(null); this.variableList = null; this.hls = hls; this.registerListeners(); } startLoad(startPosition) {} stopLoad() { this.destroyInternalLoaders(); } registerListeners() { const { hls } = this; hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this); hls.on(Events.AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this); hls.on(Events.SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this); } unregisterListeners() { const { hls } = this; hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this); hls.off(Events.AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this); hls.off(Events.SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this); } /** * Returns defaults or configured loader-type overloads (pLoader and loader config params) */ createInternalLoader(context) { const config = this.hls.config; const PLoader = config.pLoader; const Loader = config.loader; const InternalLoader = PLoader || 
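// A configured pLoader (playlist-specific loader) takes precedence over the generic config.loader;
// one internal loader instance is cached per playlist context type (manifest, level, audio, subtitle).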
Loader; const loader = new InternalLoader(config); this.loaders[context.type] = loader; return loader; } getInternalLoader(context) { return this.loaders[context.type]; } resetInternalLoader(contextType) { if (this.loaders[contextType]) { delete this.loaders[contextType]; } } /** * Call `destroy` on all internal loader instances mapped (one per context type) */ destroyInternalLoaders() { for (const contextType in this.loaders) { const loader = this.loaders[contextType]; if (loader) { loader.destroy(); } this.resetInternalLoader(contextType); } } destroy() { this.variableList = null; this.unregisterListeners(); this.destroyInternalLoaders(); } onManifestLoading(event, data) { const { url } = data; this.variableList = null; this.load({ id: null, level: 0, responseType: 'text', type: PlaylistContextType.MANIFEST, url, deliveryDirectives: null }); } onLevelLoading(event, data) { const { id, level, url, deliveryDirectives } = data; this.load({ id, level, responseType: 'text', type: PlaylistContextType.LEVEL, url, deliveryDirectives }); } onAudioTrackLoading(event, data) { const { id, groupId, url, deliveryDirectives } = data; this.load({ id, groupId, level: null, responseType: 'text', type: PlaylistContextType.AUDIO_TRACK, url, deliveryDirectives }); } onSubtitleTrackLoading(event, data) { const { id, groupId, url, deliveryDirectives } = data; this.load({ id, groupId, level: null, responseType: 'text', type: PlaylistContextType.SUBTITLE_TRACK, url, deliveryDirectives }); } load(context) { var _context$deliveryDire; const config = this.hls.config; // logger.debug(`[playlist-loader]: Loading playlist of type ${context.type}, level: ${context.level}, id: ${context.id}`); // Check if a loader for this context already exists let loader = this.getInternalLoader(context); if (loader) { const loaderContext = loader.context; if (loaderContext && loaderContext.url === context.url) { // same URL can't overlap logger.trace('[playlist-loader]: playlist request ongoing'); return; } logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`); loader.abort(); } // apply different configs for retries depending on // context (manifest, level, audio/subs playlist) let loadPolicy; if (context.type === PlaylistContextType.MANIFEST) { loadPolicy = config.manifestLoadPolicy.default; } else { loadPolicy = _extends({}, config.playlistLoadPolicy.default, { timeoutRetry: null, errorRetry: null }); } loader = this.createInternalLoader(context); // Override level/track timeout for LL-HLS requests // (the default of 10000ms is counter productive to blocking playlist reload requests) if ((_context$deliveryDire = context.deliveryDirectives) != null && _context$deliveryDire.part) { let levelDetails; if (context.type === PlaylistContextType.LEVEL && context.level !== null) { levelDetails = this.hls.levels[context.level].details; } else if (context.type === PlaylistContextType.AUDIO_TRACK && context.id !== null) { levelDetails = this.hls.audioTracks[context.id].details; } else if (context.type === PlaylistContextType.SUBTITLE_TRACK && context.id !== null) { levelDetails = this.hls.subtitleTracks[context.id].details; } if (levelDetails) { const partTarget = levelDetails.partTarget; const targetDuration = levelDetails.targetduration; if (partTarget && targetDuration) { const maxLowLatencyPlaylistRefresh = Math.max(partTarget * 3, targetDuration * 0.8) * 1000; loadPolicy = _extends({}, loadPolicy, { maxTimeToFirstByteMs: Math.min(maxLowLatencyPlaylistRefresh, loadPolicy.maxTimeToFirstByteMs), maxLoadTimeMs: 
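// LL-HLS: blocking playlist reloads tighten the load timeouts to max(3 * PART-TARGET,
// 0.8 * target duration), e.g. PART-TARGET=1s with a 4s target duration gives a 3200ms ceiling.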
Math.min(maxLowLatencyPlaylistRefresh, loadPolicy.maxTimeToFirstByteMs) }); } } } const legacyRetryCompatibility = loadPolicy.errorRetry || loadPolicy.timeoutRetry || {}; const loaderConfig = { loadPolicy, timeout: loadPolicy.maxLoadTimeMs, maxRetry: legacyRetryCompatibility.maxNumRetry || 0, retryDelay: legacyRetryCompatibility.retryDelayMs || 0, maxRetryDelay: legacyRetryCompatibility.maxRetryDelayMs || 0 }; const loaderCallbacks = { onSuccess: (response, stats, context, networkDetails) => { const loader = this.getInternalLoader(context); this.resetInternalLoader(context.type); const string = response.data; // Validate if it is an M3U8 at all if (string.indexOf('#EXTM3U') !== 0) { this.handleManifestParsingError(response, context, new Error('no EXTM3U delimiter'), networkDetails || null, stats); return; } stats.parsing.start = performance.now(); if (M3U8Parser$1.isMediaPlaylist(string)) { this.handleTrackOrLevelPlaylist(response, stats, context, networkDetails || null, loader); } else { this.handleMasterPlaylist(response, stats, context, networkDetails); } }, onError: (response, context, networkDetails, stats) => { this.handleNetworkError(context, networkDetails, false, response, stats); }, onTimeout: (stats, context, networkDetails) => { this.handleNetworkError(context, networkDetails, true, undefined, stats); } }; // logger.debug(`[playlist-loader]: Calling internal loader delegate for URL: ${context.url}`); loader.load(context, loaderConfig, loaderCallbacks); } handleMasterPlaylist(response, stats, context, networkDetails) { const hls = this.hls; const string = response.data; const url = getResponseUrl(response, context); const parsedResult = M3U8Parser$1.parseMasterPlaylist(string, url); if (parsedResult.playlistParsingError) { this.handleManifestParsingError(response, context, parsedResult.playlistParsingError, networkDetails, stats); return; } const { contentSteering, levels, sessionData, sessionKeys, startTimeOffset, variableList } = parsedResult; this.variableList = variableList; const { AUDIO: audioTracks = [], SUBTITLES: subtitles, 'CLOSED-CAPTIONS': captions } = M3U8Parser$1.parseMasterPlaylistMedia(string, url, parsedResult); if (audioTracks.length) { // check if we have found an audio track embedded in main playlist (audio track without URI attribute) const embeddedAudioFound = audioTracks.some(audioTrack => !audioTrack.url); // if no embedded audio track defined, but audio codec signaled in quality level, // we need to signal this main audio track this could happen with playlists with // alt audio rendition in which quality levels (main) // contains both audio+video. but with mixed audio track not signaled if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) { logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one'); audioTracks.unshift({ type: 'main', name: 'main', groupId: 'main', default: false, autoselect: false, forced: false, id: -1, attrs: new AttrList({}), bitrate: 0, url: '' }); } } hls.trigger(Events.MANIFEST_LOADED, { levels, audioTracks, subtitles, captions, contentSteering, url, stats, networkDetails, sessionData, sessionKeys, startTimeOffset, variableList }); } handleTrackOrLevelPlaylist(response, stats, context, networkDetails, loader) { const hls = this.hls; const { id, level, type } = context; const url = getResponseUrl(response, context); const levelUrlId = isFiniteNumber(id) ? id : 0; const levelId = isFiniteNumber(level) ? 
level : levelUrlId; const levelType = mapContextToLevelType(context); const levelDetails = M3U8Parser$1.parseLevelPlaylist(response.data, url, levelId, levelType, levelUrlId, this.variableList); // We have done our first request (Manifest-type) and receive // not a master playlist but a chunk-list (track/level) // We fire the manifest-loaded event anyway with the parsed level-details // by creating a single-level structure for it. if (type === PlaylistContextType.MANIFEST) { const singleLevel = { attrs: new AttrList({}), bitrate: 0, details: levelDetails, name: '', url }; hls.trigger(Events.MANIFEST_LOADED, { levels: [singleLevel], audioTracks: [], url, stats, networkDetails, sessionData: null, sessionKeys: null, contentSteering: null, startTimeOffset: null, variableList: null }); } // save parsing time stats.parsing.end = performance.now(); // extend the context with the new levelDetails property context.levelDetails = levelDetails; this.handlePlaylistLoaded(levelDetails, response, stats, context, networkDetails, loader); } handleManifestParsingError(response, context, error, networkDetails, stats) { this.hls.trigger(Events.ERROR, { type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.MANIFEST_PARSING_ERROR, fatal: context.type === PlaylistContextType.MANIFEST, url: response.url, err: error, error, reason: error.message, response, context, networkDetails, stats }); } handleNetworkError(context, networkDetails, timeout = false, response, stats) { let message = `A network ${timeout ? 'timeout' : 'error' + (response ? ' (status ' + response.code + ')' : '')} occurred while loading ${context.type}`; if (context.type === PlaylistContextType.LEVEL) { message += `: ${context.level} id: ${context.id}`; } else if (context.type === PlaylistContextType.AUDIO_TRACK || context.type === PlaylistContextType.SUBTITLE_TRACK) { message += ` id: ${context.id} group-id: "${context.groupId}"`; } const error = new Error(message); logger.warn(`[playlist-loader]: ${message}`); let details = ErrorDetails.UNKNOWN; let fatal = false; const loader = this.getInternalLoader(context); switch (context.type) { case PlaylistContextType.MANIFEST: details = timeout ? ErrorDetails.MANIFEST_LOAD_TIMEOUT : ErrorDetails.MANIFEST_LOAD_ERROR; fatal = true; break; case PlaylistContextType.LEVEL: details = timeout ? ErrorDetails.LEVEL_LOAD_TIMEOUT : ErrorDetails.LEVEL_LOAD_ERROR; fatal = false; break; case PlaylistContextType.AUDIO_TRACK: details = timeout ? ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT : ErrorDetails.AUDIO_TRACK_LOAD_ERROR; fatal = false; break; case PlaylistContextType.SUBTITLE_TRACK: details = timeout ? ErrorDetails.SUBTITLE_TRACK_LOAD_TIMEOUT : ErrorDetails.SUBTITLE_LOAD_ERROR; fatal = false; break; } if (loader) { this.resetInternalLoader(context.type); } const errorData = { type: ErrorTypes.NETWORK_ERROR, details, fatal, url: context.url, loader, context, error, networkDetails, stats }; if (response) { const url = (networkDetails == null ? void 0 : networkDetails.url) || context.url; errorData.response = _objectSpread2({ url, data: undefined }, response); } this.hls.trigger(Events.ERROR, errorData); } handlePlaylistLoaded(levelDetails, response, stats, context, networkDetails, loader) { const hls = this.hls; const { type, level, id, groupId, deliveryDirectives } = context; const url = getResponseUrl(response, context); const parent = mapContextToLevelType(context); const levelIndex = typeof context.level === 'number' && parent === PlaylistLevelType.MAIN ? 
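// handlePlaylistLoaded below: an empty media playlist raises LEVEL_EMPTY_ERROR, a missing
// EXT-X-TARGETDURATION is reported as LEVEL_PARSING_ERROR, and only valid playlists are forwarded
// as LEVEL_LOADED / AUDIO_TRACK_LOADED / SUBTITLE_TRACK_LOADED events.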
level : undefined; if (!levelDetails.fragments.length) { const _error = new Error('No Segments found in Playlist'); hls.trigger(Events.ERROR, { type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.LEVEL_EMPTY_ERROR, fatal: false, url, error: _error, reason: _error.message, response, context, level: levelIndex, parent, networkDetails, stats }); return; } if (!levelDetails.targetduration) { levelDetails.playlistParsingError = new Error('Missing Target Duration'); } const error = levelDetails.playlistParsingError; if (error) { hls.trigger(Events.ERROR, { type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.LEVEL_PARSING_ERROR, fatal: false, url, error, reason: error.message, response, context, level: levelIndex, parent, networkDetails, stats }); return; } if (levelDetails.live && loader) { if (loader.getCacheAge) { levelDetails.ageHeader = loader.getCacheAge() || 0; } if (!loader.getCacheAge || isNaN(levelDetails.ageHeader)) { levelDetails.ageHeader = 0; } } switch (type) { case PlaylistContextType.MANIFEST: case PlaylistContextType.LEVEL: hls.trigger(Events.LEVEL_LOADED, { details: levelDetails, level: levelIndex || 0, id: id || 0, stats, networkDetails, deliveryDirectives }); break; case PlaylistContextType.AUDIO_TRACK: hls.trigger(Events.AUDIO_TRACK_LOADED, { details: levelDetails, id: id || 0, groupId: groupId || '', stats, networkDetails, deliveryDirectives }); break; case PlaylistContextType.SUBTITLE_TRACK: hls.trigger(Events.SUBTITLE_TRACK_LOADED, { details: levelDetails, id: id || 0, groupId: groupId || '', stats, networkDetails, deliveryDirectives }); break; } } } function sendAddTrackEvent(track, videoEl) { let event; try { event = new Event('addtrack'); } catch (err) { // for IE11 event = document.createEvent('Event'); event.initEvent('addtrack', false, false); } event.track = track; videoEl.dispatchEvent(event); } function addCueToTrack(track, cue) { // Sometimes there are cue overlaps on segmented vtts so the same // cue can appear more than once in different vtt files. // This avoid showing duplicated cues with same timecode and text. const mode = track.mode; if (mode === 'disabled') { track.mode = 'hidden'; } if (track.cues && !track.cues.getCueById(cue.id)) { try { track.addCue(cue); if (!track.cues.getCueById(cue.id)) { throw new Error(`addCue is failed for: ${cue}`); } } catch (err) { logger.debug(`[texttrack-utils]: ${err}`); try { const textTrackCue = new self.TextTrackCue(cue.startTime, cue.endTime, cue.text); textTrackCue.id = cue.id; track.addCue(textTrackCue); } catch (err2) { logger.debug(`[texttrack-utils]: Legacy TextTrackCue fallback failed: ${err2}`); } } } if (mode === 'disabled') { track.mode = mode; } } function clearCurrentCues(track) { // When track.mode is disabled, track.cues will be null. // To guarantee the removal of cues, we need to temporarily // change the mode to hidden const mode = track.mode; if (mode === 'disabled') { track.mode = 'hidden'; } if (track.cues) { for (let i = track.cues.length; i--;) { track.removeCue(track.cues[i]); } } if (mode === 'disabled') { track.mode = mode; } } function removeCuesInRange(track, start, end, predicate) { const mode = track.mode; if (mode === 'disabled') { track.mode = 'hidden'; } if (track.cues && track.cues.length > 0) { const cues = getCuesInRange(track.cues, start, end); for (let i = 0; i < cues.length; i++) { if (!predicate || predicate(cues[i])) { track.removeCue(cues[i]); } } } if (mode === 'disabled') { track.mode = mode; } } // Find first cue starting after given time. 
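// getFirstCueIndexAfterTime returns 0 when `time` is before the first cue, -1 when it is past the
// last cue's end, and otherwise the index of the cue whose startTime is closest to `time`.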
// Modified version of binary search O(log(n)). function getFirstCueIndexAfterTime(cues, time) { // If first cue starts after time, start there if (time < cues[0].startTime) { return 0; } // If the last cue ends before time there is no overlap const len = cues.length - 1; if (time > cues[len].endTime) { return -1; } let left = 0; let right = len; while (left <= right) { const mid = Math.floor((right + left) / 2); if (time < cues[mid].startTime) { right = mid - 1; } else if (time > cues[mid].startTime && left < len) { left = mid + 1; } else { // If it's not lower or higher, it must be equal. return mid; } } // At this point, left and right have swapped. // No direct match was found, left or right element must be the closest. Check which one has the smallest diff. return cues[left].startTime - time < time - cues[right].startTime ? left : right; } function getCuesInRange(cues, start, end) { const cuesFound = []; const firstCueInRange = getFirstCueIndexAfterTime(cues, start); if (firstCueInRange > -1) { for (let i = firstCueInRange, len = cues.length; i < len; i++) { const cue = cues[i]; if (cue.startTime >= start && cue.endTime <= end) { cuesFound.push(cue); } else if (cue.startTime > end) { return cuesFound; } } } return cuesFound; } var MetadataSchema = { audioId3: "org.id3", dateRange: "com.apple.quicktime.HLS", emsg: "https://aomedia.org/emsg/ID3" }; const MIN_CUE_DURATION = 0.25; function getCueClass() { if (typeof self === 'undefined') return undefined; // Attempt to recreate Safari functionality by creating // WebKitDataCue objects when available and store the decoded // ID3 data in the value property of the cue return self.WebKitDataCue || self.VTTCue || self.TextTrackCue; } // VTTCue latest draft allows an infinite duration, fallback // to MAX_VALUE if necessary const MAX_CUE_ENDTIME = (() => { const Cue = getCueClass(); try { new Cue(0, Number.POSITIVE_INFINITY, ''); } catch (e) { return Number.MAX_VALUE; } return Number.POSITIVE_INFINITY; })(); function dateRangeDateToTimelineSeconds(date, offset) { return date.getTime() / 1000 - offset; } function hexToArrayBuffer(str) { return Uint8Array.from(str.replace(/^0x/, '').replace(/([\da-fA-F]{2}) ?/g, '0x$1 ').replace(/ +$/, '').split(' ')).buffer; } class ID3TrackController { constructor(hls) { this.hls = void 0; this.id3Track = null; this.media = null; this.dateRangeCuesAppended = {}; this.hls = hls; this._registerListeners(); } destroy() { this._unregisterListeners(); this.id3Track = null; this.media = null; this.dateRangeCuesAppended = {}; // @ts-ignore this.hls = null; } _registerListeners() { const { hls } = this; hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this); hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this); hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this); } _unregisterListeners() { const { hls } = this; hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this); hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this); hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this); } // Add ID3 metatadata text track. 
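// ID3TrackController mirrors in-band ID3 / emsg frames and playlist DATERANGE tags into a hidden
// 'metadata' text track labelled 'id3', so they are exposed to the page as TextTrack cues.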
onMediaAttached(event, data) { this.media = data.media; } onMediaDetaching() { if (!this.id3Track) { return; } clearCurrentCues(this.id3Track); this.id3Track = null; this.media = null; this.dateRangeCuesAppended = {}; } onManifestLoading() { this.dateRangeCuesAppended = {}; } createTrack(media) { const track = this.getID3Track(media.textTracks); track.mode = 'hidden'; return track; } getID3Track(textTracks) { if (!this.media) { return; } for (let i = 0; i < textTracks.length; i++) { const textTrack = textTracks[i]; if (textTrack.kind === 'metadata' && textTrack.label === 'id3') { // send 'addtrack' when reusing the textTrack for metadata, // same as what we do for captions sendAddTrackEvent(textTrack, this.media); return textTrack; } } return this.media.addTextTrack('metadata', 'id3'); } onFragParsingMetadata(event, data) { if (!this.media) { return; } const { hls: { config: { enableEmsgMetadataCues, enableID3MetadataCues } } } = this; if (!enableEmsgMetadataCues && !enableID3MetadataCues) { return; } const { samples } = data; // create track dynamically if (!this.id3Track) { this.id3Track = this.createTrack(this.media); } const Cue = getCueClass(); for (let i = 0; i < samples.length; i++) { const type = samples[i].type; if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) { continue; } const frames = getID3Frames(samples[i].data); if (frames) { const startTime = samples[i].pts; let endTime = startTime + samples[i].duration; if (endTime > MAX_CUE_ENDTIME) { endTime = MAX_CUE_ENDTIME; } const timeDiff = endTime - startTime; if (timeDiff <= 0) { endTime = startTime + MIN_CUE_DURATION; } for (let j = 0; j < frames.length; j++) { const frame = frames[j]; // Safari doesn't put the timestamp frame in the TextTrack if (!isTimeStampFrame(frame)) { // add a bounds to any unbounded cues this.updateId3CueEnds(startTime, type); const cue = new Cue(startTime, endTime, ''); cue.value = frame; if (type) { cue.type = type; } this.id3Track.addCue(cue); } } } } } updateId3CueEnds(startTime, type) { var _this$id3Track; const cues = (_this$id3Track = this.id3Track) == null ? 
void 0 : _this$id3Track.cues; if (cues) { for (let i = cues.length; i--;) { const cue = cues[i]; if (cue.type === type && cue.startTime < startTime && cue.endTime === MAX_CUE_ENDTIME) { cue.endTime = startTime; } } } } onBufferFlushing(event, { startOffset, endOffset, type }) { const { id3Track, hls } = this; if (!hls) { return; } const { config: { enableEmsgMetadataCues, enableID3MetadataCues } } = hls; if (id3Track && (enableEmsgMetadataCues || enableID3MetadataCues)) { let predicate; if (type === 'audio') { predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues; } else if (type === 'video') { predicate = cue => cue.type === MetadataSchema.emsg && enableEmsgMetadataCues; } else { predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues || cue.type === MetadataSchema.emsg && enableEmsgMetadataCues; } removeCuesInRange(id3Track, startOffset, endOffset, predicate); } } onLevelUpdated(event, { details }) { if (!this.media || !details.hasProgramDateTime || !this.hls.config.enableDateRangeMetadataCues) { return; } const { dateRangeCuesAppended, id3Track } = this; const { dateRanges } = details; const ids = Object.keys(dateRanges); // Remove cues from track not found in details.dateRanges if (id3Track) { const idsToRemove = Object.keys(dateRangeCuesAppended).filter(id => !ids.includes(id)); for (let i = idsToRemove.length; i--;) { const id = idsToRemove[i]; Object.keys(dateRangeCuesAppended[id].cues).forEach(key => { id3Track.removeCue(dateRangeCuesAppended[id].cues[key]); }); delete dateRangeCuesAppended[id]; } } // Exit if the playlist does not have Date Ranges or does not have Program Date Time const lastFragment = details.fragments[details.fragments.length - 1]; if (ids.length === 0 || !isFiniteNumber(lastFragment == null ? void 0 : lastFragment.programDateTime)) { return; } if (!this.id3Track) { this.id3Track = this.createTrack(this.media); } const dateTimeOffset = lastFragment.programDateTime / 1000 - lastFragment.start; const Cue = getCueClass(); for (let i = 0; i < ids.length; i++) { const id = ids[i]; const dateRange = dateRanges[id]; const appendedDateRangeCues = dateRangeCuesAppended[id]; const cues = (appendedDateRangeCues == null ? void 0 : appendedDateRangeCues.cues) || {}; let durationKnown = (appendedDateRangeCues == null ? 
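// DATERANGE cue end times: an explicit END-DATE wins; with END-ON-NEXT the start of the next date
// range of the same CLASS is used; otherwise the cue stays open-ended (MAX_CUE_ENDTIME) until a
// later playlist update pins it down.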
void 0 : appendedDateRangeCues.durationKnown) || false; const startTime = dateRangeDateToTimelineSeconds(dateRange.startDate, dateTimeOffset); let endTime = MAX_CUE_ENDTIME; const endDate = dateRange.endDate; if (endDate) { endTime = dateRangeDateToTimelineSeconds(endDate, dateTimeOffset); durationKnown = true; } else if (dateRange.endOnNext && !durationKnown) { const nextDateRangeWithSameClass = ids.reduce((filterMapArray, id) => { const candidate = dateRanges[id]; if (candidate.class === dateRange.class && candidate.id !== id && candidate.startDate > dateRange.startDate) { filterMapArray.push(candidate); } return filterMapArray; }, []).sort((a, b) => a.startDate.getTime() - b.startDate.getTime())[0]; if (nextDateRangeWithSameClass) { endTime = dateRangeDateToTimelineSeconds(nextDateRangeWithSameClass.startDate, dateTimeOffset); durationKnown = true; } } const attributes = Object.keys(dateRange.attr); for (let j = 0; j < attributes.length; j++) { const key = attributes[j]; if (!isDateRangeCueAttribute(key)) { continue; } let cue = cues[key]; if (cue) { if (durationKnown && !appendedDateRangeCues.durationKnown) { cue.endTime = endTime; } } else { let data = dateRange.attr[key]; cue = new Cue(startTime, endTime, ''); if (isSCTE35Attribute(key)) { data = hexToArrayBuffer(data); } cue.value = { key, data }; cue.type = MetadataSchema.dateRange; cue.id = id; this.id3Track.addCue(cue); cues[key] = cue; } } dateRangeCuesAppended[id] = { cues, dateRange, durationKnown }; } } } class LatencyController { constructor(hls) { this.hls = void 0; this.config = void 0; this.media = null; this.levelDetails = null; this.currentTime = 0; this.stallCount = 0; this._latency = null; this.timeupdateHandler = () => this.timeupdate(); this.hls = hls; this.config = hls.config; this.registerListeners(); } get latency() { return this._latency || 0; } get maxLatency() { const { config, levelDetails } = this; if (config.liveMaxLatencyDuration !== undefined) { return config.liveMaxLatencyDuration; } return levelDetails ? config.liveMaxLatencyDurationCount * levelDetails.targetduration : 0; } get targetLatency() { const { levelDetails } = this; if (levelDetails === null) { return null; } const { holdBack, partHoldBack, targetduration } = levelDetails; const { liveSyncDuration, liveSyncDurationCount, lowLatencyMode } = this.config; const userConfig = this.hls.userConfig; let targetLatency = lowLatencyMode ? partHoldBack || holdBack : holdBack; if (userConfig.liveSyncDuration || userConfig.liveSyncDurationCount || targetLatency === 0) { targetLatency = liveSyncDuration !== undefined ? 
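// Target latency comes from the playlist's PART-HOLD-BACK / HOLD-BACK (low-latency mode prefers
// PART-HOLD-BACK) and falls back to liveSyncDuration / liveSyncDurationCount * targetduration;
// each buffer stall adds 1s, capped at one target duration.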
liveSyncDuration : liveSyncDurationCount * targetduration; } const maxLiveSyncOnStallIncrease = targetduration; const liveSyncOnStallIncrease = 1.0; return targetLatency + Math.min(this.stallCount * liveSyncOnStallIncrease, maxLiveSyncOnStallIncrease); } get liveSyncPosition() { const liveEdge = this.estimateLiveEdge(); const targetLatency = this.targetLatency; const levelDetails = this.levelDetails; if (liveEdge === null || targetLatency === null || levelDetails === null) { return null; } const edge = levelDetails.edge; const syncPosition = liveEdge - targetLatency - this.edgeStalled; const min = edge - levelDetails.totalduration; const max = edge - (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration); return Math.min(Math.max(min, syncPosition), max); } get drift() { const { levelDetails } = this; if (levelDetails === null) { return 1; } return levelDetails.drift; } get edgeStalled() { const { levelDetails } = this; if (levelDetails === null) { return 0; } const maxLevelUpdateAge = (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration) * 3; return Math.max(levelDetails.age - maxLevelUpdateAge, 0); } get forwardBufferLength() { const { media, levelDetails } = this; if (!media || !levelDetails) { return 0; } const bufferedRanges = media.buffered.length; return (bufferedRanges ? media.buffered.end(bufferedRanges - 1) : levelDetails.edge) - this.currentTime; } destroy() { this.unregisterListeners(); this.onMediaDetaching(); this.levelDetails = null; // @ts-ignore this.hls = this.timeupdateHandler = null; } registerListeners() { this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this); this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this); this.hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); this.hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this); this.hls.on(Events.ERROR, this.onError, this); } unregisterListeners() { this.hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this); this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this); this.hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); this.hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this); this.hls.off(Events.ERROR, this.onError, this); } onMediaAttached(event, data) { this.media = data.media; this.media.addEventListener('timeupdate', this.timeupdateHandler); } onMediaDetaching() { if (this.media) { this.media.removeEventListener('timeupdate', this.timeupdateHandler); this.media = null; } } onManifestLoading() { this.levelDetails = null; this._latency = null; this.stallCount = 0; } onLevelUpdated(event, { details }) { this.levelDetails = details; if (details.advanced) { this.timeupdate(); } if (!details.live && this.media) { this.media.removeEventListener('timeupdate', this.timeupdateHandler); } } onError(event, data) { var _this$levelDetails; if (data.details !== ErrorDetails.BUFFER_STALLED_ERROR) { return; } this.stallCount++; if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) { logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency'); } } timeupdate() { const { media, levelDetails } = this; if (!media || !levelDetails) { return; } this.currentTime = media.currentTime; const latency = this.computeLatency(); if (latency === null) { return; } this._latency = latency; // Adapt playbackRate to meet target latency in low-latency mode const { lowLatencyMode, maxLiveSyncPlaybackRate } = this.config; if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1) 
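// Live catch-up: only while live, within one target duration of the target latency, more than
// 0.05s behind it, and with over 1s of forward buffer. The logistic curve below (rounded to 0.05
// steps) gives roughly 1.65x for ~2s of excess latency with no edge stall; the rate is capped at
// min(2, maxLiveSyncPlaybackRate) and reset to 1 otherwise.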
{ return; } const targetLatency = this.targetLatency; if (targetLatency === null) { return; } const distanceFromTarget = latency - targetLatency; // Only adjust playbackRate when within one target duration of targetLatency // and more than one second from under-buffering. // Playback further than one target duration from target can be considered DVR playback. const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration); const inLiveRange = distanceFromTarget < liveMinLatencyDuration; if (levelDetails.live && inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) { const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate)); const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20; media.playbackRate = Math.min(max, Math.max(1, rate)); } else if (media.playbackRate !== 1 && media.playbackRate !== 0) { media.playbackRate = 1; } } estimateLiveEdge() { const { levelDetails } = this; if (levelDetails === null) { return null; } return levelDetails.edge + levelDetails.age; } computeLatency() { const liveEdge = this.estimateLiveEdge(); if (liveEdge === null) { return null; } return liveEdge - this.currentTime; } } const HdcpLevels = ['NONE', 'TYPE-0', 'TYPE-1', null]; var HlsSkip = { No: "", Yes: "YES", v2: "v2" }; function getSkipValue(details, msn) { const { canSkipUntil, canSkipDateRanges, endSN } = details; const snChangeGoal = msn !== undefined ? msn - endSN : 0; if (canSkipUntil && snChangeGoal < canSkipUntil) { if (canSkipDateRanges) { return HlsSkip.v2; } return HlsSkip.Yes; } return HlsSkip.No; } class HlsUrlParameters { constructor(msn, part, skip) { this.msn = void 0; this.part = void 0; this.skip = void 0; this.msn = msn; this.part = part; this.skip = skip; } addDirectives(uri) { const url = new self.URL(uri); if (this.msn !== undefined) { url.searchParams.set('_HLS_msn', this.msn.toString()); } if (this.part !== undefined) { url.searchParams.set('_HLS_part', this.part.toString()); } if (this.skip) { url.searchParams.set('_HLS_skip', this.skip); } return url.href; } } class Level { constructor(data) { this._attrs = void 0; this.audioCodec = void 0; this.bitrate = void 0; this.codecSet = void 0; this.height = void 0; this.id = void 0; this.name = void 0; this.videoCodec = void 0; this.width = void 0; this.unknownCodecs = void 0; this.audioGroupIds = void 0; this.details = void 0; this.fragmentError = 0; this.loadError = 0; this.loaded = void 0; this.realBitrate = 0; this.textGroupIds = void 0; this.url = void 0; this._urlId = 0; this.url = [data.url]; this._attrs = [data.attrs]; this.bitrate = data.bitrate; if (data.details) { this.details = data.details; } this.id = data.id || 0; this.name = data.name; this.width = data.width || 0; this.height = data.height || 0; this.audioCodec = data.audioCodec; this.videoCodec = data.videoCodec; this.unknownCodecs = data.unknownCodecs; this.codecSet = [data.videoCodec, data.audioCodec].filter(c => c).join(',').replace(/\.[^.,]+/g, ''); } get maxBitrate() { return Math.max(this.realBitrate, this.bitrate); } get attrs() { return this._attrs[this._urlId]; } get pathwayId() { return this.attrs['PATHWAY-ID'] || '.'; } get uri() { return this.url[this._urlId] || ''; } get urlId() { return this._urlId; } set urlId(value) { const newValue = value % this.url.length; if (this._urlId !== newValue) { this.fragmentError = 0; this.loadError = 0; this.details = undefined; this._urlId = newValue; } } get audioGroupId() { var _this$audioGroupIds; return 
(_this$audioGroupIds = this.audioGroupIds) == null ? void 0 : _this$audioGroupIds[this.urlId]; } get textGroupId() { var _this$textGroupIds; return (_this$textGroupIds = this.textGroupIds) == null ? void 0 : _this$textGroupIds[this.urlId]; } addFallback(data) { this.url.push(data.url); this._attrs.push(data.attrs); } } function updateFromToPTS(fragFrom, fragTo) { const fragToPTS = fragTo.startPTS; // if we know startPTS[toIdx] if (isFiniteNumber(fragToPTS)) { // update fragment duration. // it helps to fix drifts between playlist reported duration and fragment real duration let duration = 0; let frag; if (fragTo.sn > fragFrom.sn) { duration = fragToPTS - fragFrom.start; frag = fragFrom; } else { duration = fragFrom.start - fragToPTS; frag = fragTo; } if (frag.duration !== duration) { frag.duration = duration; } // we dont know startPTS[toIdx] } else if (fragTo.sn > fragFrom.sn) { const contiguous = fragFrom.cc === fragTo.cc; // TODO: With part-loading end/durations we need to confirm the whole fragment is loaded before using (or setting) minEndPTS if (contiguous && fragFrom.minEndPTS) { fragTo.start = fragFrom.start + (fragFrom.minEndPTS - fragFrom.start); } else { fragTo.start = fragFrom.start + fragFrom.duration; } } else { fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0); } } function updateFragPTSDTS(details, frag, startPTS, endPTS, startDTS, endDTS) { const parsedMediaDuration = endPTS - startPTS; if (parsedMediaDuration <= 0) { logger.warn('Fragment should have a positive duration', frag); endPTS = startPTS + frag.duration; endDTS = startDTS + frag.duration; } let maxStartPTS = startPTS; let minEndPTS = endPTS; const fragStartPts = frag.startPTS; const fragEndPts = frag.endPTS; if (isFiniteNumber(fragStartPts)) { // delta PTS between audio and video const deltaPTS = Math.abs(fragStartPts - startPTS); if (!isFiniteNumber(frag.deltaPTS)) { frag.deltaPTS = deltaPTS; } else { frag.deltaPTS = Math.max(deltaPTS, frag.deltaPTS); } maxStartPTS = Math.max(startPTS, fragStartPts); startPTS = Math.min(startPTS, fragStartPts); startDTS = Math.min(startDTS, frag.startDTS); minEndPTS = Math.min(endPTS, fragEndPts); endPTS = Math.max(endPTS, fragEndPts); endDTS = Math.max(endDTS, frag.endDTS); } const drift = startPTS - frag.start; if (frag.start !== 0) { frag.start = startPTS; } frag.duration = endPTS - frag.start; frag.startPTS = startPTS; frag.maxStartPTS = maxStartPTS; frag.startDTS = startDTS; frag.endPTS = endPTS; frag.minEndPTS = minEndPTS; frag.endDTS = endDTS; const sn = frag.sn; // 'initSegment' // exit if sn out of range if (!details || sn < details.startSN || sn > details.endSN) { return 0; } let i; const fragIdx = sn - details.startSN; const fragments = details.fragments; // update frag reference in fragments array // rationale is that fragments array might not contain this frag object. 
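// Once the fragment is anchored in the playlist, its measured PTS/DTS is propagated to the
// neighbouring fragments in both directions via updateFromToPTS(), which keeps the playlist
// sliding computation valid; the returned drift is startPTS - frag.start.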
// this will happen if playlist has been refreshed between frag loading and call to updateFragPTSDTS() // if we don't update frag, we won't be able to propagate PTS info on the playlist // resulting in invalid sliding computation fragments[fragIdx] = frag; // adjust fragment PTS/duration from seqnum-1 to frag 0 for (i = fragIdx; i > 0; i--) { updateFromToPTS(fragments[i], fragments[i - 1]); } // adjust fragment PTS/duration from seqnum to last frag for (i = fragIdx; i < fragments.length - 1; i++) { updateFromToPTS(fragments[i], fragments[i + 1]); } if (details.fragmentHint) { updateFromToPTS(fragments[fragments.length - 1], details.fragmentHint); } details.PTSKnown = details.alignedSliding = true; return drift; } function mergeDetails(oldDetails, newDetails) { // Track the last initSegment processed. Initialize it to the last one on the timeline. let currentInitSegment = null; const oldFragments = oldDetails.fragments; for (let i = oldFragments.length - 1; i >= 0; i--) { const oldInit = oldFragments[i].initSegment; if (oldInit) { currentInitSegment = oldInit; break; } } if (oldDetails.fragmentHint) { // prevent PTS and duration from being adjusted on the next hint delete oldDetails.fragmentHint.endPTS; } // check if old/new playlists have fragments in common // loop through overlapping SN and update startPTS , cc, and duration if any found let ccOffset = 0; let PTSFrag; mapFragmentIntersection(oldDetails, newDetails, (oldFrag, newFrag) => { if (oldFrag.relurl) { // Do not compare CC if the old fragment has no url. This is a level.fragmentHint used by LL-HLS parts. // It maybe be off by 1 if it was created before any parts or discontinuity tags were appended to the end // of the playlist. ccOffset = oldFrag.cc - newFrag.cc; } if (isFiniteNumber(oldFrag.startPTS) && isFiniteNumber(oldFrag.endPTS)) { newFrag.start = newFrag.startPTS = oldFrag.startPTS; newFrag.startDTS = oldFrag.startDTS; newFrag.maxStartPTS = oldFrag.maxStartPTS; newFrag.endPTS = oldFrag.endPTS; newFrag.endDTS = oldFrag.endDTS; newFrag.minEndPTS = oldFrag.minEndPTS; newFrag.duration = oldFrag.endPTS - oldFrag.startPTS; if (newFrag.duration) { PTSFrag = newFrag; } // PTS is known when any segment has startPTS and endPTS newDetails.PTSKnown = newDetails.alignedSliding = true; } newFrag.elementaryStreams = oldFrag.elementaryStreams; newFrag.loader = oldFrag.loader; newFrag.stats = oldFrag.stats; newFrag.urlId = oldFrag.urlId; if (oldFrag.initSegment) { newFrag.initSegment = oldFrag.initSegment; currentInitSegment = oldFrag.initSegment; } }); if (currentInitSegment) { const fragmentsToCheck = newDetails.fragmentHint ? newDetails.fragments.concat(newDetails.fragmentHint) : newDetails.fragments; fragmentsToCheck.forEach(frag => { var _currentInitSegment; if (!frag.initSegment || frag.initSegment.relurl === ((_currentInitSegment = currentInitSegment) == null ? 
void 0 : _currentInitSegment.relurl)) { frag.initSegment = currentInitSegment; } }); } if (newDetails.skippedSegments) { newDetails.deltaUpdateFailed = newDetails.fragments.some(frag => !frag); if (newDetails.deltaUpdateFailed) { logger.warn('[level-helper] Previous playlist missing segments skipped in delta playlist'); for (let i = newDetails.skippedSegments; i--;) { newDetails.fragments.shift(); } newDetails.startSN = newDetails.fragments[0].sn; newDetails.startCC = newDetails.fragments[0].cc; } else if (newDetails.canSkipDateRanges) { newDetails.dateRanges = mergeDateRanges(oldDetails.dateRanges, newDetails.dateRanges, newDetails.recentlyRemovedDateranges); } } const newFragments = newDetails.fragments; if (ccOffset) { logger.warn('discontinuity sliding from playlist, take drift into account'); for (let i = 0; i < newFragments.length; i++) { newFragments[i].cc += ccOffset; } } if (newDetails.skippedSegments) { newDetails.startCC = newDetails.fragments[0].cc; } // Merge parts mapPartIntersection(oldDetails.partList, newDetails.partList, (oldPart, newPart) => { newPart.elementaryStreams = oldPart.elementaryStreams; newPart.stats = oldPart.stats; }); // if at least one fragment contains PTS info, recompute PTS information for all fragments if (PTSFrag) { updateFragPTSDTS(newDetails, PTSFrag, PTSFrag.startPTS, PTSFrag.endPTS, PTSFrag.startDTS, PTSFrag.endDTS); } else { // ensure that delta is within oldFragments range // also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61]) // in that case we also need to adjust start offset of all fragments adjustSliding(oldDetails, newDetails); } if (newFragments.length) { newDetails.totalduration = newDetails.edge - newFragments[0].start; } newDetails.driftStartTime = oldDetails.driftStartTime; newDetails.driftStart = oldDetails.driftStart; const advancedDateTime = newDetails.advancedDateTime; if (newDetails.advanced && advancedDateTime) { const edge = newDetails.edge; if (!newDetails.driftStart) { newDetails.driftStartTime = advancedDateTime; newDetails.driftStart = edge; } newDetails.driftEndTime = advancedDateTime; newDetails.driftEnd = edge; } else { newDetails.driftEndTime = oldDetails.driftEndTime; newDetails.driftEnd = oldDetails.driftEnd; newDetails.advancedDateTime = oldDetails.advancedDateTime; } } function mergeDateRanges(oldDateRanges, deltaDateRanges, recentlyRemovedDateranges) { const dateRanges = _extends({}, oldDateRanges); if (recentlyRemovedDateranges) { recentlyRemovedDateranges.forEach(id => { delete dateRanges[id]; }); } Object.keys(deltaDateRanges).forEach(id => { const dateRange = new DateRange(deltaDateRanges[id].attr, dateRanges[id]); if (dateRange.isValid) { dateRanges[id] = dateRange; } else { logger.warn(`Ignoring invalid Playlist Delta Update DATERANGE tag: "${JSON.stringify(deltaDateRanges[id].attr)}"`); } }); return dateRanges; } function mapPartIntersection(oldParts, newParts, intersectionFn) { if (oldParts && newParts) { let delta = 0; for (let i = 0, len = oldParts.length; i <= len; i++) { const oldPart = oldParts[i]; const newPart = newParts[i + delta]; if (oldPart && newPart && oldPart.index === newPart.index && oldPart.fragment.sn === newPart.fragment.sn) { intersectionFn(oldPart, newPart); } else { delta--; } } } } function mapFragmentIntersection(oldDetails, newDetails, intersectionFn) { const skippedSegments = newDetails.skippedSegments; const start = Math.max(oldDetails.startSN, newDetails.startSN) - newDetails.startSN; const end = (oldDetails.fragmentHint ? 
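// mapFragmentIntersection walks the SN range shared by the old and new playlists (filling
// delta-playlist placeholders from the old one) so PTS, stats and init segments carry over.
// computeReloadInterval() further down reloads a live playlist roughly once per target duration
// (or per last-segment duration near the live edge) and halves the interval when the playlist
// did not change.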
1 : 0) + (skippedSegments ? newDetails.endSN : Math.min(oldDetails.endSN, newDetails.endSN)) - newDetails.startSN; const delta = newDetails.startSN - oldDetails.startSN; const newFrags = newDetails.fragmentHint ? newDetails.fragments.concat(newDetails.fragmentHint) : newDetails.fragments; const oldFrags = oldDetails.fragmentHint ? oldDetails.fragments.concat(oldDetails.fragmentHint) : oldDetails.fragments; for (let i = start; i <= end; i++) { const oldFrag = oldFrags[delta + i]; let newFrag = newFrags[i]; if (skippedSegments && !newFrag && i < skippedSegments) { // Fill in skipped segments in delta playlist newFrag = newDetails.fragments[i] = oldFrag; } if (oldFrag && newFrag) { intersectionFn(oldFrag, newFrag); } } } function adjustSliding(oldDetails, newDetails) { const delta = newDetails.startSN + newDetails.skippedSegments - oldDetails.startSN; const oldFragments = oldDetails.fragments; if (delta < 0 || delta >= oldFragments.length) { return; } addSliding(newDetails, oldFragments[delta].start); } function addSliding(details, start) { if (start) { const fragments = details.fragments; for (let i = details.skippedSegments; i < fragments.length; i++) { fragments[i].start += start; } if (details.fragmentHint) { details.fragmentHint.start += start; } } } function computeReloadInterval(newDetails, distanceToLiveEdgeMs = Infinity) { let reloadInterval = 1000 * newDetails.targetduration; if (newDetails.updated) { // Use last segment duration when shorter than target duration and near live edge const fragments = newDetails.fragments; const liveEdgeMaxTargetDurations = 4; if (fragments.length && reloadInterval * liveEdgeMaxTargetDurations > distanceToLiveEdgeMs) { const lastSegmentDuration = fragments[fragments.length - 1].duration * 1000; if (lastSegmentDuration < reloadInterval) { reloadInterval = lastSegmentDuration; } } } else { // estimate = 'miss half average'; // follow HLS Spec, If the client reloads a Playlist file and finds that it has not // changed then it MUST wait for a period of one-half the target // duration before retrying. reloadInterval /= 2; } return Math.round(reloadInterval); } function getFragmentWithSN(level, sn, fragCurrent) { if (!(level != null && level.details)) { return null; } const levelDetails = level.details; let fragment = levelDetails.fragments[sn - levelDetails.startSN]; if (fragment) { return fragment; } fragment = levelDetails.fragmentHint; if (fragment && fragment.sn === sn) { return fragment; } if (sn < levelDetails.startSN && fragCurrent && fragCurrent.sn === sn) { return fragCurrent; } return null; } function getPartWith(level, sn, partIndex) { var _level$details; if (!(level != null && level.details)) { return null; } return findPart((_level$details = level.details) == null ? void 0 : _level$details.partList, sn, partIndex); } function findPart(partList, sn, partIndex) { if (partList) { for (let i = partList.length; i--;) { const part = partList[i]; if (part.index === partIndex && part.fragment.sn === sn) { return part; } } } return null; } function isTimeoutError(error) { switch (error.details) { case ErrorDetails.FRAG_LOAD_TIMEOUT: case ErrorDetails.KEY_LOAD_TIMEOUT: case ErrorDetails.LEVEL_LOAD_TIMEOUT: case ErrorDetails.MANIFEST_LOAD_TIMEOUT: return true; } return false; } function getRetryConfig(loadPolicy, error) { const isTimeout = isTimeoutError(error); return loadPolicy.default[`${isTimeout ? 
'timeout' : 'error'}Retry`]; } function getRetryDelay(retryConfig, retryCount) { // exponential backoff capped to max retry delay const backoffFactor = retryConfig.backoff === 'linear' ? 1 : Math.pow(2, retryCount); return Math.min(backoffFactor * retryConfig.retryDelayMs, retryConfig.maxRetryDelayMs); } function getLoaderConfigWithoutReties(loderConfig) { return _objectSpread2(_objectSpread2({}, loderConfig), { errorRetry: null, timeoutRetry: null }); } function shouldRetry(retryConfig, retryCount, isTimeout, httpStatus) { return !!retryConfig && retryCount < retryConfig.maxNumRetry && (retryForHttpStatus(httpStatus) || !!isTimeout); } function retryForHttpStatus(httpStatus) { // Do not retry on status 4xx, status 0 (CORS error), or undefined (decrypt/gap/parse error) return httpStatus === 0 && navigator.onLine === false || !!httpStatus && (httpStatus < 400 || httpStatus > 499); } const BinarySearch = { /** * Searches for an item in an array which matches a certain condition. * This requires the condition to only match one item in the array, * and for the array to be ordered. * * @param list The array to search. * @param comparisonFn * Called and provided a candidate item as the first argument. * Should return: * > -1 if the item should be located at a lower index than the provided item. * > 1 if the item should be located at a higher index than the provided item. * > 0 if the item is the item you're looking for. * * @returns the object if found, otherwise returns null */ search: function (list, comparisonFn) { let minIndex = 0; let maxIndex = list.length - 1; let currentIndex = null; let currentElement = null; while (minIndex <= maxIndex) { currentIndex = (minIndex + maxIndex) / 2 | 0; currentElement = list[currentIndex]; const comparisonResult = comparisonFn(currentElement); if (comparisonResult > 0) { minIndex = currentIndex + 1; } else if (comparisonResult < 0) { maxIndex = currentIndex - 1; } else { return currentElement; } } return null; } }; /** * Returns first fragment whose endPdt value exceeds the given PDT, or null. * @param fragments - The array of candidate fragments * @param PDTValue - The PDT value which must be exceeded * @param maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous */ function findFragmentByPDT(fragments, PDTValue, maxFragLookUpTolerance) { if (PDTValue === null || !Array.isArray(fragments) || !fragments.length || !isFiniteNumber(PDTValue)) { return null; } // if less than start const startPDT = fragments[0].programDateTime; if (PDTValue < (startPDT || 0)) { return null; } const endPDT = fragments[fragments.length - 1].endProgramDateTime; if (PDTValue >= (endPDT || 0)) { return null; } maxFragLookUpTolerance = maxFragLookUpTolerance || 0; for (let seg = 0; seg < fragments.length; ++seg) { const frag = fragments[seg]; if (pdtWithinToleranceTest(PDTValue, maxFragLookUpTolerance, frag)) { return frag; } } return null; } /** * Finds a fragment based on the SN of the previous fragment; or based on the needs of the current buffer. * This method compensates for small buffer gaps by applying a tolerance to the start of any candidate fragment, thus * breaking any traps which would cause the same fragment to be continuously selected within a small range. 
* @param fragPrevious - The last frag successfully appended * @param fragments - The array of candidate fragments * @param bufferEnd - The end of the contiguous buffered range the playhead is currently within * @param maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous * @returns a matching fragment or null */ function findFragmentByPTS(fragPrevious, fragments, bufferEnd = 0, maxFragLookUpTolerance = 0) { let fragNext = null; if (fragPrevious) { fragNext = fragments[fragPrevious.sn - fragments[0].sn + 1] || null; } else if (bufferEnd === 0 && fragments[0].start === 0) { fragNext = fragments[0]; } // Prefer the next fragment if it's within tolerance if (fragNext && fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, fragNext) === 0) { return fragNext; } // We might be seeking past the tolerance so find the best match const foundFragment = BinarySearch.search(fragments, fragmentWithinToleranceTest.bind(null, bufferEnd, maxFragLookUpTolerance)); if (foundFragment && (foundFragment !== fragPrevious || !fragNext)) { return foundFragment; } // If no match was found return the next fragment after fragPrevious, or null return fragNext; } /** * The test function used by the findFragmentBySn's BinarySearch to look for the best match to the current buffer conditions. * @param candidate - The fragment to test * @param bufferEnd - The end of the current buffered range the playhead is currently within * @param maxFragLookUpTolerance - The amount of time that a fragment's start can be within in order to be considered contiguous * @returns 0 if it matches, 1 if too low, -1 if too high */ function fragmentWithinToleranceTest(bufferEnd = 0, maxFragLookUpTolerance = 0, candidate) { // eagerly accept an accurate match (no tolerance) if (candidate.start <= bufferEnd && candidate.start + candidate.duration > bufferEnd) { return 0; } // offset should be within fragment boundary - config.maxFragLookUpTolerance // this is to cope with situations like // bufferEnd = 9.991 // frag[Ø] : [0,10] // frag[1] : [10,20] // bufferEnd is within frag[0] range ... although what we are expecting is to return frag[1] here // frag start frag start+duration // |-----------------------------| // <---> <---> // ...--------><-----------------------------><---------.... // previous frag matching fragment next frag // return -1 return 0 return 1 // logger.log(`level/sn/start/end/bufEnd:${level}/${candidate.sn}/${candidate.start}/${(candidate.start+candidate.duration)}/${bufferEnd}`); // Set the lookup tolerance to be small enough to detect the current segment - ensures we don't skip over very small segments const candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0)); if (candidate.start + candidate.duration - candidateLookupTolerance <= bufferEnd) { return 1; } else if (candidate.start - candidateLookupTolerance > bufferEnd && candidate.start) { // if maxFragLookUpTolerance will have negative value then don't return -1 for first element return -1; } return 0; } /** * The test function used by the findFragmentByPdt's BinarySearch to look for the best match to the current buffer conditions. 
* This function tests the candidate's program date time values, as represented in Unix time * @param candidate - The fragment to test * @param pdtBufferEnd - The Unix time representing the end of the current buffered range * @param maxFragLookUpTolerance - The amount of time that a fragment's start can be within in order to be considered contiguous * @returns true if contiguous, false otherwise */ function pdtWithinToleranceTest(pdtBufferEnd, maxFragLookUpTolerance, candidate) { const candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0)) * 1000; // endProgramDateTime can be null, default to zero const endProgramDateTime = candidate.endProgramDateTime || 0; return endProgramDateTime - candidateLookupTolerance > pdtBufferEnd; } function findFragWithCC(fragments, cc) { return BinarySearch.search(fragments, candidate => { if (candidate.cc < cc) { return 1; } else if (candidate.cc > cc) { return -1; } else { return 0; } }); } const RENDITION_PENALTY_DURATION_MS = 300000; var NetworkErrorAction = { DoNothing: 0, SendEndCallback: 1, SendAlternateToPenaltyBox: 2, RemoveAlternatePermanently: 3, InsertDiscontinuity: 4, RetryRequest: 5 }; var ErrorActionFlags = { None: 0, MoveAllAlternatesMatchingHost: 1, MoveAllAlternatesMatchingHDCP: 2, SwitchToSDR: 4 }; // Reserved for future use class ErrorController { constructor(hls) { this.hls = void 0; this.playlistError = 0; this.penalizedRenditions = {}; this.log = void 0; this.warn = void 0; this.error = void 0; this.hls = hls; this.log = logger.log.bind(logger, `[info]:`); this.warn = logger.warn.bind(logger, `[warning]:`); this.error = logger.error.bind(logger, `[error]:`); this.registerListeners(); } registerListeners() { const hls = this.hls; hls.on(Events.ERROR, this.onError, this); hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this); } unregisterListeners() { const hls = this.hls; if (!hls) { return; } hls.off(Events.ERROR, this.onError, this); hls.off(Events.ERROR, this.onErrorOut, this); hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this); } destroy() { this.unregisterListeners(); // @ts-ignore this.hls = null; this.penalizedRenditions = {}; } startLoad(startPosition) { this.playlistError = 0; } stopLoad() {} getVariantLevelIndex(frag) { return (frag == null ? void 0 : frag.type) === PlaylistLevelType.MAIN ? 
frag.level : this.hls.loadLevel; } onManifestLoading() { this.playlistError = 0; this.penalizedRenditions = {}; } onLevelUpdated() { this.playlistError = 0; } onError(event, data) { var _data$frag, _data$level; if (data.fatal) { return; } const hls = this.hls; const context = data.context; switch (data.details) { case ErrorDetails.FRAG_LOAD_ERROR: case ErrorDetails.FRAG_LOAD_TIMEOUT: case ErrorDetails.KEY_LOAD_ERROR: case ErrorDetails.KEY_LOAD_TIMEOUT: data.errorAction = this.getFragRetryOrSwitchAction(data); return; case ErrorDetails.FRAG_PARSING_ERROR: // ignore empty segment errors marked as gap if ((_data$frag = data.frag) != null && _data$frag.gap) { data.errorAction = { action: NetworkErrorAction.DoNothing, flags: ErrorActionFlags.None }; return; } // falls through case ErrorDetails.FRAG_GAP: case ErrorDetails.FRAG_DECRYPT_ERROR: { // Switch level if possible, otherwise allow retry count to reach max error retries data.errorAction = this.getFragRetryOrSwitchAction(data); data.errorAction.action = NetworkErrorAction.SendAlternateToPenaltyBox; return; } case ErrorDetails.LEVEL_EMPTY_ERROR: case ErrorDetails.LEVEL_PARSING_ERROR: { var _data$context, _data$context$levelDe; // Only retry when empty and live const levelIndex = data.parent === PlaylistLevelType.MAIN ? data.level : hls.loadLevel; if (data.details === ErrorDetails.LEVEL_EMPTY_ERROR && !!((_data$context = data.context) != null && (_data$context$levelDe = _data$context.levelDetails) != null && _data$context$levelDe.live)) { data.errorAction = this.getPlaylistRetryOrSwitchAction(data, levelIndex); } else { // Escalate to fatal if not retrying or switching data.levelRetry = false; data.errorAction = this.getLevelSwitchAction(data, levelIndex); } } return; case ErrorDetails.LEVEL_LOAD_ERROR: case ErrorDetails.LEVEL_LOAD_TIMEOUT: if (typeof (context == null ? void 0 : context.level) === 'number') { data.errorAction = this.getPlaylistRetryOrSwitchAction(data, context.level); } return; case ErrorDetails.AUDIO_TRACK_LOAD_ERROR: case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT: case ErrorDetails.SUBTITLE_LOAD_ERROR: case ErrorDetails.SUBTITLE_TRACK_LOAD_TIMEOUT: if (context) { const level = hls.levels[hls.loadLevel]; if (level && (context.type === PlaylistContextType.AUDIO_TRACK && context.groupId === level.audioGroupId || context.type === PlaylistContextType.SUBTITLE_TRACK && context.groupId === level.textGroupId)) { // Perform Pathway switch or Redundant failover if possible for fastest recovery // otherwise allow playlist retry count to reach max error retries data.errorAction = this.getPlaylistRetryOrSwitchAction(data, hls.loadLevel); data.errorAction.action = NetworkErrorAction.SendAlternateToPenaltyBox; data.errorAction.flags = ErrorActionFlags.MoveAllAlternatesMatchingHost; return; } } return; case ErrorDetails.KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED: { const level = hls.levels[hls.loadLevel]; const restrictedHdcpLevel = level == null ? void 0 : level.attrs['HDCP-LEVEL']; if (restrictedHdcpLevel) { data.errorAction = { action: NetworkErrorAction.SendAlternateToPenaltyBox, flags: ErrorActionFlags.MoveAllAlternatesMatchingHDCP, hdcpLevel: restrictedHdcpLevel }; } } return; case ErrorDetails.BUFFER_ADD_CODEC_ERROR: case ErrorDetails.REMUX_ALLOC_ERROR: data.errorAction = this.getLevelSwitchAction(data, (_data$level = data.level) != null ? 
_data$level : hls.loadLevel); return; case ErrorDetails.INTERNAL_EXCEPTION: case ErrorDetails.BUFFER_APPENDING_ERROR: case ErrorDetails.BUFFER_APPEND_ERROR: case ErrorDetails.BUFFER_FULL_ERROR: case ErrorDetails.LEVEL_SWITCH_ERROR: case ErrorDetails.BUFFER_STALLED_ERROR: case ErrorDetails.BUFFER_SEEK_OVER_HOLE: case ErrorDetails.BUFFER_NUDGE_ON_STALL: data.errorAction = { action: NetworkErrorAction.DoNothing, flags: ErrorActionFlags.None }; return; } if (data.type === ErrorTypes.KEY_SYSTEM_ERROR) { const levelIndex = this.getVariantLevelIndex(data.frag); // Do not retry level. Escalate to fatal if switching levels fails. data.levelRetry = false; data.errorAction = this.getLevelSwitchAction(data, levelIndex); return; } } getPlaylistRetryOrSwitchAction(data, levelIndex) { var _data$response; const hls = this.hls; const retryConfig = getRetryConfig(hls.config.playlistLoadPolicy, data); const retryCount = this.playlistError++; const httpStatus = (_data$response = data.response) == null ? void 0 : _data$response.code; const retry = shouldRetry(retryConfig, retryCount, isTimeoutError(data), httpStatus); if (retry) { return { action: NetworkErrorAction.RetryRequest, flags: ErrorActionFlags.None, retryConfig, retryCount }; } const errorAction = this.getLevelSwitchAction(data, levelIndex); if (retryConfig) { errorAction.retryConfig = retryConfig; errorAction.retryCount = retryCount; } return errorAction; } getFragRetryOrSwitchAction(data) { const hls = this.hls; // Share fragment error count accross media options (main, audio, subs) // This allows for level based rendition switching when media option assets fail const variantLevelIndex = this.getVariantLevelIndex(data.frag); const level = hls.levels[variantLevelIndex]; const { fragLoadPolicy, keyLoadPolicy } = hls.config; const retryConfig = getRetryConfig(data.details.startsWith('key') ? keyLoadPolicy : fragLoadPolicy, data); const fragmentErrors = hls.levels.reduce((acc, level) => acc + level.fragmentError, 0); // Switch levels when out of retried or level index out of bounds if (level) { var _data$response2; if (data.details !== ErrorDetails.FRAG_GAP) { level.fragmentError++; } const httpStatus = (_data$response2 = data.response) == null ? void 0 : _data$response2.code; const retry = shouldRetry(retryConfig, fragmentErrors, isTimeoutError(data), httpStatus); if (retry) { return { action: NetworkErrorAction.RetryRequest, flags: ErrorActionFlags.None, retryConfig, retryCount: fragmentErrors }; } } // Reach max retry count, or Missing level reference // Switch to valid index const errorAction = this.getLevelSwitchAction(data, variantLevelIndex); // Add retry details to allow skipping of FRAG_PARSING_ERROR if (retryConfig) { errorAction.retryConfig = retryConfig; errorAction.retryCount = fragmentErrors; } return errorAction; } getLevelSwitchAction(data, levelIndex) { const hls = this.hls; if (levelIndex === null || levelIndex === undefined) { levelIndex = hls.loadLevel; } const level = this.hls.levels[levelIndex]; if (level) { level.loadError++; if (hls.autoLevelEnabled) { var _data$frag2, _data$context2; // Search for next level to retry let nextLevel = -1; const { levels, loadLevel, minAutoLevel, maxAutoLevel } = hls; const fragErrorType = (_data$frag2 = data.frag) == null ? void 0 : _data$frag2.type; const { type: playlistErrorType, groupId: playlistErrorGroupId } = (_data$context2 = data.context) != null ? 
_data$context2 : {}; for (let i = levels.length; i--;) { const candidate = (i + loadLevel) % levels.length; if (candidate !== loadLevel && candidate >= minAutoLevel && candidate <= maxAutoLevel && levels[candidate].loadError === 0) { const levelCandidate = levels[candidate]; // Skip level switch if GAP tag is found in next level at same position if (data.details === ErrorDetails.FRAG_GAP && data.frag) { const levelDetails = levels[candidate].details; if (levelDetails) { const fragCandidate = findFragmentByPTS(data.frag, levelDetails.fragments, data.frag.start); if (fragCandidate != null && fragCandidate.gap) { continue; } } } else if (playlistErrorType === PlaylistContextType.AUDIO_TRACK && playlistErrorGroupId === levelCandidate.audioGroupId || playlistErrorType === PlaylistContextType.SUBTITLE_TRACK && playlistErrorGroupId === levelCandidate.textGroupId) { // For audio/subs playlist errors find another group ID or fallthrough to redundant fail-over continue; } else if (fragErrorType === PlaylistLevelType.AUDIO && level.audioGroupId === levelCandidate.audioGroupId || fragErrorType === PlaylistLevelType.SUBTITLE && level.textGroupId === levelCandidate.textGroupId) { // For audio/subs frag errors find another group ID or fallthrough to redundant fail-over continue; } nextLevel = candidate; break; } } if (nextLevel > -1 && hls.loadLevel !== nextLevel) { data.levelRetry = true; this.playlistError = 0; return { action: NetworkErrorAction.SendAlternateToPenaltyBox, flags: ErrorActionFlags.None, nextAutoLevel: nextLevel }; } } } // No levels to switch / Manual level selection / Level not found // Resolve with Pathway switch, Redundant fail-over, or stay on lowest Level return { action: NetworkErrorAction.SendAlternateToPenaltyBox, flags: ErrorActionFlags.MoveAllAlternatesMatchingHost }; } onErrorOut(event, data) { var _data$errorAction; switch ((_data$errorAction = data.errorAction) == null ? void 0 : _data$errorAction.action) { case NetworkErrorAction.DoNothing: break; case NetworkErrorAction.SendAlternateToPenaltyBox: this.sendAlternateToPenaltyBox(data); if (!data.errorAction.resolved && data.details !== ErrorDetails.FRAG_GAP) { data.fatal = true; } break; } if (data.fatal) { this.hls.stopLoad(); return; } } sendAlternateToPenaltyBox(data) { const hls = this.hls; const errorAction = data.errorAction; if (!errorAction) { return; } const { flags, hdcpLevel, nextAutoLevel } = errorAction; switch (flags) { case ErrorActionFlags.None: this.switchLevel(data, nextAutoLevel); break; case ErrorActionFlags.MoveAllAlternatesMatchingHost: { // Handle Redundant Levels here. 
Pathway switching is handled by content-steering-controller if (!errorAction.resolved) { errorAction.resolved = this.redundantFailover(data); } } break; case ErrorActionFlags.MoveAllAlternatesMatchingHDCP: if (hdcpLevel) { hls.maxHdcpLevel = HdcpLevels[HdcpLevels.indexOf(hdcpLevel) - 1]; errorAction.resolved = true; } this.warn(`Restricting playback to HDCP-LEVEL of "${hls.maxHdcpLevel}" or lower`); break; } // If not resolved by previous actions try to switch to next level if (!errorAction.resolved) { this.switchLevel(data, nextAutoLevel); } } switchLevel(data, levelIndex) { if (levelIndex !== undefined && data.errorAction) { this.warn(`switching to level ${levelIndex} after ${data.details}`); this.hls.nextAutoLevel = levelIndex; data.errorAction.resolved = true; // Stream controller is responsible for this but won't switch on false start this.hls.nextLoadLevel = this.hls.nextAutoLevel; } } redundantFailover(data) { const { hls, penalizedRenditions } = this; const levelIndex = data.parent === PlaylistLevelType.MAIN ? data.level : hls.loadLevel; const level = hls.levels[levelIndex]; const redundantLevels = level.url.length; const errorUrlId = data.frag ? data.frag.urlId : level.urlId; if (level.urlId === errorUrlId && (!data.frag || level.details)) { this.penalizeRendition(level, data); } for (let i = 1; i < redundantLevels; i++) { const newUrlId = (errorUrlId + i) % redundantLevels; const penalizedRendition = penalizedRenditions[newUrlId]; // Check if rendition is penalized and skip if it is a bad fit for failover if (!penalizedRendition || checkExpired(penalizedRendition, data, penalizedRenditions[errorUrlId])) { // delete penalizedRenditions[newUrlId]; // Update the url id of all levels so that we stay on the same set of variants when level switching this.warn(`Switching to Redundant Stream ${newUrlId + 1}/${redundantLevels}: "${level.url[newUrlId]}" after ${data.details}`); this.playlistError = 0; hls.levels.forEach(lv => { lv.urlId = newUrlId; }); hls.nextLoadLevel = levelIndex; return true; } } return false; } penalizeRendition(level, data) { const { penalizedRenditions } = this; const penalizedRendition = penalizedRenditions[level.urlId] || { lastErrorPerfMs: 0, errors: [], details: undefined }; penalizedRendition.lastErrorPerfMs = performance.now(); penalizedRendition.errors.push(data); penalizedRendition.details = level.details; penalizedRenditions[level.urlId] = penalizedRendition; } } function checkExpired(penalizedRendition, data, currentPenaltyState) { // Expire penalty for switching back to rendition after RENDITION_PENALTY_DURATION_MS if (performance.now() - penalizedRendition.lastErrorPerfMs > RENDITION_PENALTY_DURATION_MS) { return true; } // Expire penalty on GAP tag error if rendition has no GAP at position (does not cover media tracks) const lastErrorDetails = penalizedRendition.details; if (data.details === ErrorDetails.FRAG_GAP && lastErrorDetails && data.frag) { const position = data.frag.start; const candidateFrag = findFragmentByPTS(null, lastErrorDetails.fragments, position); if (candidateFrag && !candidateFrag.gap) { return true; } } // Expire penalty if there are more errors in currentLevel than in penalizedRendition if (currentPenaltyState && penalizedRendition.errors.length < currentPenaltyState.errors.length) { const lastCandidateError = penalizedRendition.errors[penalizedRendition.errors.length - 1]; if (lastErrorDetails && lastCandidateError.frag && data.frag && Math.abs(lastCandidateError.frag.start - data.frag.start) > lastErrorDetails.targetduration * 3) 
{ return true; } } return false; } class BasePlaylistController { constructor(hls, logPrefix) { this.hls = void 0; this.timer = -1; this.requestScheduled = -1; this.canLoad = false; this.log = void 0; this.warn = void 0; this.log = logger.log.bind(logger, `${logPrefix}:`); this.warn = logger.warn.bind(logger, `${logPrefix}:`); this.hls = hls; } destroy() { this.clearTimer(); // @ts-ignore this.hls = this.log = this.warn = null; } clearTimer() { clearTimeout(this.timer); this.timer = -1; } startLoad() { this.canLoad = true; this.requestScheduled = -1; this.loadPlaylist(); } stopLoad() { this.canLoad = false; this.clearTimer(); } switchParams(playlistUri, previous) { const renditionReports = previous == null ? void 0 : previous.renditionReports; if (renditionReports) { let foundIndex = -1; for (let i = 0; i < renditionReports.length; i++) { const attr = renditionReports[i]; let uri; try { uri = new self.URL(attr.URI, previous.url).href; } catch (error) { logger.warn(`Could not construct new URL for Rendition Report: ${error}`); uri = attr.URI || ''; } // Use exact match. Otherwise, the last partial match, if any, will be used // (Playlist URI includes a query string that the Rendition Report does not) if (uri === playlistUri) { foundIndex = i; break; } else if (uri === playlistUri.substring(0, uri.length)) { foundIndex = i; } } if (foundIndex !== -1) { const attr = renditionReports[foundIndex]; const msn = parseInt(attr['LAST-MSN']) || (previous == null ? void 0 : previous.lastPartSn); let part = parseInt(attr['LAST-PART']) || (previous == null ? void 0 : previous.lastPartIndex); if (this.hls.config.lowLatencyMode) { const currentGoal = Math.min(previous.age - previous.partTarget, previous.targetduration); if (part >= 0 && currentGoal > previous.partTarget) { part += 1; } } return new HlsUrlParameters(msn, part >= 0 ? part : undefined, HlsSkip.No); } } } loadPlaylist(hlsUrlParameters) { if (this.requestScheduled === -1) { this.requestScheduled = self.performance.now(); } // Loading is handled by the subclasses } shouldLoadPlaylist(playlist) { return this.canLoad && !!playlist && !!playlist.url && (!playlist.details || playlist.details.live); } shouldReloadPlaylist(playlist) { return this.timer === -1 && this.requestScheduled === -1 && this.shouldLoadPlaylist(playlist); } playlistLoaded(index, data, previousDetails) { const { details, stats } = data; // Set last updated date-time const now = self.performance.now(); const elapsed = stats.loading.first ? Math.max(0, now - stats.loading.first) : 0; details.advancedDateTime = Date.now() - elapsed; // if current playlist is a live playlist, arm a timer to reload it if (details.live || previousDetails != null && previousDetails.live) { details.reloaded(previousDetails); if (previousDetails) { this.log(`live playlist ${index} ${details.advanced ? 'REFRESHED ' + details.lastPartSn + '-' + details.lastPartIndex : details.updated ? 
'UPDATED' : 'MISSED'}`); } // Merge live playlists to adjust fragment starts and fill in delta playlist skipped segments if (previousDetails && details.fragments.length > 0) { mergeDetails(previousDetails, details); } if (!this.canLoad || !details.live) { return; } let deliveryDirectives; let msn = undefined; let part = undefined; if (details.canBlockReload && details.endSN && details.advanced) { // Load level with LL-HLS delivery directives const lowLatencyMode = this.hls.config.lowLatencyMode; const lastPartSn = details.lastPartSn; const endSn = details.endSN; const lastPartIndex = details.lastPartIndex; const hasParts = lastPartIndex !== -1; const lastPart = lastPartSn === endSn; // When low latency mode is disabled, we'll skip part requests once the last part index is found const nextSnStartIndex = lowLatencyMode ? 0 : lastPartIndex; if (hasParts) { msn = lastPart ? endSn + 1 : lastPartSn; part = lastPart ? nextSnStartIndex : lastPartIndex + 1; } else { msn = endSn + 1; } // Low-Latency CDN Tune-in: "age" header and time since load indicates we're behind by more than one part // Update directives to obtain the Playlist that has the estimated additional duration of media const lastAdvanced = details.age; const cdnAge = lastAdvanced + details.ageHeader; let currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5); if (currentGoal > 0) { if (previousDetails && currentGoal > previousDetails.tuneInGoal) { // If we attempted to get the next or latest playlist update, but currentGoal increased, // then we either can't catchup, or the "age" header cannot be trusted. this.warn(`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`); currentGoal = 0; } else { const segments = Math.floor(currentGoal / details.targetduration); msn += segments; if (part !== undefined) { const parts = Math.round(currentGoal % details.targetduration / details.partTarget); part += parts; } this.log(`CDN Tune-in age: ${details.ageHeader}s last advanced ${lastAdvanced.toFixed(2)}s goal: ${currentGoal} skip sn ${segments} to part ${part}`); } details.tuneInGoal = currentGoal; } deliveryDirectives = this.getDeliveryDirectives(details, data.deliveryDirectives, msn, part); if (lowLatencyMode || !lastPart) { this.loadPlaylist(deliveryDirectives); return; } } else if (details.canBlockReload || details.canSkipUntil) { deliveryDirectives = this.getDeliveryDirectives(details, data.deliveryDirectives, msn, part); } const bufferInfo = this.hls.mainForwardBufferInfo; const position = bufferInfo ? bufferInfo.end - bufferInfo.len : 0; const distanceToLiveEdgeMs = (details.edge - position) * 1000; const reloadInterval = computeReloadInterval(details, distanceToLiveEdgeMs); if (details.updated && now > this.requestScheduled + reloadInterval) { this.requestScheduled = stats.loading.start; } if (msn !== undefined && details.canBlockReload) { this.requestScheduled = stats.loading.first + reloadInterval - (details.partTarget * 1000 || 1000); } else if (this.requestScheduled === -1 || this.requestScheduled + reloadInterval < now) { this.requestScheduled = now; } else if (this.requestScheduled - now <= 0) { this.requestScheduled += reloadInterval; } let estimatedTimeUntilUpdate = this.requestScheduled - now; estimatedTimeUntilUpdate = Math.max(0, estimatedTimeUntilUpdate); this.log(`reload live playlist ${index} in ${Math.round(estimatedTimeUntilUpdate)} ms`); // this.log( // `live reload ${details.updated ? 
'REFRESHED' : 'MISSED'} // reload in ${estimatedTimeUntilUpdate / 1000} // round trip ${(stats.loading.end - stats.loading.start) / 1000} // diff ${ // (reloadInterval - // (estimatedTimeUntilUpdate + // stats.loading.end - // stats.loading.start)) / // 1000 // } // reload interval ${reloadInterval / 1000} // target duration ${details.targetduration} // distance to edge ${distanceToLiveEdgeMs / 1000}` // ); this.timer = self.setTimeout(() => this.loadPlaylist(deliveryDirectives), estimatedTimeUntilUpdate); } else { this.clearTimer(); } } getDeliveryDirectives(details, previousDeliveryDirectives, msn, part) { let skip = getSkipValue(details, msn); if (previousDeliveryDirectives != null && previousDeliveryDirectives.skip && details.deltaUpdateFailed) { msn = previousDeliveryDirectives.msn; part = previousDeliveryDirectives.part; skip = HlsSkip.No; } return new HlsUrlParameters(msn, part, skip); } checkRetry(errorEvent) { const errorDetails = errorEvent.details; const isTimeout = isTimeoutError(errorEvent); const errorAction = errorEvent.errorAction; const { action, retryCount = 0, retryConfig } = errorAction || {}; const retry = !!errorAction && !!retryConfig && (action === NetworkErrorAction.RetryRequest || !errorAction.resolved && action === NetworkErrorAction.SendAlternateToPenaltyBox); if (retry) { var _errorEvent$context; this.requestScheduled = -1; if (retryCount >= retryConfig.maxNumRetry) { return false; } if (isTimeout && (_errorEvent$context = errorEvent.context) != null && _errorEvent$context.deliveryDirectives) { // The LL-HLS request already timed out so retry immediately this.warn(`Retrying playlist loading ${retryCount + 1}/${retryConfig.maxNumRetry} after "${errorDetails}" without delivery-directives`); this.loadPlaylist(); } else { const delay = getRetryDelay(retryConfig, retryCount); // Schedule level/track reload this.timer = self.setTimeout(() => this.loadPlaylist(), delay); this.warn(`Retrying playlist loading ${retryCount + 1}/${retryConfig.maxNumRetry} after "${errorDetails}" in ${delay}ms`); } // `levelRetry = true` used to inform other controllers that a retry is happening errorEvent.levelRetry = true; errorAction.resolved = true; } return retry; } } let chromeOrFirefox; class LevelController extends BasePlaylistController { constructor(hls, contentSteeringController) { super(hls, '[level-controller]'); this._levels = []; this._firstLevel = -1; this._startLevel = void 0; this.currentLevel = null; this.currentLevelIndex = -1; this.manualLevelIndex = -1; this.steering = void 0; this.onParsedComplete = void 0; this.steering = contentSteeringController; this._registerListeners(); } _registerListeners() { const { hls } = this; hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this); hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this); hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this); hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this); hls.on(Events.FRAG_LOADED, this.onFragLoaded, this); hls.on(Events.ERROR, this.onError, this); } _unregisterListeners() { const { hls } = this; hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this); hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this); hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this); hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this); hls.off(Events.FRAG_LOADED, this.onFragLoaded, this); hls.off(Events.ERROR, this.onError, this); 
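// Illustrative note on BasePlaylistController#checkRetry above (not part of the
// original code): with a hypothetical playlist load policy such as
//   const policy = { maxNumRetry: 4, retryDelayMs: 1000, maxRetryDelayMs: 8000, backoff: 'exponential' };
// getRetryDelay() yields an exponential, capped reload schedule:
//   getRetryDelay(policy, 0) === 1000   // 2^0 * 1000
//   getRetryDelay(policy, 1) === 2000   // 2^1 * 1000
//   getRetryDelay(policy, 2) === 4000   // 2^2 * 1000
//   getRetryDelay(policy, 3) === 8000   // capped by maxRetryDelayMs
// With backoff: 'linear' every delay stays at retryDelayMs (1000 ms).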
} destroy() { this._unregisterListeners(); this.steering = null; this.resetLevels(); super.destroy(); } startLoad() { const levels = this._levels; // clean up live level details to force reload them, and reset load errors levels.forEach(level => { level.loadError = 0; level.fragmentError = 0; }); super.startLoad(); } resetLevels() { this._startLevel = undefined; this.manualLevelIndex = -1; this.currentLevelIndex = -1; this.currentLevel = null; this._levels = []; } onManifestLoading(event, data) { this.resetLevels(); } onManifestLoaded(event, data) { const levels = []; const levelSet = {}; let levelFromSet; // regroup redundant levels together data.levels.forEach(levelParsed => { var _levelParsed$audioCod; const attributes = levelParsed.attrs; // erase audio codec info if browser does not support mp4a.40.34. // demuxer will autodetect codec and fallback to mpeg/audio if (((_levelParsed$audioCod = levelParsed.audioCodec) == null ? void 0 : _levelParsed$audioCod.indexOf('mp4a.40.34')) !== -1) { chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent)); if (chromeOrFirefox) { levelParsed.audioCodec = undefined; } } const { AUDIO, CODECS, 'FRAME-RATE': FRAMERATE, 'PATHWAY-ID': PATHWAY, RESOLUTION, SUBTITLES } = attributes; const contentSteeringPrefix = `${PATHWAY || '.'}-` ; const levelKey = `${contentSteeringPrefix}${levelParsed.bitrate}-${RESOLUTION}-${FRAMERATE}-${CODECS}`; levelFromSet = levelSet[levelKey]; if (!levelFromSet) { levelFromSet = new Level(levelParsed); levelSet[levelKey] = levelFromSet; levels.push(levelFromSet); } else { levelFromSet.addFallback(levelParsed); } addGroupId(levelFromSet, 'audio', AUDIO); addGroupId(levelFromSet, 'text', SUBTITLES); }); this.filterAndSortMediaOptions(levels, data); } filterAndSortMediaOptions(unfilteredLevels, data) { let audioTracks = []; let subtitleTracks = []; let resolutionFound = false; let videoCodecFound = false; let audioCodecFound = false; // only keep levels with supported audio/video codecs let levels = unfilteredLevels.filter(({ audioCodec, videoCodec, width, height, unknownCodecs }) => { resolutionFound || (resolutionFound = !!(width && height)); videoCodecFound || (videoCodecFound = !!videoCodec); audioCodecFound || (audioCodecFound = !!audioCodec); return !(unknownCodecs != null && unknownCodecs.length) && (!audioCodec || isCodecSupportedInMp4(audioCodec, 'audio')) && (!videoCodec || isCodecSupportedInMp4(videoCodec, 'video')); }); // remove audio-only level if we also have levels with video codecs or RESOLUTION signalled if ((resolutionFound || videoCodecFound) && audioCodecFound) { levels = levels.filter(({ videoCodec, width, height }) => !!videoCodec || !!(width && height)); } if (levels.length === 0) { // Dispatch error after MANIFEST_LOADED is done propagating Promise.resolve().then(() => { if (this.hls) { const error = new Error('no level with compatible codecs found in manifest'); this.hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.MANIFEST_INCOMPATIBLE_CODECS_ERROR, fatal: true, url: data.url, error, reason: error.message }); } }); return; } if (data.audioTracks) { audioTracks = data.audioTracks.filter(track => !track.audioCodec || isCodecSupportedInMp4(track.audioCodec, 'audio')); // Assign ids after filtering as array indices by group-id assignTrackIdsByGroup(audioTracks); } if (data.subtitles) { subtitleTracks = data.subtitles; assignTrackIdsByGroup(subtitleTracks); } // start bitrate is the first bitrate of the manifest const unsortedLevels = levels.slice(0); 
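// Illustrative sketch (not part of the original code): the comparator below orders
// levels by HDCP-LEVEL, then bitrate, then FRAME-RATE, then SCORE, then height.
// For two hypothetical variants that only differ in bitrate/resolution, e.g.
//   const demo = [
//     { bitrate: 2500000, height: 720, attrs: { 'HDCP-LEVEL': '' } },
//     { bitrate: 800000,  height: 360, attrs: { 'HDCP-LEVEL': '' } }
//   ];
// the comparison stops at the bitrate check, so the 800 kbps / 360p entry ends up
// at index 0, i.e. levels[0] is the lowest rendition after sorting.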
// sort levels from lowest to highest levels.sort((a, b) => { if (a.attrs['HDCP-LEVEL'] !== b.attrs['HDCP-LEVEL']) { return (a.attrs['HDCP-LEVEL'] || '') > (b.attrs['HDCP-LEVEL'] || '') ? 1 : -1; } if (a.bitrate !== b.bitrate) { return a.bitrate - b.bitrate; } if (a.attrs['FRAME-RATE'] !== b.attrs['FRAME-RATE']) { return a.attrs.decimalFloatingPoint('FRAME-RATE') - b.attrs.decimalFloatingPoint('FRAME-RATE'); } if (a.attrs.SCORE !== b.attrs.SCORE) { return a.attrs.decimalFloatingPoint('SCORE') - b.attrs.decimalFloatingPoint('SCORE'); } if (resolutionFound && a.height !== b.height) { return a.height - b.height; } return 0; }); let firstLevelInPlaylist = unsortedLevels[0]; if (this.steering) { levels = this.steering.filterParsedLevels(levels); if (levels.length !== unsortedLevels.length) { for (let i = 0; i < unsortedLevels.length; i++) { if (unsortedLevels[i].pathwayId === levels[0].pathwayId) { firstLevelInPlaylist = unsortedLevels[i]; break; } } } } this._levels = levels; // find index of first level in sorted levels for (let i = 0; i < levels.length; i++) { if (levels[i] === firstLevelInPlaylist) { this._firstLevel = i; this.log(`manifest loaded, ${levels.length} level(s) found, first bitrate: ${firstLevelInPlaylist.bitrate}`); break; } } // Audio is only alternate if manifest include a URI along with the audio group tag, // and this is not an audio-only stream where levels contain audio-only const audioOnly = audioCodecFound && !videoCodecFound; const edata = { levels, audioTracks, subtitleTracks, sessionData: data.sessionData, sessionKeys: data.sessionKeys, firstLevel: this._firstLevel, stats: data.stats, audio: audioCodecFound, video: videoCodecFound, altAudio: !audioOnly && audioTracks.some(t => !!t.url) }; this.hls.trigger(Events.MANIFEST_PARSED, edata); // Initiate loading after all controllers have received MANIFEST_PARSED if (this.hls.config.autoStartLoad || this.hls.forceStartLoad) { this.hls.startLoad(this.hls.config.startPosition); } } get levels() { if (this._levels.length === 0) { return null; } return this._levels; } get level() { return this.currentLevelIndex; } set level(newLevel) { const levels = this._levels; if (levels.length === 0) { return; } // check if level idx is valid if (newLevel < 0 || newLevel >= levels.length) { // invalid level id given, trigger error const error = new Error('invalid level idx'); const fatal = newLevel < 0; this.hls.trigger(Events.ERROR, { type: ErrorTypes.OTHER_ERROR, details: ErrorDetails.LEVEL_SWITCH_ERROR, level: newLevel, fatal, error, reason: error.message }); if (fatal) { return; } newLevel = Math.min(newLevel, levels.length - 1); } const lastLevelIndex = this.currentLevelIndex; const lastLevel = this.currentLevel; const lastPathwayId = lastLevel ? lastLevel.attrs['PATHWAY-ID'] : undefined; const level = levels[newLevel]; const pathwayId = level.attrs['PATHWAY-ID']; this.currentLevelIndex = newLevel; this.currentLevel = level; if (lastLevelIndex === newLevel && level.details && lastLevel && lastPathwayId === pathwayId) { return; } this.log(`Switching to level ${newLevel}${pathwayId ? ' with Pathway ' + pathwayId : ''} from level ${lastLevelIndex}${lastPathwayId ? 
' with Pathway ' + lastPathwayId : ''}`); const levelSwitchingData = _extends({}, level, { level: newLevel, maxBitrate: level.maxBitrate, attrs: level.attrs, uri: level.uri, urlId: level.urlId }); // @ts-ignore delete levelSwitchingData._attrs; // @ts-ignore delete levelSwitchingData._urlId; this.hls.trigger(Events.LEVEL_SWITCHING, levelSwitchingData); // check if we need to load playlist for this level const levelDetails = level.details; if (!levelDetails || levelDetails.live) { // level not retrieved yet, or live playlist we need to (re)load it const hlsUrlParameters = this.switchParams(level.uri, lastLevel == null ? void 0 : lastLevel.details); this.loadPlaylist(hlsUrlParameters); } } get manualLevel() { return this.manualLevelIndex; } set manualLevel(newLevel) { this.manualLevelIndex = newLevel; if (this._startLevel === undefined) { this._startLevel = newLevel; } if (newLevel !== -1) { this.level = newLevel; } } get firstLevel() { return this._firstLevel; } set firstLevel(newLevel) { this._firstLevel = newLevel; } get startLevel() { // hls.startLevel takes precedence over config.startLevel // if none of these values are defined, fallback on this._firstLevel (first quality level appearing in variant manifest) if (this._startLevel === undefined) { const configStartLevel = this.hls.config.startLevel; if (configStartLevel !== undefined) { return configStartLevel; } else { return this._firstLevel; } } else { return this._startLevel; } } set startLevel(newLevel) { this._startLevel = newLevel; } onError(event, data) { if (data.fatal || !data.context) { return; } if (data.context.type === PlaylistContextType.LEVEL && data.context.level === this.level) { this.checkRetry(data); } } // reset errors on the successful load of a fragment onFragLoaded(event, { frag }) { if (frag !== undefined && frag.type === PlaylistLevelType.MAIN) { const level = this._levels[frag.level]; if (level !== undefined) { level.loadError = 0; } } } onLevelLoaded(event, data) { var _data$deliveryDirecti2; const { level, details } = data; const curLevel = this._levels[level]; if (!curLevel) { var _data$deliveryDirecti; this.warn(`Invalid level index ${level}`); if ((_data$deliveryDirecti = data.deliveryDirectives) != null && _data$deliveryDirecti.skip) { details.deltaUpdateFailed = true; } return; } // only process level loaded events matching with expected level if (level === this.currentLevelIndex) { // reset level load error counter on successful level loaded only if there is no issues with fragments if (curLevel.fragmentError === 0) { curLevel.loadError = 0; } this.playlistLoaded(level, data, curLevel.details); } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) { // received a delta playlist update that cannot be merged details.deltaUpdateFailed = true; } } onAudioTrackSwitched(event, data) { const currentLevel = this.currentLevel; if (!currentLevel) { return; } const audioGroupId = this.hls.audioTracks[data.id].groupId; if (currentLevel.audioGroupIds && currentLevel.audioGroupId !== audioGroupId) { let urlId = -1; for (let i = 0; i < currentLevel.audioGroupIds.length; i++) { if (currentLevel.audioGroupIds[i] === audioGroupId) { urlId = i; break; } } if (urlId !== -1 && urlId !== currentLevel.urlId) { currentLevel.urlId = urlId; if (this.canLoad) { this.startLoad(); } } } } loadPlaylist(hlsUrlParameters) { super.loadPlaylist(); const currentLevelIndex = this.currentLevelIndex; const currentLevel = this.currentLevel; if (currentLevel && 
this.shouldLoadPlaylist(currentLevel)) { const id = currentLevel.urlId; let url = currentLevel.uri; if (hlsUrlParameters) { try { url = hlsUrlParameters.addDirectives(url); } catch (error) { this.warn(`Could not construct new URL with HLS Delivery Directives: ${error}`); } } const pathwayId = currentLevel.attrs['PATHWAY-ID']; this.log(`Loading level index ${currentLevelIndex}${(hlsUrlParameters == null ? void 0 : hlsUrlParameters.msn) !== undefined ? ' at sn ' + hlsUrlParameters.msn + ' part ' + hlsUrlParameters.part : ''} with${pathwayId ? ' Pathway ' + pathwayId : ''} URI ${id + 1}/${currentLevel.url.length} ${url}`); // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId); // console.log('New video quality level audio group id:', levelObject.attrs.AUDIO, level); this.clearTimer(); this.hls.trigger(Events.LEVEL_LOADING, { url, level: currentLevelIndex, id, deliveryDirectives: hlsUrlParameters || null }); } } get nextLoadLevel() { if (this.manualLevelIndex !== -1) { return this.manualLevelIndex; } else { return this.hls.nextAutoLevel; } } set nextLoadLevel(nextLevel) { this.level = nextLevel; if (this.manualLevelIndex === -1) { this.hls.nextAutoLevel = nextLevel; } } removeLevel(levelIndex, urlId) { const filterLevelAndGroupByIdIndex = (url, id) => id !== urlId; const levels = this._levels.filter((level, index) => { if (index !== levelIndex) { return true; } if (level.url.length > 1 && urlId !== undefined) { level.url = level.url.filter(filterLevelAndGroupByIdIndex); if (level.audioGroupIds) { level.audioGroupIds = level.audioGroupIds.filter(filterLevelAndGroupByIdIndex); } if (level.textGroupIds) { level.textGroupIds = level.textGroupIds.filter(filterLevelAndGroupByIdIndex); } level.urlId = 0; return true; } if (this.steering) { this.steering.removeLevel(level); } return false; }); this.hls.trigger(Events.LEVELS_UPDATED, { levels }); } onLevelsUpdated(event, { levels }) { levels.forEach((level, index) => { const { details } = level; if (details != null && details.fragments) { details.fragments.forEach(fragment => { fragment.level = index; }); } }); this._levels = levels; } } function addGroupId(level, type, id) { if (!id) { return; } if (type === 'audio') { if (!level.audioGroupIds) { level.audioGroupIds = []; } level.audioGroupIds[level.url.length - 1] = id; } else if (type === 'text') { if (!level.textGroupIds) { level.textGroupIds = []; } level.textGroupIds[level.url.length - 1] = id; } } function assignTrackIdsByGroup(tracks) { const groups = {}; tracks.forEach(track => { const groupId = track.groupId || ''; track.id = groups[groupId] = groups[groupId] || 0; groups[groupId]++; }); } var FragmentState = { NOT_LOADED: "NOT_LOADED", APPENDING: "APPENDING", PARTIAL: "PARTIAL", OK: "OK" }; class FragmentTracker { constructor(hls) { this.activePartLists = Object.create(null); this.endListFragments = Object.create(null); this.fragments = Object.create(null); this.timeRanges = Object.create(null); this.bufferPadding = 0.2; this.hls = void 0; this.hasGaps = false; this.hls = hls; this._registerListeners(); } _registerListeners() { const { hls } = this; hls.on(Events.BUFFER_APPENDED, this.onBufferAppended, this); hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this); hls.on(Events.FRAG_LOADED, this.onFragLoaded, this); } _unregisterListeners() { const { hls } = this; hls.off(Events.BUFFER_APPENDED, this.onBufferAppended, this); hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this); hls.off(Events.FRAG_LOADED, this.onFragLoaded, this); } destroy() 
{ this._unregisterListeners(); // @ts-ignore this.fragments = // @ts-ignore this.activePartLists = // @ts-ignore this.endListFragments = this.timeRanges = null; } /** * Return a Fragment or Part with an appended range that matches the position and levelType * Otherwise, return null */ getAppendedFrag(position, levelType) { const activeParts = this.activePartLists[levelType]; if (activeParts) { for (let i = activeParts.length; i--;) { const activePart = activeParts[i]; if (!activePart) { break; } const appendedPTS = activePart.end; if (activePart.start <= position && appendedPTS !== null && position <= appendedPTS) { return activePart; } } } return this.getBufferedFrag(position, levelType); } /** * Return a buffered Fragment that matches the position and levelType. * A buffered Fragment is one whose loading, parsing and appending is done (completed or "partial" meaning aborted). * If not found any Fragment, return null */ getBufferedFrag(position, levelType) { const { fragments } = this; const keys = Object.keys(fragments); for (let i = keys.length; i--;) { const fragmentEntity = fragments[keys[i]]; if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) { const frag = fragmentEntity.body; if (frag.start <= position && position <= frag.end) { return frag; } } } return null; } /** * Partial fragments effected by coded frame eviction will be removed * The browser will unload parts of the buffer to free up memory for new buffer data * Fragments will need to be reloaded when the buffer is freed up, removing partial fragments will allow them to reload(since there might be parts that are still playable) */ detectEvictedFragments(elementaryStream, timeRange, playlistType, appendedPart) { if (this.timeRanges) { this.timeRanges[elementaryStream] = timeRange; } // Check if any flagged fragments have been unloaded // excluding anything newer than appendedPartSn const appendedPartSn = (appendedPart == null ? 
void 0 : appendedPart.fragment.sn) || -1; Object.keys(this.fragments).forEach(key => { const fragmentEntity = this.fragments[key]; if (!fragmentEntity) { return; } if (appendedPartSn >= fragmentEntity.body.sn) { return; } if (!fragmentEntity.buffered && !fragmentEntity.loaded) { if (fragmentEntity.body.type === playlistType) { this.removeFragment(fragmentEntity.body); } return; } const esData = fragmentEntity.range[elementaryStream]; if (!esData) { return; } esData.time.some(time => { const isNotBuffered = !this.isTimeBuffered(time.startPTS, time.endPTS, timeRange); if (isNotBuffered) { // Unregister partial fragment as it needs to load again to be reused this.removeFragment(fragmentEntity.body); } return isNotBuffered; }); }); } /** * Checks if the fragment passed in is loaded in the buffer properly * Partially loaded fragments will be registered as a partial fragment */ detectPartialFragments(data) { const timeRanges = this.timeRanges; const { frag, part } = data; if (!timeRanges || frag.sn === 'initSegment') { return; } const fragKey = getFragmentKey(frag); const fragmentEntity = this.fragments[fragKey]; if (!fragmentEntity || fragmentEntity.buffered && frag.gap) { return; } const isFragHint = !frag.relurl; Object.keys(timeRanges).forEach(elementaryStream => { const streamInfo = frag.elementaryStreams[elementaryStream]; if (!streamInfo) { return; } const timeRange = timeRanges[elementaryStream]; const partial = isFragHint || streamInfo.partial === true; fragmentEntity.range[elementaryStream] = this.getBufferedTimes(frag, part, partial, timeRange); }); fragmentEntity.loaded = null; if (Object.keys(fragmentEntity.range).length) { fragmentEntity.buffered = true; const endList = fragmentEntity.body.endList = frag.endList || fragmentEntity.body.endList; if (endList) { this.endListFragments[fragmentEntity.body.type] = fragmentEntity; } if (!isPartial(fragmentEntity)) { // Remove older fragment parts from lookup after frag is tracked as buffered this.removeParts(frag.sn - 1, frag.type); } } else { // remove fragment if nothing was appended this.removeFragment(fragmentEntity.body); } } removeParts(snToKeep, levelType) { const activeParts = this.activePartLists[levelType]; if (!activeParts) { return; } this.activePartLists[levelType] = activeParts.filter(part => part.fragment.sn >= snToKeep); } fragBuffered(frag, force) { const fragKey = getFragmentKey(frag); let fragmentEntity = this.fragments[fragKey]; if (!fragmentEntity && force) { fragmentEntity = this.fragments[fragKey] = { body: frag, appendedPTS: null, loaded: null, buffered: false, range: Object.create(null) }; if (frag.gap) { this.hasGaps = true; } } if (fragmentEntity) { fragmentEntity.loaded = null; fragmentEntity.buffered = true; } } getBufferedTimes(fragment, part, partial, timeRange) { const buffered = { time: [], partial }; const startPTS = fragment.start; const endPTS = fragment.end; const minEndPTS = fragment.minEndPTS || endPTS; const maxStartPTS = fragment.maxStartPTS || startPTS; for (let i = 0; i < timeRange.length; i++) { const startTime = timeRange.start(i) - this.bufferPadding; const endTime = timeRange.end(i) + this.bufferPadding; if (maxStartPTS >= startTime && minEndPTS <= endTime) { // Fragment is entirely contained in buffer // No need to check the other timeRange times since it's completely playable buffered.time.push({ startPTS: Math.max(startPTS, timeRange.start(i)), endPTS: Math.min(endPTS, timeRange.end(i)) }); break; } else if (startPTS < endTime && endPTS > startTime) { buffered.partial = true; // Check for 
intersection with buffer // Get playable sections of the fragment buffered.time.push({ startPTS: Math.max(startPTS, timeRange.start(i)), endPTS: Math.min(endPTS, timeRange.end(i)) }); } else if (endPTS <= startTime) { // No need to check the rest of the timeRange as it is in order break; } } return buffered; } /** * Gets the partial fragment for a certain time */ getPartialFragment(time) { let bestFragment = null; let timePadding; let startTime; let endTime; let bestOverlap = 0; const { bufferPadding, fragments } = this; Object.keys(fragments).forEach(key => { const fragmentEntity = fragments[key]; if (!fragmentEntity) { return; } if (isPartial(fragmentEntity)) { startTime = fragmentEntity.body.start - bufferPadding; endTime = fragmentEntity.body.end + bufferPadding; if (time >= startTime && time <= endTime) { // Use the fragment that has the most padding from start and end time timePadding = Math.min(time - startTime, endTime - time); if (bestOverlap <= timePadding) { bestFragment = fragmentEntity.body; bestOverlap = timePadding; } } } }); return bestFragment; } isEndListAppended(type) { const lastFragmentEntity = this.endListFragments[type]; return lastFragmentEntity !== undefined && (lastFragmentEntity.buffered || isPartial(lastFragmentEntity)); } getState(fragment) { const fragKey = getFragmentKey(fragment); const fragmentEntity = this.fragments[fragKey]; if (fragmentEntity) { if (!fragmentEntity.buffered) { return FragmentState.APPENDING; } else if (isPartial(fragmentEntity)) { return FragmentState.PARTIAL; } else { return FragmentState.OK; } } return FragmentState.NOT_LOADED; } isTimeBuffered(startPTS, endPTS, timeRange) { let startTime; let endTime; for (let i = 0; i < timeRange.length; i++) { startTime = timeRange.start(i) - this.bufferPadding; endTime = timeRange.end(i) + this.bufferPadding; if (startPTS >= startTime && endPTS <= endTime) { return true; } if (endPTS <= startTime) { // No need to check the rest of the timeRange as it is in order return false; } } return false; } onFragLoaded(event, data) { const { frag, part } = data; // don't track initsegment (for which sn is not a number) // don't track frags used for bitrateTest, they're irrelevant. if (frag.sn === 'initSegment' || frag.bitrateTest) { return; } // Fragment entity `loaded` FragLoadedData is null when loading parts const loaded = part ? 
null : data; const fragKey = getFragmentKey(frag); this.fragments[fragKey] = { body: frag, appendedPTS: null, loaded, buffered: false, range: Object.create(null) }; } onBufferAppended(event, data) { const { frag, part, timeRanges } = data; if (frag.sn === 'initSegment') { return; } const playlistType = frag.type; if (part) { let activeParts = this.activePartLists[playlistType]; if (!activeParts) { this.activePartLists[playlistType] = activeParts = []; } activeParts.push(part); } // Store the latest timeRanges loaded in the buffer this.timeRanges = timeRanges; Object.keys(timeRanges).forEach(elementaryStream => { const timeRange = timeRanges[elementaryStream]; this.detectEvictedFragments(elementaryStream, timeRange, playlistType, part); }); } onFragBuffered(event, data) { this.detectPartialFragments(data); } hasFragment(fragment) { const fragKey = getFragmentKey(fragment); return !!this.fragments[fragKey]; } hasParts(type) { var _this$activePartLists; return !!((_this$activePartLists = this.activePartLists[type]) != null && _this$activePartLists.length); } removeFragmentsInRange(start, end, playlistType, withGapOnly, unbufferedOnly) { if (withGapOnly && !this.hasGaps) { return; } Object.keys(this.fragments).forEach(key => { const fragmentEntity = this.fragments[key]; if (!fragmentEntity) { return; } const frag = fragmentEntity.body; if (frag.type !== playlistType || withGapOnly && !frag.gap) { return; } if (frag.start < end && frag.end > start && (fragmentEntity.buffered || unbufferedOnly)) { this.removeFragment(frag); } }); } removeFragment(fragment) { const fragKey = getFragmentKey(fragment); fragment.stats.loaded = 0; fragment.clearElementaryStreamInfo(); const activeParts = this.activePartLists[fragment.type]; if (activeParts) { const snToRemove = fragment.sn; this.activePartLists[fragment.type] = activeParts.filter(part => part.fragment.sn !== snToRemove); } delete this.fragments[fragKey]; if (fragment.endList) { delete this.endListFragments[fragment.type]; } } removeAllFragments() { this.fragments = Object.create(null); this.endListFragments = Object.create(null); this.activePartLists = Object.create(null); this.hasGaps = false; } } function isPartial(fragmentEntity) { var _fragmentEntity$range, _fragmentEntity$range2, _fragmentEntity$range3; return fragmentEntity.buffered && (fragmentEntity.body.gap || ((_fragmentEntity$range = fragmentEntity.range.video) == null ? void 0 : _fragmentEntity$range.partial) || ((_fragmentEntity$range2 = fragmentEntity.range.audio) == null ? void 0 : _fragmentEntity$range2.partial) || ((_fragmentEntity$range3 = fragmentEntity.range.audiovideo) == null ? void 0 : _fragmentEntity$range3.partial)); } function getFragmentKey(fragment) { return `${fragment.type}_${fragment.level}_${fragment.urlId}_${fragment.sn}`; } const MIN_CHUNK_SIZE = Math.pow(2, 17); // 128kb class FragmentLoader { constructor(config) { this.config = void 0; this.loader = null; this.partLoadTimeout = -1; this.config = config; } destroy() { if (this.loader) { this.loader.destroy(); this.loader = null; } } abort() { if (this.loader) { // Abort the loader for current fragment. Only one may load at any given time this.loader.abort(); } } load(frag, onProgress) { const url = frag.url; if (!url) { return Promise.reject(new LoadError({ type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_ERROR, fatal: false, frag, error: new Error(`Fragment does not have a ${url ? 
'part list' : 'url'}`), networkDetails: null })); } this.abort(); const config = this.config; const FragmentILoader = config.fLoader; const DefaultILoader = config.loader; return new Promise((resolve, reject) => { if (this.loader) { this.loader.destroy(); } if (frag.gap) { if (frag.tagList.some(tags => tags[0] === 'GAP')) { reject(createGapLoadError(frag)); return; } else { // Reset temporary treatment as GAP tag frag.gap = false; } } const loader = this.loader = frag.loader = FragmentILoader ? new FragmentILoader(config) : new DefaultILoader(config); const loaderContext = createLoaderContext(frag); const loadPolicy = getLoaderConfigWithoutReties(config.fragLoadPolicy.default); const loaderConfig = { loadPolicy, timeout: loadPolicy.maxLoadTimeMs, maxRetry: 0, retryDelay: 0, maxRetryDelay: 0, highWaterMark: frag.sn === 'initSegment' ? Infinity : MIN_CHUNK_SIZE }; // Assign frag stats to the loader's stats reference frag.stats = loader.stats; loader.load(loaderContext, loaderConfig, { onSuccess: (response, stats, context, networkDetails) => { this.resetLoader(frag, loader); let payload = response.data; if (context.resetIV && frag.decryptdata) { frag.decryptdata.iv = new Uint8Array(payload.slice(0, 16)); payload = payload.slice(16); } resolve({ frag, part: null, payload, networkDetails }); }, onError: (response, context, networkDetails, stats) => { this.resetLoader(frag, loader); reject(new LoadError({ type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_ERROR, fatal: false, frag, response: _objectSpread2({ url, data: undefined }, response), error: new Error(`HTTP Error ${response.code} ${response.text}`), networkDetails, stats })); }, onAbort: (stats, context, networkDetails) => { this.resetLoader(frag, loader); reject(new LoadError({ type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.INTERNAL_ABORTED, fatal: false, frag, error: new Error('Aborted'), networkDetails, stats })); }, onTimeout: (stats, context, networkDetails) => { this.resetLoader(frag, loader); reject(new LoadError({ type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_TIMEOUT, fatal: false, frag, error: new Error(`Timeout after ${loaderConfig.timeout}ms`), networkDetails, stats })); }, onProgress: (stats, context, data, networkDetails) => { if (onProgress) { onProgress({ frag, part: null, payload: data, networkDetails }); } } }); }); } loadPart(frag, part, onProgress) { this.abort(); const config = this.config; const FragmentILoader = config.fLoader; const DefaultILoader = config.loader; return new Promise((resolve, reject) => { if (this.loader) { this.loader.destroy(); } if (frag.gap || part.gap) { reject(createGapLoadError(frag, part)); return; } const loader = this.loader = frag.loader = FragmentILoader ? new FragmentILoader(config) : new DefaultILoader(config); const loaderContext = createLoaderContext(frag, part); // Should we define another load policy for parts? 
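// Illustrative note (not part of the original code): getLoaderConfigWithoutReties()
// clones the policy with errorRetry/timeoutRetry nulled out, and maxRetry is set to 0
// below, so the fragment/part loader itself never retries; failed requests surface as
// errors and are retried (or escalated to a level switch) by ErrorController. For a
// hypothetical policy
//   { maxTimeToFirstByteMs: 10000, maxLoadTimeMs: 120000,
//     timeoutRetry: { maxNumRetry: 4 }, errorRetry: { maxNumRetry: 6 } }
// the effective policy handed to the loader becomes
//   { maxTimeToFirstByteMs: 10000, maxLoadTimeMs: 120000,
//     timeoutRetry: null, errorRetry: null }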
const loadPolicy = getLoaderConfigWithoutReties(config.fragLoadPolicy.default); const loaderConfig = { loadPolicy, timeout: loadPolicy.maxLoadTimeMs, maxRetry: 0, retryDelay: 0, maxRetryDelay: 0, highWaterMark: MIN_CHUNK_SIZE }; // Assign part stats to the loader's stats reference part.stats = loader.stats; loader.load(loaderContext, loaderConfig, { onSuccess: (response, stats, context, networkDetails) => { this.resetLoader(frag, loader); this.updateStatsFromPart(frag, part); const partLoadedData = { frag, part, payload: response.data, networkDetails }; onProgress(partLoadedData); resolve(partLoadedData); }, onError: (response, context, networkDetails, stats) => { this.resetLoader(frag, loader); reject(new LoadError({ type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_ERROR, fatal: false, frag, part, response: _objectSpread2({ url: loaderContext.url, data: undefined }, response), error: new Error(`HTTP Error ${response.code} ${response.text}`), networkDetails, stats })); }, onAbort: (stats, context, networkDetails) => { frag.stats.aborted = part.stats.aborted; this.resetLoader(frag, loader); reject(new LoadError({ type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.INTERNAL_ABORTED, fatal: false, frag, part, error: new Error('Aborted'), networkDetails, stats })); }, onTimeout: (stats, context, networkDetails) => { this.resetLoader(frag, loader); reject(new LoadError({ type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_TIMEOUT, fatal: false, frag, part, error: new Error(`Timeout after ${loaderConfig.timeout}ms`), networkDetails, stats })); } }); }); } updateStatsFromPart(frag, part) { const fragStats = frag.stats; const partStats = part.stats; const partTotal = partStats.total; fragStats.loaded += partStats.loaded; if (partTotal) { const estTotalParts = Math.round(frag.duration / part.duration); const estLoadedParts = Math.min(Math.round(fragStats.loaded / partTotal), estTotalParts); const estRemainingParts = estTotalParts - estLoadedParts; const estRemainingBytes = estRemainingParts * Math.round(fragStats.loaded / estLoadedParts); fragStats.total = fragStats.loaded + estRemainingBytes; } else { fragStats.total = Math.max(fragStats.loaded, fragStats.total); } const fragLoading = fragStats.loading; const partLoading = partStats.loading; if (fragLoading.start) { // add to fragment loader latency fragLoading.first += partLoading.first - partLoading.start; } else { fragLoading.start = partLoading.start; fragLoading.first = partLoading.first; } fragLoading.end = partLoading.end; } resetLoader(frag, loader) { frag.loader = null; if (this.loader === loader) { self.clearTimeout(this.partLoadTimeout); this.loader = null; } loader.destroy(); } } function createLoaderContext(frag, part = null) { const segment = part || frag; const loaderContext = { frag, part, responseType: 'arraybuffer', url: segment.url, headers: {}, rangeStart: 0, rangeEnd: 0 }; const start = segment.byteRangeStartOffset; const end = segment.byteRangeEndOffset; if (isFiniteNumber(start) && isFiniteNumber(end)) { var _frag$decryptdata; let byteRangeStart = start; let byteRangeEnd = end; if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method) === 'AES-128') { // MAP segment encrypted with method 'AES-128', when served with HTTP Range, // has the unencrypted size specified in the range. 
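// Illustrative worked example (not part of the original code): for a hypothetical
// encrypted init segment served with BYTERANGE 1000@16 (start = 16, end = 1016):
//   fragmentLen = 1016 - 16 = 1000; 1000 % 16 === 8, so
//   byteRangeEnd   = 1016 + (16 - 8) = 1024  // round the request up to a whole AES block
//   byteRangeStart = 16 - 16 = 0             // fetch the preceding 16 bytes to use as IV
//   loaderContext.resetIV = true             // load() then takes payload.slice(0, 16) as the IV
// (see the spec reference below)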
// Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6 const fragmentLen = end - start; if (fragmentLen % 16) { byteRangeEnd = end + (16 - fragmentLen % 16); } if (start !== 0) { loaderContext.resetIV = true; byteRangeStart = start - 16; } } loaderContext.rangeStart = byteRangeStart; loaderContext.rangeEnd = byteRangeEnd; } return loaderContext; } function createGapLoadError(frag, part) { const error = new Error(`GAP ${frag.gap ? 'tag' : 'attribute'} found`); const errorData = { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_GAP, fatal: false, frag, error, networkDetails: null }; if (part) { errorData.part = part; } (part ? part : frag).stats.aborted = true; return new LoadError(errorData); } class LoadError extends Error { constructor(data) { super(data.error.message); this.data = void 0; this.data = data; } } class KeyLoader { constructor(config) { this.config = void 0; this.keyUriToKeyInfo = {}; this.emeController = null; this.config = config; } abort(type) { for (const uri in this.keyUriToKeyInfo) { const loader = this.keyUriToKeyInfo[uri].loader; if (loader) { if (type && type !== loader.context.frag.type) { return; } loader.abort(); } } } detach() { for (const uri in this.keyUriToKeyInfo) { const keyInfo = this.keyUriToKeyInfo[uri]; // Remove cached EME keys on detach if (keyInfo.mediaKeySessionContext || keyInfo.decryptdata.isCommonEncryption) { delete this.keyUriToKeyInfo[uri]; } } } destroy() { this.detach(); for (const uri in this.keyUriToKeyInfo) { const loader = this.keyUriToKeyInfo[uri].loader; if (loader) { loader.destroy(); } } this.keyUriToKeyInfo = {}; } createKeyLoadError(frag, details = ErrorDetails.KEY_LOAD_ERROR, error, networkDetails, response) { return new LoadError({ type: ErrorTypes.NETWORK_ERROR, details, fatal: false, frag, response, error, networkDetails }); } loadClear(loadingFrag, encryptedFragments) { if (this.emeController && this.config.emeEnabled) { // access key-system with nearest key on start (loaidng frag is unencrypted) const { sn, cc } = loadingFrag; for (let i = 0; i < encryptedFragments.length; i++) { const frag = encryptedFragments[i]; if (cc <= frag.cc && (sn === 'initSegment' || frag.sn === 'initSegment' || sn < frag.sn)) { this.emeController.selectKeySystemFormat(frag).then(keySystemFormat => { frag.setKeyFormat(keySystemFormat); }); break; } } } } load(frag) { if (!frag.decryptdata && frag.encrypted && this.emeController) { // Multiple keys, but none selected, resolve in eme-controller return this.emeController.selectKeySystemFormat(frag).then(keySystemFormat => { return this.loadInternal(frag, keySystemFormat); }); } return this.loadInternal(frag); } loadInternal(frag, keySystemFormat) { var _keyInfo, _keyInfo2; if (keySystemFormat) { frag.setKeyFormat(keySystemFormat); } const decryptdata = frag.decryptdata; if (!decryptdata) { const error = new Error(keySystemFormat ? 
`Expected frag.decryptdata to be defined after setting format ${keySystemFormat}` : 'Missing decryption data on fragment in onKeyLoading'); return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, error)); } const uri = decryptdata.uri; if (!uri) { return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Invalid key URI: "${uri}"`))); } let keyInfo = this.keyUriToKeyInfo[uri]; if ((_keyInfo = keyInfo) != null && _keyInfo.decryptdata.key) { decryptdata.key = keyInfo.decryptdata.key; return Promise.resolve({ frag, keyInfo }); } // Return key load promise as long as it does not have a mediakey session with an unusable key status if ((_keyInfo2 = keyInfo) != null && _keyInfo2.keyLoadPromise) { var _keyInfo$mediaKeySess; switch ((_keyInfo$mediaKeySess = keyInfo.mediaKeySessionContext) == null ? void 0 : _keyInfo$mediaKeySess.keyStatus) { case undefined: case 'status-pending': case 'usable': case 'usable-in-future': return keyInfo.keyLoadPromise.then(keyLoadedData => { // Return the correct fragment with updated decryptdata key and loaded keyInfo decryptdata.key = keyLoadedData.keyInfo.decryptdata.key; return { frag, keyInfo }; }); } // If we have a key session and status and it is not pending or usable, continue // This will go back to the eme-controller for expired keys to get a new keyLoadPromise } // Load the key or return the loading promise keyInfo = this.keyUriToKeyInfo[uri] = { decryptdata, keyLoadPromise: null, loader: null, mediaKeySessionContext: null }; switch (decryptdata.method) { case 'ISO-23001-7': case 'SAMPLE-AES': case 'SAMPLE-AES-CENC': case 'SAMPLE-AES-CTR': if (decryptdata.keyFormat === 'identity') { // loadKeyHTTP handles http(s) and data URLs return this.loadKeyHTTP(keyInfo, frag); } return this.loadKeyEME(keyInfo, frag); case 'AES-128': return this.loadKeyHTTP(keyInfo, frag); default: return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`))); } } loadKeyEME(keyInfo, frag) { const keyLoadedData = { frag, keyInfo }; if (this.emeController && this.config.emeEnabled) { const keySessionContextPromise = this.emeController.loadKey(keyLoadedData); if (keySessionContextPromise) { return (keyInfo.keyLoadPromise = keySessionContextPromise.then(keySessionContext => { keyInfo.mediaKeySessionContext = keySessionContext; return keyLoadedData; })).catch(error => { // Remove promise for license renewal or retry keyInfo.keyLoadPromise = null; throw error; }); } } return Promise.resolve(keyLoadedData); } loadKeyHTTP(keyInfo, frag) { const config = this.config; const Loader = config.loader; const keyLoader = new Loader(config); frag.keyLoader = keyInfo.loader = keyLoader; return keyInfo.keyLoadPromise = new Promise((resolve, reject) => { const loaderContext = { keyInfo, frag, responseType: 'arraybuffer', url: keyInfo.decryptdata.uri }; // maxRetry is 0 so that instead of retrying the same key on the same variant multiple times, // key-loader will trigger an error and rely on stream-controller to handle retry logic. 
// this will also align retry logic with fragment-loader const loadPolicy = config.keyLoadPolicy.default; const loaderConfig = { loadPolicy, timeout: loadPolicy.maxLoadTimeMs, maxRetry: 0, retryDelay: 0, maxRetryDelay: 0 }; const loaderCallbacks = { onSuccess: (response, stats, context, networkDetails) => { const { frag, keyInfo, url: uri } = context; if (!frag.decryptdata || keyInfo !== this.keyUriToKeyInfo[uri]) { return reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error('after key load, decryptdata unset or changed'), networkDetails)); } keyInfo.decryptdata.key = frag.decryptdata.key = new Uint8Array(response.data); // detach fragment key loader on load success frag.keyLoader = null; keyInfo.loader = null; resolve({ frag, keyInfo }); }, onError: (response, context, networkDetails, stats) => { this.resetLoader(context); reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`HTTP Error ${response.code} loading key ${response.text}`), networkDetails, _objectSpread2({ url: loaderContext.url, data: undefined }, response))); }, onTimeout: (stats, context, networkDetails) => { this.resetLoader(context); reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_TIMEOUT, new Error('key loading timed out'), networkDetails)); }, onAbort: (stats, context, networkDetails) => { this.resetLoader(context); reject(this.createKeyLoadError(frag, ErrorDetails.INTERNAL_ABORTED, new Error('key loading aborted'), networkDetails)); } }; keyLoader.load(loaderContext, loaderConfig, loaderCallbacks); }); } resetLoader(context) { const { frag, keyInfo, url: uri } = context; const loader = keyInfo.loader; if (frag.keyLoader === loader) { frag.keyLoader = null; keyInfo.loader = null; } delete this.keyUriToKeyInfo[uri]; if (loader) { loader.destroy(); } } } /** * @ignore * Sub-class specialization of EventHandler base class. * * TaskLoop allows to schedule a task function being called (optionnaly repeatedly) on the main loop, * scheduled asynchroneously, avoiding recursive calls in the same tick. * * The task itself is implemented in `doTick`. It can be requested and called for single execution * using the `tick` method. * * It will be assured that the task execution method (`tick`) only gets called once per main loop "tick", * no matter how often it gets requested for execution. Execution in further ticks will be scheduled accordingly. * * If further execution requests have already been scheduled on the next tick, it can be checked with `hasNextTick`, * and cancelled with `clearNextTick`. * * The task can be scheduled as an interval repeatedly with a period as parameter (see `setInterval`, `clearInterval`). * * Sub-classes need to implement the `doTick` method which will effectively have the task execution routine. * * Further explanations: * * The baseclass has a `tick` method that will schedule the doTick call. It may be called synchroneously * only for a stack-depth of one. On re-entrant calls, sub-sequent calls are scheduled for next main loop ticks. * * When the task execution (`tick` method) is called in re-entrant way this is detected and * we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further * task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo). 
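 *
 * A minimal usage sketch (editorial addition; `PollingTask` and the 100 ms period are
 * hypothetical, not part of this bundle): subclasses implement `doTick()` and drive it
 * via `setInterval()` / `tick()`.
 *
 * @example
 * class PollingTask extends TaskLoop {
 *   doTick() {
 *     // runs at most once per main-loop tick, no matter how often tick() is requested
 *     console.log('polling at', Date.now());
 *   }
 * }
 * const task = new PollingTask();
 * task.setInterval(100); // schedule doTick roughly every 100 ms
 * task.tick();           // request one immediate (deduplicated) execution
 * task.destroy();        // clears the interval and any pending next-tick timer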
*/ class TaskLoop { constructor() { this._boundTick = void 0; this._tickTimer = null; this._tickInterval = null; this._tickCallCount = 0; this._boundTick = this.tick.bind(this); } destroy() { this.onHandlerDestroying(); this.onHandlerDestroyed(); } onHandlerDestroying() { // clear all timers before unregistering from event bus this.clearNextTick(); this.clearInterval(); } onHandlerDestroyed() {} hasInterval() { return !!this._tickInterval; } hasNextTick() { return !!this._tickTimer; } /** * @param millis - Interval time (ms) * @eturns True when interval has been scheduled, false when already scheduled (no effect) */ setInterval(millis) { if (!this._tickInterval) { this._tickCallCount = 0; this._tickInterval = self.setInterval(this._boundTick, millis); return true; } return false; } /** * @returns True when interval was cleared, false when none was set (no effect) */ clearInterval() { if (this._tickInterval) { self.clearInterval(this._tickInterval); this._tickInterval = null; return true; } return false; } /** * @returns True when timeout was cleared, false when none was set (no effect) */ clearNextTick() { if (this._tickTimer) { self.clearTimeout(this._tickTimer); this._tickTimer = null; return true; } return false; } /** * Will call the subclass doTick implementation in this main loop tick * or in the next one (via setTimeout(,0)) in case it has already been called * in this tick (in case this is a re-entrant call). */ tick() { this._tickCallCount++; if (this._tickCallCount === 1) { this.doTick(); // re-entrant call to tick from previous doTick call stack // -> schedule a call on the next main loop iteration to process this task processing request if (this._tickCallCount > 1) { // make sure only one timer exists at any time at max this.tickImmediate(); } this._tickCallCount = 0; } } tickImmediate() { this.clearNextTick(); this._tickTimer = self.setTimeout(this._boundTick, 0); } /** * For subclass to implement task logic * @abstract */ doTick() {} } /** * Provides methods dealing with buffer length retrieval for example. * * In general, a helper around HTML5 MediaElement TimeRanges gathered from `buffered` property. 
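 *
 * Hedged usage sketch (editorial addition; the ranges and numbers below are invented):
 * `bufferedInfo()` treats holes smaller than `maxHoleDuration` as contiguous when
 * measuring the forward buffer from `pos`.
 *
 * @example
 * // two ranges separated by a 20 ms hole, playhead at t = 2 s
 * const info = BufferHelper.bufferedInfo(
 *   [{ start: 0, end: 4.98 }, { start: 5.0, end: 10 }],
 *   2,    // pos (seconds)
 *   0.5   // maxHoleDuration: holes under 0.5 s are ignored
 * );
 * // => { len: 8, start: 0, end: 10, nextStart: undefined }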
* * Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered */ const noopBuffered = { length: 0, start: () => 0, end: () => 0 }; class BufferHelper { /** * Return true if `media`'s buffered include `position` */ static isBuffered(media, position) { try { if (media) { const buffered = BufferHelper.getBuffered(media); for (let i = 0; i < buffered.length; i++) { if (position >= buffered.start(i) && position <= buffered.end(i)) { return true; } } } } catch (error) { // this is to catch // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer': // This SourceBuffer has been removed from the parent media source } return false; } static bufferInfo(media, pos, maxHoleDuration) { try { if (media) { const vbuffered = BufferHelper.getBuffered(media); const buffered = []; let i; for (i = 0; i < vbuffered.length; i++) { buffered.push({ start: vbuffered.start(i), end: vbuffered.end(i) }); } return this.bufferedInfo(buffered, pos, maxHoleDuration); } } catch (error) { // this is to catch // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer': // This SourceBuffer has been removed from the parent media source } return { len: 0, start: pos, end: pos, nextStart: undefined }; } static bufferedInfo(buffered, pos, maxHoleDuration) { pos = Math.max(0, pos); // sort on buffer.start/smaller end (IE does not always return sorted buffered range) buffered.sort(function (a, b) { const diff = a.start - b.start; if (diff) { return diff; } else { return b.end - a.end; } }); let buffered2 = []; if (maxHoleDuration) { // there might be some small holes between buffer time range // consider that holes smaller than maxHoleDuration are irrelevant and build another // buffer time range representations that discards those holes for (let i = 0; i < buffered.length; i++) { const buf2len = buffered2.length; if (buf2len) { const buf2end = buffered2[buf2len - 1].end; // if small hole (value between 0 or maxHoleDuration ) or overlapping (negative) if (buffered[i].start - buf2end < maxHoleDuration) { // merge overlapping time ranges // update lastRange.end only if smaller than item.end // e.g. [ 1, 15] with [ 2,8] => [ 1,15] (no need to modify lastRange.end) // whereas [ 1, 8] with [ 2,15] => [ 1,15] ( lastRange should switch from [1,8] to [1,15]) if (buffered[i].end > buf2end) { buffered2[buf2len - 1].end = buffered[i].end; } } else { // big hole buffered2.push(buffered[i]); } } else { // first value buffered2.push(buffered[i]); } } } else { buffered2 = buffered; } let bufferLen = 0; // bufferStartNext can possibly be undefined based on the conditional logic below let bufferStartNext; // bufferStart and bufferEnd are buffer boundaries around current video position let bufferStart = pos; let bufferEnd = pos; for (let i = 0; i < buffered2.length; i++) { const start = buffered2[i].start; const end = buffered2[i].end; // logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i)); if (pos + maxHoleDuration >= start && pos < end) { // play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length bufferStart = start; bufferEnd = end; bufferLen = bufferEnd - pos; } else if (pos + maxHoleDuration < start) { bufferStartNext = start; break; } } return { len: bufferLen, start: bufferStart || 0, end: bufferEnd || 0, nextStart: bufferStartNext }; } /** * Safe method to get buffered property. 
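 * (Editorial note, hedged: on failure the zero-length `noopBuffered` stub defined above
 * is returned so callers can still iterate safely.)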
* SourceBuffer.buffered may throw if SourceBuffer is removed from it's MediaSource */ static getBuffered(media) { try { return media.buffered; } catch (e) { logger.log('failed to get media.buffered', e); return noopBuffered; } } } class ChunkMetadata { constructor(level, sn, id, size = 0, part = -1, partial = false) { this.level = void 0; this.sn = void 0; this.part = void 0; this.id = void 0; this.size = void 0; this.partial = void 0; this.transmuxing = getNewPerformanceTiming(); this.buffering = { audio: getNewPerformanceTiming(), video: getNewPerformanceTiming(), audiovideo: getNewPerformanceTiming() }; this.level = level; this.sn = sn; this.id = id; this.size = size; this.part = part; this.partial = partial; } } function getNewPerformanceTiming() { return { start: 0, executeStart: 0, executeEnd: 0, end: 0 }; } function findFirstFragWithCC(fragments, cc) { let firstFrag = null; for (let i = 0, len = fragments.length; i < len; i++) { const currentFrag = fragments[i]; if (currentFrag && currentFrag.cc === cc) { firstFrag = currentFrag; break; } } return firstFrag; } function shouldAlignOnDiscontinuities(lastFrag, lastLevel, details) { if (lastLevel.details) { if (details.endCC > details.startCC || lastFrag && lastFrag.cc < details.startCC) { return true; } } return false; } // Find the first frag in the previous level which matches the CC of the first frag of the new level function findDiscontinuousReferenceFrag(prevDetails, curDetails, referenceIndex = 0) { const prevFrags = prevDetails.fragments; const curFrags = curDetails.fragments; if (!curFrags.length || !prevFrags.length) { logger.log('No fragments to align'); return; } const prevStartFrag = findFirstFragWithCC(prevFrags, curFrags[0].cc); if (!prevStartFrag || prevStartFrag && !prevStartFrag.startPTS) { logger.log('No frag in previous level to align on'); return; } return prevStartFrag; } function adjustFragmentStart(frag, sliding) { if (frag) { const start = frag.start + sliding; frag.start = frag.startPTS = start; frag.endPTS = start + frag.duration; } } function adjustSlidingStart(sliding, details) { // Update segments const fragments = details.fragments; for (let i = 0, len = fragments.length; i < len; i++) { adjustFragmentStart(fragments[i], sliding); } // Update LL-HLS parts at the end of the playlist if (details.fragmentHint) { adjustFragmentStart(details.fragmentHint, sliding); } details.alignedSliding = true; } /** * Using the parameters of the last level, this function computes PTS' of the new fragments so that they form a * contiguous stream with the last fragments. * The PTS of a fragment lets Hls.js know where it fits into a stream - by knowing every PTS, we know which fragment to * download at any given time. PTS is normally computed when the fragment is demuxed, so taking this step saves us time * and an extra download. * @param lastFrag * @param lastLevel * @param details */ function alignStream(lastFrag, lastLevel, details) { if (!lastLevel) { return; } alignDiscontinuities(lastFrag, details, lastLevel); if (!details.alignedSliding && lastLevel.details) { // If the PTS wasn't figured out via discontinuity sequence that means there was no CC increase within the level. // Aligning via Program Date Time should therefore be reliable, since PDT should be the same within the same // discontinuity sequence. alignPDT(details, lastLevel.details); } if (!details.alignedSliding && lastLevel.details && !details.skippedSegments) { // Try to align on sn so that we pick a better start fragment. 
// Do not perform this on playlists with delta updates as this is only to align levels on switch // and adjustSliding only adjusts fragments after skippedSegments. adjustSliding(lastLevel.details, details); } } /** * Computes the PTS if a new level's fragments using the PTS of a fragment in the last level which shares the same * discontinuity sequence. * @param lastFrag - The last Fragment which shares the same discontinuity sequence * @param lastLevel - The details of the last loaded level * @param details - The details of the new level */ function alignDiscontinuities(lastFrag, details, lastLevel) { if (shouldAlignOnDiscontinuities(lastFrag, lastLevel, details)) { const referenceFrag = findDiscontinuousReferenceFrag(lastLevel.details, details); if (referenceFrag && isFiniteNumber(referenceFrag.start)) { logger.log(`Adjusting PTS using last level due to CC increase within current level ${details.url}`); adjustSlidingStart(referenceFrag.start, details); } } } /** * Computes the PTS of a new level's fragments using the difference in Program Date Time from the last level. * @param details - The details of the new level * @param lastDetails - The details of the last loaded level */ function alignPDT(details, lastDetails) { // This check protects the unsafe "!" usage below for null program date time access. if (!lastDetails.fragments.length || !details.hasProgramDateTime || !lastDetails.hasProgramDateTime) { return; } // if last level sliding is 1000 and its first frag PROGRAM-DATE-TIME is 2017-08-20 1:10:00 AM // and if new details first frag PROGRAM DATE-TIME is 2017-08-20 1:10:08 AM // then we can deduce that playlist B sliding is 1000+8 = 1008s const lastPDT = lastDetails.fragments[0].programDateTime; // hasProgramDateTime check above makes this safe. const newPDT = details.fragments[0].programDateTime; // date diff is in ms. frag.start is in seconds const sliding = (newPDT - lastPDT) / 1000 + lastDetails.fragments[0].start; if (sliding && isFiniteNumber(sliding)) { logger.log(`Adjusting PTS using programDateTime delta ${newPDT - lastPDT}ms, sliding:${sliding.toFixed(3)} ${details.url} `); adjustSlidingStart(sliding, details); } } /** * Ensures appropriate time-alignment between renditions based on PDT. Unlike `alignPDT`, which adjusts * the timeline based on the delta between PDTs of the 0th fragment of two playlists/`LevelDetails`, * this function assumes the timelines represented in `refDetails` are accurate, including the PDTs, * and uses the "wallclock"/PDT timeline as a cross-reference to `details`, adjusting the presentation * times/timelines of `details` accordingly. * Given the asynchronous nature of fetches and initial loads of live `main` and audio/subtitle tracks, * the primary purpose of this function is to ensure the "local timelines" of audio/subtitle tracks * are aligned to the main/video timeline, using PDT as the cross-reference/"anchor" that should * be consistent across playlists, per the HLS spec. * @param details - The details of the rendition you'd like to time-align (e.g. an audio rendition). * @param refDetails - The details of the reference rendition with start and PDT times for alignment. 
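 *
 * Worked example (editorial addition; the times are invented): if the reference (main)
 * fragment has PDT 01:10:00.000 at start = 100 s, and the matching audio fragment has
 * PDT 01:10:02.000 at start = 0 s, then
 *   delta = (targetPDT - refPDT) / 1000 - (frag.start - refFrag.start)
 *         = 2 - (0 - 100) = 102 s,
 * and `adjustSlidingStart(102, details)` shifts the audio timeline so that fragment
 * lands at 102 s, two seconds of wallclock after the reference fragment at 100 s.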
*/ function alignMediaPlaylistByPDT(details, refDetails) { if (!details.hasProgramDateTime || !refDetails.hasProgramDateTime) { return; } const fragments = details.fragments; const refFragments = refDetails.fragments; if (!fragments.length || !refFragments.length) { return; } // Calculate a delta to apply to all fragments according to the delta in PDT times and start times // of a fragment in the reference details, and a fragment in the target details of the same discontinuity. // If a fragment of the same discontinuity was not found use the middle fragment of both. const middleFrag = Math.round(refFragments.length / 2) - 1; const refFrag = refFragments[middleFrag]; const frag = findFirstFragWithCC(fragments, refFrag.cc) || fragments[Math.round(fragments.length / 2) - 1]; const refPDT = refFrag.programDateTime; const targetPDT = frag.programDateTime; if (refPDT === null || targetPDT === null) { return; } const delta = (targetPDT - refPDT) / 1000 - (frag.start - refFrag.start); adjustSlidingStart(delta, details); } class AESCrypto { constructor(subtle, iv) { this.subtle = void 0; this.aesIV = void 0; this.subtle = subtle; this.aesIV = iv; } decrypt(data, key) { return this.subtle.decrypt({ name: 'AES-CBC', iv: this.aesIV }, key, data); } } class FastAESKey { constructor(subtle, key) { this.subtle = void 0; this.key = void 0; this.subtle = subtle; this.key = key; } expandKey() { return this.subtle.importKey('raw', this.key, { name: 'AES-CBC' }, false, ['encrypt', 'decrypt']); } } // PKCS7 function removePadding(array) { const outputBytes = array.byteLength; const paddingBytes = outputBytes && new DataView(array.buffer).getUint8(outputBytes - 1); if (paddingBytes) { return sliceUint8(array, 0, outputBytes - paddingBytes); } return array; } class AESDecryptor { constructor() { this.rcon = [0x0, 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36]; this.subMix = [new Uint32Array(256), new Uint32Array(256), new Uint32Array(256), new Uint32Array(256)]; this.invSubMix = [new Uint32Array(256), new Uint32Array(256), new Uint32Array(256), new Uint32Array(256)]; this.sBox = new Uint32Array(256); this.invSBox = new Uint32Array(256); this.key = new Uint32Array(0); this.ksRows = 0; this.keySize = 0; this.keySchedule = void 0; this.invKeySchedule = void 0; this.initTable(); } // Using view.getUint32() also swaps the byte order. 
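// Hedged illustration (editorial addition): DataView#getUint32 defaults to big-endian,
// so keys/IVs are read as 32-bit words in network byte order on any host, e.g.
//   new DataView(new Uint8Array([0, 0, 0, 1]).buffer).getUint32(0) // => 1 on every platform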
uint8ArrayToUint32Array_(arrayBuffer) { const view = new DataView(arrayBuffer); const newArray = new Uint32Array(4); for (let i = 0; i < 4; i++) { newArray[i] = view.getUint32(i * 4); } return newArray; } initTable() { const sBox = this.sBox; const invSBox = this.invSBox; const subMix = this.subMix; const subMix0 = subMix[0]; const subMix1 = subMix[1]; const subMix2 = subMix[2]; const subMix3 = subMix[3]; const invSubMix = this.invSubMix; const invSubMix0 = invSubMix[0]; const invSubMix1 = invSubMix[1]; const invSubMix2 = invSubMix[2]; const invSubMix3 = invSubMix[3]; const d = new Uint32Array(256); let x = 0; let xi = 0; let i = 0; for (i = 0; i < 256; i++) { if (i < 128) { d[i] = i << 1; } else { d[i] = i << 1 ^ 0x11b; } } for (i = 0; i < 256; i++) { let sx = xi ^ xi << 1 ^ xi << 2 ^ xi << 3 ^ xi << 4; sx = sx >>> 8 ^ sx & 0xff ^ 0x63; sBox[x] = sx; invSBox[sx] = x; // Compute multiplication const x2 = d[x]; const x4 = d[x2]; const x8 = d[x4]; // Compute sub/invSub bytes, mix columns tables let t = d[sx] * 0x101 ^ sx * 0x1010100; subMix0[x] = t << 24 | t >>> 8; subMix1[x] = t << 16 | t >>> 16; subMix2[x] = t << 8 | t >>> 24; subMix3[x] = t; // Compute inv sub bytes, inv mix columns tables t = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100; invSubMix0[sx] = t << 24 | t >>> 8; invSubMix1[sx] = t << 16 | t >>> 16; invSubMix2[sx] = t << 8 | t >>> 24; invSubMix3[sx] = t; // Compute next counter if (!x) { x = xi = 1; } else { x = x2 ^ d[d[d[x8 ^ x2]]]; xi ^= d[d[xi]]; } } } expandKey(keyBuffer) { // convert keyBuffer to Uint32Array const key = this.uint8ArrayToUint32Array_(keyBuffer); let sameKey = true; let offset = 0; while (offset < key.length && sameKey) { sameKey = key[offset] === this.key[offset]; offset++; } if (sameKey) { return; } this.key = key; const keySize = this.keySize = key.length; if (keySize !== 4 && keySize !== 6 && keySize !== 8) { throw new Error('Invalid aes key size=' + keySize); } const ksRows = this.ksRows = (keySize + 6 + 1) * 4; let ksRow; let invKsRow; const keySchedule = this.keySchedule = new Uint32Array(ksRows); const invKeySchedule = this.invKeySchedule = new Uint32Array(ksRows); const sbox = this.sBox; const rcon = this.rcon; const invSubMix = this.invSubMix; const invSubMix0 = invSubMix[0]; const invSubMix1 = invSubMix[1]; const invSubMix2 = invSubMix[2]; const invSubMix3 = invSubMix[3]; let prev; let t; for (ksRow = 0; ksRow < ksRows; ksRow++) { if (ksRow < keySize) { prev = keySchedule[ksRow] = key[ksRow]; continue; } t = prev; if (ksRow % keySize === 0) { // Rot word t = t << 8 | t >>> 24; // Sub word t = sbox[t >>> 24] << 24 | sbox[t >>> 16 & 0xff] << 16 | sbox[t >>> 8 & 0xff] << 8 | sbox[t & 0xff]; // Mix Rcon t ^= rcon[ksRow / keySize | 0] << 24; } else if (keySize > 6 && ksRow % keySize === 4) { // Sub word t = sbox[t >>> 24] << 24 | sbox[t >>> 16 & 0xff] << 16 | sbox[t >>> 8 & 0xff] << 8 | sbox[t & 0xff]; } keySchedule[ksRow] = prev = (keySchedule[ksRow - keySize] ^ t) >>> 0; } for (invKsRow = 0; invKsRow < ksRows; invKsRow++) { ksRow = ksRows - invKsRow; if (invKsRow & 3) { t = keySchedule[ksRow]; } else { t = keySchedule[ksRow - 4]; } if (invKsRow < 4 || ksRow <= 4) { invKeySchedule[invKsRow] = t; } else { invKeySchedule[invKsRow] = invSubMix0[sbox[t >>> 24]] ^ invSubMix1[sbox[t >>> 16 & 0xff]] ^ invSubMix2[sbox[t >>> 8 & 0xff]] ^ invSubMix3[sbox[t & 0xff]]; } invKeySchedule[invKsRow] = invKeySchedule[invKsRow] >>> 0; } } // Adding this as a method greatly improves performance. 
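// Hedged illustration (editorial addition) of the word swap performed by the method below,
// where `decryptor` is an AESDecryptor instance:
//   decryptor.networkToHostOrderSwap(0x11223344) // => 0x44332211
//   decryptor.networkToHostOrderSwap(0x00000001) // => 0x01000000 (16777216)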
networkToHostOrderSwap(word) { return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24; } decrypt(inputArrayBuffer, offset, aesIV) { const nRounds = this.keySize + 6; const invKeySchedule = this.invKeySchedule; const invSBOX = this.invSBox; const invSubMix = this.invSubMix; const invSubMix0 = invSubMix[0]; const invSubMix1 = invSubMix[1]; const invSubMix2 = invSubMix[2]; const invSubMix3 = invSubMix[3]; const initVector = this.uint8ArrayToUint32Array_(aesIV); let initVector0 = initVector[0]; let initVector1 = initVector[1]; let initVector2 = initVector[2]; let initVector3 = initVector[3]; const inputInt32 = new Int32Array(inputArrayBuffer); const outputInt32 = new Int32Array(inputInt32.length); let t0, t1, t2, t3; let s0, s1, s2, s3; let inputWords0, inputWords1, inputWords2, inputWords3; let ksRow, i; const swapWord = this.networkToHostOrderSwap; while (offset < inputInt32.length) { inputWords0 = swapWord(inputInt32[offset]); inputWords1 = swapWord(inputInt32[offset + 1]); inputWords2 = swapWord(inputInt32[offset + 2]); inputWords3 = swapWord(inputInt32[offset + 3]); s0 = inputWords0 ^ invKeySchedule[0]; s1 = inputWords3 ^ invKeySchedule[1]; s2 = inputWords2 ^ invKeySchedule[2]; s3 = inputWords1 ^ invKeySchedule[3]; ksRow = 4; // Iterate through the rounds of decryption for (i = 1; i < nRounds; i++) { t0 = invSubMix0[s0 >>> 24] ^ invSubMix1[s1 >> 16 & 0xff] ^ invSubMix2[s2 >> 8 & 0xff] ^ invSubMix3[s3 & 0xff] ^ invKeySchedule[ksRow]; t1 = invSubMix0[s1 >>> 24] ^ invSubMix1[s2 >> 16 & 0xff] ^ invSubMix2[s3 >> 8 & 0xff] ^ invSubMix3[s0 & 0xff] ^ invKeySchedule[ksRow + 1]; t2 = invSubMix0[s2 >>> 24] ^ invSubMix1[s3 >> 16 & 0xff] ^ invSubMix2[s0 >> 8 & 0xff] ^ invSubMix3[s1 & 0xff] ^ invKeySchedule[ksRow + 2]; t3 = invSubMix0[s3 >>> 24] ^ invSubMix1[s0 >> 16 & 0xff] ^ invSubMix2[s1 >> 8 & 0xff] ^ invSubMix3[s2 & 0xff] ^ invKeySchedule[ksRow + 3]; // Update state s0 = t0; s1 = t1; s2 = t2; s3 = t3; ksRow = ksRow + 4; } // Shift rows, sub bytes, add round key t0 = invSBOX[s0 >>> 24] << 24 ^ invSBOX[s1 >> 16 & 0xff] << 16 ^ invSBOX[s2 >> 8 & 0xff] << 8 ^ invSBOX[s3 & 0xff] ^ invKeySchedule[ksRow]; t1 = invSBOX[s1 >>> 24] << 24 ^ invSBOX[s2 >> 16 & 0xff] << 16 ^ invSBOX[s3 >> 8 & 0xff] << 8 ^ invSBOX[s0 & 0xff] ^ invKeySchedule[ksRow + 1]; t2 = invSBOX[s2 >>> 24] << 24 ^ invSBOX[s3 >> 16 & 0xff] << 16 ^ invSBOX[s0 >> 8 & 0xff] << 8 ^ invSBOX[s1 & 0xff] ^ invKeySchedule[ksRow + 2]; t3 = invSBOX[s3 >>> 24] << 24 ^ invSBOX[s0 >> 16 & 0xff] << 16 ^ invSBOX[s1 >> 8 & 0xff] << 8 ^ invSBOX[s2 & 0xff] ^ invKeySchedule[ksRow + 3]; // Write outputInt32[offset] = swapWord(t0 ^ initVector0); outputInt32[offset + 1] = swapWord(t3 ^ initVector1); outputInt32[offset + 2] = swapWord(t2 ^ initVector2); outputInt32[offset + 3] = swapWord(t1 ^ initVector3); // reset initVector to last 4 unsigned int initVector0 = inputWords0; initVector1 = inputWords1; initVector2 = inputWords2; initVector3 = inputWords3; offset = offset + 4; } return outputInt32.buffer; } } const CHUNK_SIZE = 16; // 16 bytes, 128 bits class Decrypter { constructor(config, { removePKCS7Padding = true } = {}) { this.logEnabled = true; this.removePKCS7Padding = void 0; this.subtle = null; this.softwareDecrypter = null; this.key = null; this.fastAesKey = null; this.remainderData = null; this.currentIV = null; this.currentResult = null; this.useSoftware = void 0; this.useSoftware = config.enableSoftwareAES; this.removePKCS7Padding = removePKCS7Padding; // built in decryptor expects PKCS7 padding if (removePKCS7Padding) { try { 
const browserCrypto = self.crypto; if (browserCrypto) { this.subtle = browserCrypto.subtle || browserCrypto.webkitSubtle; } } catch (e) { /* no-op */ } } if (this.subtle === null) { this.useSoftware = true; } } destroy() { this.subtle = null; this.softwareDecrypter = null; this.key = null; this.fastAesKey = null; this.remainderData = null; this.currentIV = null; this.currentResult = null; } isSync() { return this.useSoftware; } flush() { const { currentResult, remainderData } = this; if (!currentResult || remainderData) { this.reset(); return null; } const data = new Uint8Array(currentResult); this.reset(); if (this.removePKCS7Padding) { return removePadding(data); } return data; } reset() { this.currentResult = null; this.currentIV = null; this.remainderData = null; if (this.softwareDecrypter) { this.softwareDecrypter = null; } } decrypt(data, key, iv) { if (this.useSoftware) { return new Promise((resolve, reject) => { this.softwareDecrypt(new Uint8Array(data), key, iv); const decryptResult = this.flush(); if (decryptResult) { resolve(decryptResult.buffer); } else { reject(new Error('[softwareDecrypt] Failed to decrypt data')); } }); } return this.webCryptoDecrypt(new Uint8Array(data), key, iv); } // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached // data is handled in the flush() call softwareDecrypt(data, key, iv) { const { currentIV, currentResult, remainderData } = this; this.logOnce('JS AES decrypt'); // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call // This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached // the end on flush(), but by that time we have already received all bytes for the segment. 
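// Editorial sketch of the staggered output (hedged; chunk sizes assumed to be multiples of 16 bytes):
//   decrypter.softwareDecrypt(chunk1, key, iv) // => null             (chunk1 plaintext cached)
//   decrypter.softwareDecrypt(chunk2, key, iv) // => chunk1 plaintext (chunk2 now cached)
//   decrypter.flush()                          // => chunk2 plaintext with PKCS7 padding removed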
// Progressive decryption does not work with WebCrypto if (remainderData) { data = appendUint8Array(remainderData, data); this.remainderData = null; } // Byte length must be a multiple of 16 (AES-128 = 128 bit blocks = 16 bytes) const currentChunk = this.getValidChunk(data); if (!currentChunk.length) { return null; } if (currentIV) { iv = currentIV; } let softwareDecrypter = this.softwareDecrypter; if (!softwareDecrypter) { softwareDecrypter = this.softwareDecrypter = new AESDecryptor(); } softwareDecrypter.expandKey(key); const result = currentResult; this.currentResult = softwareDecrypter.decrypt(currentChunk.buffer, 0, iv); this.currentIV = sliceUint8(currentChunk, -16).buffer; if (!result) { return null; } return result; } webCryptoDecrypt(data, key, iv) { const subtle = this.subtle; if (this.key !== key || !this.fastAesKey) { this.key = key; this.fastAesKey = new FastAESKey(subtle, key); } return this.fastAesKey.expandKey().then(aesKey => { // decrypt using web crypto if (!subtle) { return Promise.reject(new Error('web crypto not initialized')); } this.logOnce('WebCrypto AES decrypt'); const crypto = new AESCrypto(subtle, new Uint8Array(iv)); return crypto.decrypt(data.buffer, aesKey); }).catch(err => { logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`); return this.onWebCryptoError(data, key, iv); }); } onWebCryptoError(data, key, iv) { this.useSoftware = true; this.logEnabled = true; this.softwareDecrypt(data, key, iv); const decryptResult = this.flush(); if (decryptResult) { return decryptResult.buffer; } throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data'); } getValidChunk(data) { let currentChunk = data; const splitPoint = data.length - data.length % CHUNK_SIZE; if (splitPoint !== data.length) { currentChunk = sliceUint8(data, 0, splitPoint); this.remainderData = sliceUint8(data, splitPoint); } return currentChunk; } logOnce(msg) { if (!this.logEnabled) { return; } logger.log(`[decrypter]: ${msg}`); this.logEnabled = false; } } /** * TimeRanges to string helper */ const TimeRanges = { toString: function (r) { let log = ''; const len = r.length; for (let i = 0; i < len; i++) { log += `[${r.start(i).toFixed(3)}-${r.end(i).toFixed(3)}]`; } return log; } }; const State = { STOPPED: 'STOPPED', IDLE: 'IDLE', KEY_LOADING: 'KEY_LOADING', FRAG_LOADING: 'FRAG_LOADING', FRAG_LOADING_WAITING_RETRY: 'FRAG_LOADING_WAITING_RETRY', WAITING_TRACK: 'WAITING_TRACK', PARSING: 'PARSING', PARSED: 'PARSED', ENDED: 'ENDED', ERROR: 'ERROR', WAITING_INIT_PTS: 'WAITING_INIT_PTS', WAITING_LEVEL: 'WAITING_LEVEL' }; class BaseStreamController extends TaskLoop { constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) { super(); this.hls = void 0; this.fragPrevious = null; this.fragCurrent = null; this.fragmentTracker = void 0; this.transmuxer = null; this._state = State.STOPPED; this.playlistType = void 0; this.media = null; this.mediaBuffer = null; this.config = void 0; this.bitrateTest = false; this.lastCurrentTime = 0; this.nextLoadPosition = 0; this.startPosition = 0; this.startTimeOffset = null; this.loadedmetadata = false; this.retryDate = 0; this.levels = null; this.fragmentLoader = void 0; this.keyLoader = void 0; this.levelLastLoaded = null; this.startFragRequested = false; this.decrypter = void 0; this.initPTS = []; this.onvseeking = null; this.onvended = null; this.logPrefix = ''; this.log = void 0; this.warn = void 0; this.playlistType = playlistType; this.logPrefix = logPrefix; this.log = logger.log.bind(logger, 
`${logPrefix}:`); this.warn = logger.warn.bind(logger, `${logPrefix}:`); this.hls = hls; this.fragmentLoader = new FragmentLoader(hls.config); this.keyLoader = keyLoader; this.fragmentTracker = fragmentTracker; this.config = hls.config; this.decrypter = new Decrypter(hls.config); hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this); } doTick() { this.onTickEnd(); } onTickEnd() {} // eslint-disable-next-line @typescript-eslint/no-unused-vars startLoad(startPosition) {} stopLoad() { this.fragmentLoader.abort(); this.keyLoader.abort(this.playlistType); const frag = this.fragCurrent; if (frag != null && frag.loader) { frag.abortRequests(); this.fragmentTracker.removeFragment(frag); } this.resetTransmuxer(); this.fragCurrent = null; this.fragPrevious = null; this.clearInterval(); this.clearNextTick(); this.state = State.STOPPED; } _streamEnded(bufferInfo, levelDetails) { // If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached, // of nothing loading/loaded return false if (levelDetails.live || bufferInfo.nextStart || !bufferInfo.end || !this.media) { return false; } const partList = levelDetails.partList; // Since the last part isn't guaranteed to correspond to the last playlist segment for Low-Latency HLS, // check instead if the last part is buffered. if (partList != null && partList.length) { const lastPart = partList[partList.length - 1]; // Checking the midpoint of the part for potential margin of error and related issues. // NOTE: Technically I believe parts could yield content that is < the computed duration (including potential a duration of 0) // and still be spec-compliant, so there may still be edge cases here. Likewise, there could be issues in end of stream // part mismatches for independent audio and video playlists/segments. const lastPartBuffered = BufferHelper.isBuffered(this.media, lastPart.start + lastPart.duration / 2); return lastPartBuffered; } const playlistType = levelDetails.fragments[levelDetails.fragments.length - 1].type; return this.fragmentTracker.isEndListAppended(playlistType); } getLevelDetails() { if (this.levels && this.levelLastLoaded !== null) { var _this$levels$this$lev; return (_this$levels$this$lev = this.levels[this.levelLastLoaded]) == null ? void 0 : _this$levels$this$lev.details; } } onMediaAttached(event, data) { const media = this.media = this.mediaBuffer = data.media; this.onvseeking = this.onMediaSeeking.bind(this); this.onvended = this.onMediaEnded.bind(this); media.addEventListener('seeking', this.onvseeking); media.addEventListener('ended', this.onvended); const config = this.config; if (this.levels && config.autoStartLoad && this.state === State.STOPPED) { this.startLoad(config.startPosition); } } onMediaDetaching() { const media = this.media; if (media != null && media.ended) { this.log('MSE detaching and video ended, reset startPosition'); this.startPosition = this.lastCurrentTime = 0; } // remove video listeners if (media && this.onvseeking && this.onvended) { media.removeEventListener('seeking', this.onvseeking); media.removeEventListener('ended', this.onvended); this.onvseeking = this.onvended = null; } if (this.keyLoader) { this.keyLoader.detach(); } this.media = this.mediaBuffer = null; this.loadedmetadata = false; this.fragmentTracker.removeAllFragments(); this.stopLoad(); } onMediaSeeking() { const { config, fragCurrent, media, mediaBuffer, state } = this; const currentTime = media ? media.currentTime : 0; const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? 
mediaBuffer : media, currentTime, config.maxBufferHole); this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`); if (this.state === State.ENDED) { this.resetLoadingState(); } else if (fragCurrent) { // Seeking while frag load is in progress const tolerance = config.maxFragLookUpTolerance; const fragStartOffset = fragCurrent.start - tolerance; const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance; // if seeking out of buffered range or into new one if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) { const pastFragment = currentTime > fragEndOffset; // if the seek position is outside the current fragment range if (currentTime < fragStartOffset || pastFragment) { if (pastFragment && fragCurrent.loader) { this.log('seeking outside of buffer while fragment load in progress, cancel fragment load'); fragCurrent.abortRequests(); this.resetLoadingState(); } this.fragPrevious = null; } } } if (media) { // Remove gap fragments this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true); this.lastCurrentTime = currentTime; } // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target if (!this.loadedmetadata && !bufferInfo.len) { this.nextLoadPosition = this.startPosition = currentTime; } // Async tick to speed up processing this.tickImmediate(); } onMediaEnded() { // reset startPosition and lastCurrentTime to restart playback @ stream beginning this.startPosition = this.lastCurrentTime = 0; } onManifestLoaded(event, data) { this.startTimeOffset = data.startTimeOffset; this.initPTS = []; } onHandlerDestroying() { this.stopLoad(); super.onHandlerDestroying(); } onHandlerDestroyed() { this.state = State.STOPPED; if (this.fragmentLoader) { this.fragmentLoader.destroy(); } if (this.keyLoader) { this.keyLoader.destroy(); } if (this.decrypter) { this.decrypter.destroy(); } this.hls = this.log = this.warn = this.decrypter = this.keyLoader = this.fragmentLoader = this.fragmentTracker = null; super.onHandlerDestroyed(); } loadFragment(frag, level, targetBufferTime) { this._loadFragForPlayback(frag, level, targetBufferTime); } _loadFragForPlayback(frag, level, targetBufferTime) { const progressCallback = data => { if (this.fragContextChanged(frag)) { this.warn(`Fragment ${frag.sn}${data.part ? 
' p: ' + data.part.index : ''} of level ${frag.level} was dropped during download.`); this.fragmentTracker.removeFragment(frag); return; } frag.stats.chunkCount++; this._handleFragmentLoadProgress(data); }; this._doFragLoad(frag, level, targetBufferTime, progressCallback).then(data => { if (!data) { // if we're here we probably needed to backtrack or are waiting for more parts return; } const state = this.state; if (this.fragContextChanged(frag)) { if (state === State.FRAG_LOADING || !this.fragCurrent && state === State.PARSING) { this.fragmentTracker.removeFragment(frag); this.state = State.IDLE; } return; } if ('payload' in data) { this.log(`Loaded fragment ${frag.sn} of level ${frag.level}`); this.hls.trigger(Events.FRAG_LOADED, data); } // Pass through the whole payload; controllers not implementing progressive loading receive data from this callback this._handleFragmentLoadComplete(data); }).catch(reason => { if (this.state === State.STOPPED || this.state === State.ERROR) { return; } this.warn(reason); this.resetFragmentLoading(frag); }); } clearTrackerIfNeeded(frag) { var _this$mediaBuffer; const { fragmentTracker } = this; const fragState = fragmentTracker.getState(frag); if (fragState === FragmentState.APPENDING) { // Lower the buffer size and try again const playlistType = frag.type; const bufferedInfo = this.getFwdBufferInfo(this.mediaBuffer, playlistType); const minForwardBufferLength = Math.max(frag.duration, bufferedInfo ? bufferedInfo.len : this.config.maxBufferLength); if (this.reduceMaxBufferLength(minForwardBufferLength)) { fragmentTracker.removeFragment(frag); } } else if (((_this$mediaBuffer = this.mediaBuffer) == null ? void 0 : _this$mediaBuffer.buffered.length) === 0) { // Stop gap for bad tracker / buffer flush behavior fragmentTracker.removeAllFragments(); } else if (fragmentTracker.hasParts(frag.type)) { // In low latency mode, remove fragments for which only some parts were buffered fragmentTracker.detectPartialFragments({ frag, part: null, stats: frag.stats, id: frag.type }); if (fragmentTracker.getState(frag) === FragmentState.PARTIAL) { fragmentTracker.removeFragment(frag); } } } checkLiveUpdate(details) { if (details.updated && !details.live) { // Live stream ended, update fragment tracker const lastFragment = details.fragments[details.fragments.length - 1]; this.fragmentTracker.detectPartialFragments({ frag: lastFragment, part: null, stats: lastFragment.stats, id: lastFragment.type }); } if (!details.fragments[0]) { details.deltaUpdateFailed = true; } } flushMainBuffer(startOffset, endOffset, type = null) { if (!(startOffset - endOffset)) { return; } // When alternate audio is playing, the audio-stream-controller is responsible for the audio buffer. 
Otherwise, // passing a null type flushes both buffers const flushScope = { startOffset, endOffset, type }; this.hls.trigger(Events.BUFFER_FLUSHING, flushScope); } _loadInitSegment(frag, level) { this._doFragLoad(frag, level).then(data => { if (!data || this.fragContextChanged(frag) || !this.levels) { throw new Error('init load aborted'); } return data; }).then(data => { const { hls } = this; const { payload } = data; const decryptData = frag.decryptdata; // check to see if the payload needs to be decrypted if (payload && payload.byteLength > 0 && decryptData && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') { const startTime = self.performance.now(); // decrypt init segment data return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => { hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_DECRYPT_ERROR, fatal: false, error: err, reason: err.message, frag }); throw err; }).then(decryptedData => { const endTime = self.performance.now(); hls.trigger(Events.FRAG_DECRYPTED, { frag, payload: decryptedData, stats: { tstart: startTime, tdecrypt: endTime } }); data.payload = decryptedData; return data; }); } return data; }).then(data => { const { fragCurrent, hls, levels } = this; if (!levels) { throw new Error('init load aborted, missing levels'); } const stats = frag.stats; this.state = State.IDLE; level.fragmentError = 0; frag.data = new Uint8Array(data.payload); stats.parsing.start = stats.buffering.start = self.performance.now(); stats.parsing.end = stats.buffering.end = self.performance.now(); // Silence FRAG_BUFFERED event if fragCurrent is null if (data.frag === fragCurrent) { hls.trigger(Events.FRAG_BUFFERED, { stats, frag: fragCurrent, part: null, id: frag.type }); } this.tick(); }).catch(reason => { if (this.state === State.STOPPED || this.state === State.ERROR) { return; } this.warn(reason); this.resetFragmentLoading(frag); }); } fragContextChanged(frag) { const { fragCurrent } = this; return !frag || !fragCurrent || frag.level !== fragCurrent.level || frag.sn !== fragCurrent.sn || frag.urlId !== fragCurrent.urlId; } fragBufferedComplete(frag, part) { var _frag$startPTS, _frag$endPTS, _this$fragCurrent, _this$fragPrevious; const media = this.mediaBuffer ? this.mediaBuffer : this.media; this.log(`Buffered ${frag.type} sn: ${frag.sn}${part ? ' part: ' + part.index : ''} of ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'} ${frag.level} (frag:[${((_frag$startPTS = frag.startPTS) != null ? _frag$startPTS : NaN).toFixed(3)}-${((_frag$endPTS = frag.endPTS) != null ? _frag$endPTS : NaN).toFixed(3)}] > buffer:${media ? TimeRanges.toString(BufferHelper.getBuffered(media)) : '(detached)'})`); this.state = State.IDLE; if (!media) { return; } if (!this.loadedmetadata && frag.type == PlaylistLevelType.MAIN && media.buffered.length && ((_this$fragCurrent = this.fragCurrent) == null ? void 0 : _this$fragCurrent.sn) === ((_this$fragPrevious = this.fragPrevious) == null ? 
void 0 : _this$fragPrevious.sn)) { this.loadedmetadata = true; this.seekToStartPos(); } this.tick(); } seekToStartPos() {} _handleFragmentLoadComplete(fragLoadedEndData) { const { transmuxer } = this; if (!transmuxer) { return; } const { frag, part, partsLoaded } = fragLoadedEndData; // If we did not load parts, or loaded all parts, we have complete (not partial) fragment data const complete = !partsLoaded || partsLoaded.length === 0 || partsLoaded.some(fragLoaded => !fragLoaded); const chunkMeta = new ChunkMetadata(frag.level, frag.sn, frag.stats.chunkCount + 1, 0, part ? part.index : -1, !complete); transmuxer.flush(chunkMeta); } // eslint-disable-next-line @typescript-eslint/no-unused-vars _handleFragmentLoadProgress(frag) {} _doFragLoad(frag, level, targetBufferTime = null, progressCallback) { var _frag$decryptdata; const details = level == null ? void 0 : level.details; if (!this.levels || !details) { throw new Error(`frag load aborted, missing level${details ? '' : ' detail'}s`); } let keyLoadingPromise = null; if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) { this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${frag.level}`); this.state = State.KEY_LOADING; this.fragCurrent = frag; keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => { if (!this.fragContextChanged(keyLoadedData.frag)) { this.hls.trigger(Events.KEY_LOADED, keyLoadedData); if (this.state === State.KEY_LOADING) { this.state = State.IDLE; } return keyLoadedData; } }); this.hls.trigger(Events.KEY_LOADING, { frag }); if (this.fragCurrent === null) { keyLoadingPromise = Promise.reject(new Error(`frag load aborted, context changed in KEY_LOADING`)); } } else if (!frag.encrypted && details.encryptedFragments.length) { this.keyLoader.loadClear(frag, details.encryptedFragments); } targetBufferTime = Math.max(frag.start, targetBufferTime || 0); if (this.config.lowLatencyMode && frag.sn !== 'initSegment') { const partList = details.partList; if (partList && progressCallback) { if (targetBufferTime > frag.end && details.fragmentHint) { frag = details.fragmentHint; } const partIndex = this.getNextPart(partList, frag, targetBufferTime); if (partIndex > -1) { const part = partList[partIndex]; this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.logPrefix === '[stream-controller]' ? 
'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`); this.nextLoadPosition = part.start + part.duration; this.state = State.FRAG_LOADING; let _result; if (keyLoadingPromise) { _result = keyLoadingPromise.then(keyLoadedData => { if (!keyLoadedData || this.fragContextChanged(keyLoadedData.frag)) { return null; } return this.doFragPartsLoad(frag, part, level, progressCallback); }).catch(error => this.handleFragLoadError(error)); } else { _result = this.doFragPartsLoad(frag, part, level, progressCallback).catch(error => this.handleFragLoadError(error)); } this.hls.trigger(Events.FRAG_LOADING, { frag, part, targetBufferTime }); if (this.fragCurrent === null) { return Promise.reject(new Error(`frag load aborted, context changed in FRAG_LOADING parts`)); } return _result; } else if (!frag.url || this.loadedEndOfParts(partList, targetBufferTime)) { // Fragment hint has no parts return Promise.resolve(null); } } } this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`); // Don't update nextLoadPosition for fragments which are not buffered if (isFiniteNumber(frag.sn) && !this.bitrateTest) { this.nextLoadPosition = frag.start + frag.duration; } this.state = State.FRAG_LOADING; // Load key before streaming fragment data const dataOnProgress = this.config.progressive; let result; if (dataOnProgress && keyLoadingPromise) { result = keyLoadingPromise.then(keyLoadedData => { if (!keyLoadedData || this.fragContextChanged(keyLoadedData == null ? void 0 : keyLoadedData.frag)) { return null; } return this.fragmentLoader.load(frag, progressCallback); }).catch(error => this.handleFragLoadError(error)); } else { // load unencrypted fragment data with progress event, // or handle fragment result after key and fragment are finished loading result = Promise.all([this.fragmentLoader.load(frag, dataOnProgress ? progressCallback : undefined), keyLoadingPromise]).then(([fragLoadedData]) => { if (!dataOnProgress && fragLoadedData && progressCallback) { progressCallback(fragLoadedData); } return fragLoadedData; }).catch(error => this.handleFragLoadError(error)); } this.hls.trigger(Events.FRAG_LOADING, { frag, targetBufferTime }); if (this.fragCurrent === null) { return Promise.reject(new Error(`frag load aborted, context changed in FRAG_LOADING`)); } return result; } doFragPartsLoad(frag, fromPart, level, progressCallback) { return new Promise((resolve, reject) => { var _level$details; const partsLoaded = []; const initialPartList = (_level$details = level.details) == null ? 
void 0 : _level$details.partList; const loadPart = part => { this.fragmentLoader.loadPart(frag, part, progressCallback).then(partLoadedData => { partsLoaded[part.index] = partLoadedData; const loadedPart = partLoadedData.part; this.hls.trigger(Events.FRAG_LOADED, partLoadedData); const nextPart = getPartWith(level, frag.sn, part.index + 1) || findPart(initialPartList, frag.sn, part.index + 1); if (nextPart) { loadPart(nextPart); } else { return resolve({ frag, part: loadedPart, partsLoaded }); } }).catch(reject); }; loadPart(fromPart); }); } handleFragLoadError(error) { if ('data' in error) { const data = error.data; if (error.data && data.details === ErrorDetails.INTERNAL_ABORTED) { this.handleFragLoadAborted(data.frag, data.part); } else { this.hls.trigger(Events.ERROR, data); } } else { this.hls.trigger(Events.ERROR, { type: ErrorTypes.OTHER_ERROR, details: ErrorDetails.INTERNAL_EXCEPTION, err: error, error, fatal: true }); } return null; } _handleTransmuxerFlush(chunkMeta) { const context = this.getCurrentContext(chunkMeta); if (!context || this.state !== State.PARSING) { if (!this.fragCurrent && this.state !== State.STOPPED && this.state !== State.ERROR) { this.state = State.IDLE; } return; } const { frag, part, level } = context; const now = self.performance.now(); frag.stats.parsing.end = now; if (part) { part.stats.parsing.end = now; } this.updateLevelTiming(frag, part, level, chunkMeta.partial); } getCurrentContext(chunkMeta) { const { levels, fragCurrent } = this; const { level: levelIndex, sn, part: partIndex } = chunkMeta; if (!(levels != null && levels[levelIndex])) { this.warn(`Levels object was unset while buffering fragment ${sn} of level ${levelIndex}. The current chunk will not be buffered.`); return null; } const level = levels[levelIndex]; const part = partIndex > -1 ? getPartWith(level, sn, partIndex) : null; const frag = part ? 
part.fragment : getFragmentWithSN(level, sn, fragCurrent); if (!frag) { return null; } if (fragCurrent && fragCurrent !== frag) { frag.stats = fragCurrent.stats; } return { frag, part, level }; } bufferFragmentData(data, frag, part, chunkMeta, noBacktracking) { var _buffer; if (!data || this.state !== State.PARSING) { return; } const { data1, data2 } = data; let buffer = data1; if (data1 && data2) { // Combine the moof + mdat so that we buffer with a single append buffer = appendUint8Array(data1, data2); } if (!((_buffer = buffer) != null && _buffer.length)) { return; } const segment = { type: data.type, frag, part, chunkMeta, parent: frag.type, data: buffer }; this.hls.trigger(Events.BUFFER_APPENDING, segment); if (data.dropped && data.independent && !part) { if (noBacktracking) { return; } // Clear buffer so that we reload previous segments sequentially if required this.flushBufferGap(frag); } } flushBufferGap(frag) { const media = this.media; if (!media) { return; } // If currentTime is not buffered, clear the back buffer so that we can backtrack as much as needed if (!BufferHelper.isBuffered(media, media.currentTime)) { this.flushMainBuffer(0, frag.start); return; } // Remove back-buffer without interrupting playback to allow back tracking const currentTime = media.currentTime; const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0); const fragDuration = frag.duration; const segmentFraction = Math.min(this.config.maxFragLookUpTolerance * 2, fragDuration * 0.25); const start = Math.max(Math.min(frag.start - segmentFraction, bufferInfo.end - segmentFraction), currentTime + segmentFraction); if (frag.start - start > segmentFraction) { this.flushMainBuffer(start, frag.start); } } getFwdBufferInfo(bufferable, type) { const pos = this.getLoadPosition(); if (!isFiniteNumber(pos)) { return null; } return this.getFwdBufferInfoAtPos(bufferable, pos, type); } getFwdBufferInfoAtPos(bufferable, pos, type) { const { config: { maxBufferHole } } = this; const bufferInfo = BufferHelper.bufferInfo(bufferable, pos, maxBufferHole); // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) { const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type); if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) { return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole)); } } return bufferInfo; } getMaxBufferLength(levelBitrate) { const { config } = this; let maxBufLen; if (levelBitrate) { maxBufLen = Math.max(8 * config.maxBufferSize / levelBitrate, config.maxBufferLength); } else { maxBufLen = config.maxBufferLength; } return Math.min(maxBufLen, config.maxMaxBufferLength); } reduceMaxBufferLength(threshold) { const config = this.config; const minLength = threshold || config.maxBufferLength; if (config.maxMaxBufferLength >= minLength) { // reduce max buffer length as it might be too high. we do this to avoid loop flushing ... 
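// Hedged example (editorial addition; 600 s matches the usual hls.js default, which this
// bundle may override): repeated calls step maxMaxBufferLength 600 -> 300 -> 150 -> ...
// and return true, until it falls below the requested minimum and the method returns false.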
config.maxMaxBufferLength /= 2; this.warn(`Reduce max buffer length to ${config.maxMaxBufferLength}s`); return true; } return false; } getAppendedFrag(position, playlistType = PlaylistLevelType.MAIN) { const fragOrPart = this.fragmentTracker.getAppendedFrag(position, PlaylistLevelType.MAIN); if (fragOrPart && 'fragment' in fragOrPart) { return fragOrPart.fragment; } return fragOrPart; } getNextFragment(pos, levelDetails) { const fragments = levelDetails.fragments; const fragLen = fragments.length; if (!fragLen) { return null; } // find fragment index, contiguous with end of buffer position const { config } = this; const start = fragments[0].start; let frag; if (levelDetails.live) { const initialLiveManifestSize = config.initialLiveManifestSize; if (fragLen < initialLiveManifestSize) { this.warn(`Not enough fragments to start playback (have: ${fragLen}, need: ${initialLiveManifestSize})`); return null; } // The real fragment start times for a live stream are only known after the PTS range for that level is known. // In order to discover the range, we load the best matching fragment for that level and demux it. // Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that // we get the fragment matching that start time if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1) { frag = this.getInitialLiveFragment(levelDetails, fragments); this.startPosition = frag ? this.hls.liveSyncPosition || frag.start : pos; } } else if (pos <= start) { // VoD playlist: if loadPosition before start of playlist, load first fragment frag = fragments[0]; } // If we haven't run into any special cases already, just load the fragment most closely matching the requested position if (!frag) { const end = config.lowLatencyMode ? levelDetails.partEnd : levelDetails.fragmentEnd; frag = this.getFragmentAtPosition(pos, end, levelDetails); } return this.mapToInitFragWhenRequired(frag); } isLoopLoading(frag, targetBufferTime) { const trackerState = this.fragmentTracker.getState(frag); return (trackerState === FragmentState.OK || trackerState === FragmentState.PARTIAL && !!frag.gap) && this.nextLoadPosition > targetBufferTime; } getNextFragmentLoopLoading(frag, levelDetails, bufferInfo, playlistType, maxBufLen) { const gapStart = frag.gap; const nextFragment = this.getNextFragment(this.nextLoadPosition, levelDetails); if (nextFragment === null) { return nextFragment; } frag = nextFragment; if (gapStart && frag && !frag.gap && bufferInfo.nextStart) { // Media buffered after GAP tags should not make the next buffer timerange exceed forward buffer length const nextbufferInfo = this.getFwdBufferInfoAtPos(this.mediaBuffer ? 
this.mediaBuffer : this.media, bufferInfo.nextStart, playlistType); if (nextbufferInfo !== null && bufferInfo.len + nextbufferInfo.len >= maxBufLen) { // Returning here might result in not finding an audio and video candiate to skip to this.log(`buffer full after gaps in "${playlistType}" playlist starting at sn: ${frag.sn}`); return null; } } return frag; } mapToInitFragWhenRequired(frag) { // If an initSegment is present, it must be buffered first if (frag != null && frag.initSegment && !(frag != null && frag.initSegment.data) && !this.bitrateTest) { return frag.initSegment; } return frag; } getNextPart(partList, frag, targetBufferTime) { let nextPart = -1; let contiguous = false; let independentAttrOmitted = true; for (let i = 0, len = partList.length; i < len; i++) { const part = partList[i]; independentAttrOmitted = independentAttrOmitted && !part.independent; if (nextPart > -1 && targetBufferTime < part.start) { break; } const loaded = part.loaded; if (loaded) { nextPart = -1; } else if ((contiguous || part.independent || independentAttrOmitted) && part.fragment === frag) { nextPart = i; } contiguous = loaded; } return nextPart; } loadedEndOfParts(partList, targetBufferTime) { const lastPart = partList[partList.length - 1]; return lastPart && targetBufferTime > lastPart.start && lastPart.loaded; } /* This method is used find the best matching first fragment for a live playlist. This fragment is used to calculate the "sliding" of the playlist, which is its offset from the start of playback. After sliding we can compute the real start and end times for each fragment in the playlist (after which this method will not need to be called). */ getInitialLiveFragment(levelDetails, fragments) { const fragPrevious = this.fragPrevious; let frag = null; if (fragPrevious) { if (levelDetails.hasProgramDateTime) { // Prefer using PDT, because it can be accurate enough to choose the correct fragment without knowing the level sliding this.log(`Live playlist, switching playlist, load frag with same PDT: ${fragPrevious.programDateTime}`); frag = findFragmentByPDT(fragments, fragPrevious.endProgramDateTime, this.config.maxFragLookUpTolerance); } if (!frag) { // SN does not need to be accurate between renditions, but depending on the packaging it may be so. const targetSN = fragPrevious.sn + 1; if (targetSN >= levelDetails.startSN && targetSN <= levelDetails.endSN) { const fragNext = fragments[targetSN - levelDetails.startSN]; // Ensure that we're staying within the continuity range, since PTS resets upon a new range if (fragPrevious.cc === fragNext.cc) { frag = fragNext; this.log(`Live playlist, switching playlist, load frag with next SN: ${frag.sn}`); } } // It's important to stay within the continuity range if available; otherwise the fragments in the playlist // will have the wrong start times if (!frag) { frag = findFragWithCC(fragments, fragPrevious.cc); if (frag) { this.log(`Live playlist, switching playlist, load frag with same CC: ${frag.sn}`); } } } } else { // Find a new start fragment when fragPrevious is null const liveStart = this.hls.liveSyncPosition; if (liveStart !== null) { frag = this.getFragmentAtPosition(liveStart, this.bitrateTest ? levelDetails.fragmentEnd : levelDetails.edge, levelDetails); } } return frag; } /* This method finds the best matching fragment given the provided position. 
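   Rough illustration (assuming the default maxFragLookUpTolerance of 0.25s): with 4-second
   fragments starting at 0, 4 and 8 and a bufferEnd of 7.9, the 4-8 fragment ends within the
   tolerance of bufferEnd and is therefore treated as already covered, so the lookup moves on
   to the fragment starting at 8 instead of selecting an almost fully buffered fragment again.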
*/ getFragmentAtPosition(bufferEnd, end, levelDetails) { const { config } = this; let { fragPrevious } = this; let { fragments, endSN } = levelDetails; const { fragmentHint } = levelDetails; const tolerance = config.maxFragLookUpTolerance; const partList = levelDetails.partList; const loadingParts = !!(config.lowLatencyMode && partList != null && partList.length && fragmentHint); if (loadingParts && fragmentHint && !this.bitrateTest) { // Include incomplete fragment with parts at end fragments = fragments.concat(fragmentHint); endSN = fragmentHint.sn; } let frag; if (bufferEnd < end) { const lookupTolerance = bufferEnd > end - tolerance ? 0 : tolerance; // Remove the tolerance if it would put the bufferEnd past the actual end of stream // Uses buffer and sequence number to calculate switch segment (required if using EXT-X-DISCONTINUITY-SEQUENCE) frag = findFragmentByPTS(fragPrevious, fragments, bufferEnd, lookupTolerance); } else { // reach end of playlist frag = fragments[fragments.length - 1]; } if (frag) { const curSNIdx = frag.sn - levelDetails.startSN; // Move fragPrevious forward to support forcing the next fragment to load // when the buffer catches up to a previously buffered range. const fragState = this.fragmentTracker.getState(frag); if (fragState === FragmentState.OK || fragState === FragmentState.PARTIAL && frag.gap) { fragPrevious = frag; } if (fragPrevious && frag.sn === fragPrevious.sn && (!loadingParts || partList[0].fragment.sn > frag.sn)) { // Force the next fragment to load if the previous one was already selected. This can occasionally happen with // non-uniform fragment durations const sameLevel = fragPrevious && frag.level === fragPrevious.level; if (sameLevel) { const nextFrag = fragments[curSNIdx + 1]; if (frag.sn < endSN && this.fragmentTracker.getState(nextFrag) !== FragmentState.OK) { frag = nextFrag; } else { frag = null; } } } } return frag; } synchronizeToLiveEdge(levelDetails) { const { config, media } = this; if (!media) { return; } const liveSyncPosition = this.hls.liveSyncPosition; const currentTime = media.currentTime; const start = levelDetails.fragments[0].start; const end = levelDetails.edge; const withinSlidingWindow = currentTime >= start - config.maxFragLookUpTolerance && currentTime <= end; // Continue if we can seek forward to sync position or if current time is outside of sliding window if (liveSyncPosition !== null && media.duration > liveSyncPosition && (currentTime < liveSyncPosition || !withinSlidingWindow)) { // Continue if buffer is starving or if current time is behind max latency const maxLatency = config.liveMaxLatencyDuration !== undefined ? config.liveMaxLatencyDuration : config.liveMaxLatencyDurationCount * levelDetails.targetduration; if (!withinSlidingWindow && media.readyState < 4 || currentTime < end - maxLatency) { if (!this.loadedmetadata) { this.nextLoadPosition = liveSyncPosition; } // Only seek if ready and there is not a significant forward buffer available for playback if (media.readyState) { this.warn(`Playback: ${currentTime.toFixed(3)} is located too far from the end of live sliding playlist: ${end}, reset currentTime to : ${liveSyncPosition.toFixed(3)}`); media.currentTime = liveSyncPosition; } } } } alignPlaylists(details, previousDetails) { const { levels, levelLastLoaded, fragPrevious } = this; const lastLevel = levelLastLoaded !== null ? 
levels[levelLastLoaded] : null; // FIXME: If not for `shouldAlignOnDiscontinuities` requiring fragPrevious.cc, // this could all go in level-helper mergeDetails() const length = details.fragments.length; if (!length) { this.warn(`No fragments in live playlist`); return 0; } const slidingStart = details.fragments[0].start; const firstLevelLoad = !previousDetails; const aligned = details.alignedSliding && isFiniteNumber(slidingStart); if (firstLevelLoad || !aligned && !slidingStart) { alignStream(fragPrevious, lastLevel, details); const alignedSlidingStart = details.fragments[0].start; this.log(`Live playlist sliding: ${alignedSlidingStart.toFixed(2)} start-sn: ${previousDetails ? previousDetails.startSN : 'na'}->${details.startSN} prev-sn: ${fragPrevious ? fragPrevious.sn : 'na'} fragments: ${length}`); return alignedSlidingStart; } return slidingStart; } waitForCdnTuneIn(details) { // Wait for Low-Latency CDN Tune-in to get an updated playlist const advancePartLimit = 3; return details.live && details.canBlockReload && details.partTarget && details.tuneInGoal > Math.max(details.partHoldBack, details.partTarget * advancePartLimit); } setStartPosition(details, sliding) { // compute start position if set to -1. use it straight away if value is defined let startPosition = this.startPosition; if (startPosition < sliding) { startPosition = -1; } if (startPosition === -1 || this.lastCurrentTime === -1) { // Use Playlist EXT-X-START:TIME-OFFSET when set // Prioritize Multivariant Playlist offset so that main, audio, and subtitle stream-controller start times match const offsetInMultivariantPlaylist = this.startTimeOffset !== null; const startTimeOffset = offsetInMultivariantPlaylist ? this.startTimeOffset : details.startTimeOffset; if (startTimeOffset !== null && isFiniteNumber(startTimeOffset)) { startPosition = sliding + startTimeOffset; if (startTimeOffset < 0) { startPosition += details.totalduration; } startPosition = Math.min(Math.max(sliding, startPosition), sliding + details.totalduration); this.log(`Start time offset ${startTimeOffset} found in ${offsetInMultivariantPlaylist ? 'multivariant' : 'media'} playlist, adjust startPosition to ${startPosition}`); this.startPosition = startPosition; } else if (details.live) { // Leave this.startPosition at -1, so that we can use `getInitialLiveFragment` logic when startPosition has // not been specified via the config or an as an argument to startLoad (#3736). startPosition = this.hls.liveSyncPosition || sliding; } else { this.startPosition = startPosition = 0; } this.lastCurrentTime = startPosition; } this.nextLoadPosition = startPosition; } getLoadPosition() { const { media } = this; // if we have not yet loaded any fragment, start loading from start position let pos = 0; if (this.loadedmetadata && media) { pos = media.currentTime; } else if (this.nextLoadPosition) { pos = this.nextLoadPosition; } return pos; } handleFragLoadAborted(frag, part) { if (this.transmuxer && frag.sn !== 'initSegment' && frag.stats.aborted) { this.warn(`Fragment ${frag.sn}${part ? 
' part ' + part.index : ''} of level ${frag.level} was aborted`); this.resetFragmentLoading(frag); } } resetFragmentLoading(frag) { if (!this.fragCurrent || !this.fragContextChanged(frag) && this.state !== State.FRAG_LOADING_WAITING_RETRY) { this.state = State.IDLE; } } onFragmentOrKeyLoadError(filterType, data) { if (data.chunkMeta && !data.frag) { const context = this.getCurrentContext(data.chunkMeta); if (context) { data.frag = context.frag; } } const frag = data.frag; // Handle frag error related to caller's filterType if (!frag || frag.type !== filterType || !this.levels) { return; } if (this.fragContextChanged(frag)) { var _this$fragCurrent2; this.warn(`Frag load error must match current frag to retry ${frag.url} > ${(_this$fragCurrent2 = this.fragCurrent) == null ? void 0 : _this$fragCurrent2.url}`); return; } const gapTagEncountered = data.details === ErrorDetails.FRAG_GAP; if (gapTagEncountered) { this.fragmentTracker.fragBuffered(frag, true); } // keep retrying until the limit will be reached const errorAction = data.errorAction; const { action, retryCount = 0, retryConfig } = errorAction || {}; if (errorAction && action === NetworkErrorAction.RetryRequest && retryConfig) { var _this$levelLastLoaded; this.resetStartWhenNotLoaded((_this$levelLastLoaded = this.levelLastLoaded) != null ? _this$levelLastLoaded : frag.level); const delay = getRetryDelay(retryConfig, retryCount); this.warn(`Fragment ${frag.sn} of ${filterType} ${frag.level} errored with ${data.details}, retrying loading ${retryCount + 1}/${retryConfig.maxNumRetry} in ${delay}ms`); errorAction.resolved = true; this.retryDate = self.performance.now() + delay; this.state = State.FRAG_LOADING_WAITING_RETRY; } else if (retryConfig && errorAction) { this.resetFragmentErrors(filterType); if (retryCount < retryConfig.maxNumRetry) { // Network retry is skipped when level switch is preferred if (!gapTagEncountered) { errorAction.resolved = true; } } else { logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`); } } else if ((errorAction == null ? 
void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) { this.state = State.WAITING_LEVEL; } else { this.state = State.ERROR; } // Perform next async tick sooner to speed up error action resolution this.tickImmediate(); } reduceLengthAndFlushBuffer(data) { // if in appending state if (this.state === State.PARSING || this.state === State.PARSED) { const playlistType = data.parent; const bufferedInfo = this.getFwdBufferInfo(this.mediaBuffer, playlistType); // 0.5 : tolerance needed as some browsers stalls playback before reaching buffered end // reduce max buf len if current position is buffered const buffered = bufferedInfo && bufferedInfo.len > 0.5; if (buffered) { this.reduceMaxBufferLength(bufferedInfo.len); } const flushBuffer = !buffered; if (flushBuffer) { // current position is not buffered, but browser is still complaining about buffer full error // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708 // in that case flush the whole audio buffer to recover this.warn(`Buffer full error while media.currentTime is not buffered, flush ${playlistType} buffer`); } if (data.frag) { this.fragmentTracker.removeFragment(data.frag); this.nextLoadPosition = data.frag.start; } this.resetLoadingState(); return flushBuffer; } return false; } resetFragmentErrors(filterType) { if (filterType === PlaylistLevelType.AUDIO) { // Reset current fragment since audio track audio is essential and may not have a fail-over track this.fragCurrent = null; } // Fragment errors that result in a level switch or redundant fail-over // should reset the stream controller state to idle if (!this.loadedmetadata) { this.startFragRequested = false; } if (this.state !== State.STOPPED) { this.state = State.IDLE; } } afterBufferFlushed(media, bufferType, playlistType) { if (!media) { return; } // After successful buffer flushing, filter flushed fragments from bufferedFrags use mediaBuffered instead of media // (so that we will check against video.buffered ranges in case of alt audio track) const bufferedTimeRanges = BufferHelper.getBuffered(media); this.fragmentTracker.detectEvictedFragments(bufferType, bufferedTimeRanges, playlistType); if (this.state === State.ENDED) { this.resetLoadingState(); } } resetLoadingState() { this.log('Reset loading state'); this.fragCurrent = null; this.fragPrevious = null; this.state = State.IDLE; } resetStartWhenNotLoaded(level) { // if loadedmetadata is not set, it means that first frag request failed // in that case, reset startFragRequested flag if (!this.loadedmetadata) { this.startFragRequested = false; const details = this.levels ? this.levels[level].details : null; if (details != null && details.live) { // Update the start position and return to IDLE to recover live start this.startPosition = -1; this.setStartPosition(details, 0); this.resetLoadingState(); } else { this.nextLoadPosition = this.startPosition; } } } resetWhenMissingContext(chunkMeta) { var _this$levelLastLoaded2; this.warn(`The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`); this.removeUnbufferedFrags(); this.resetStartWhenNotLoaded((_this$levelLastLoaded2 = this.levelLastLoaded) != null ? 
_this$levelLastLoaded2 : chunkMeta.level); this.resetLoadingState(); } removeUnbufferedFrags(start = 0) { this.fragmentTracker.removeFragmentsInRange(start, Infinity, this.playlistType, false, true); } updateLevelTiming(frag, part, level, partial) { var _this$transmuxer; const details = level.details; if (!details) { this.warn('level.details undefined'); return; } const parsed = Object.keys(frag.elementaryStreams).reduce((result, type) => { const info = frag.elementaryStreams[type]; if (info) { const parsedDuration = info.endPTS - info.startPTS; if (parsedDuration <= 0) { // Destroy the transmuxer after it's next time offset failed to advance because duration was <= 0. // The new transmuxer will be configured with a time offset matching the next fragment start, // preventing the timeline from shifting. this.warn(`Could not parse fragment ${frag.sn} ${type} duration reliably (${parsedDuration})`); return result || false; } const drift = partial ? 0 : updateFragPTSDTS(details, frag, info.startPTS, info.endPTS, info.startDTS, info.endDTS); this.hls.trigger(Events.LEVEL_PTS_UPDATED, { details, level, drift, type, frag, start: info.startPTS, end: info.endPTS }); return true; } return result; }, false); if (parsed) { level.fragmentError = 0; } else if (((_this$transmuxer = this.transmuxer) == null ? void 0 : _this$transmuxer.error) === null) { const error = new Error(`Found no media in fragment ${frag.sn} of level ${frag.level} resetting transmuxer to fallback to playlist timing`); if (level.fragmentError === 0) { // Mark and track the odd empty segment as a gap to avoid reloading level.fragmentError++; frag.gap = true; this.fragmentTracker.removeFragment(frag); this.fragmentTracker.fragBuffered(frag, true); } this.warn(error.message); this.hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, error, frag, reason: `Found no media in msn ${frag.sn} of level "${level.url}"` }); if (!this.hls) { return; } this.resetTransmuxer(); // For this error fallthrough. Marking parsed will allow advancing to next fragment. } this.state = State.PARSED; this.hls.trigger(Events.FRAG_PARSED, { frag, part }); } resetTransmuxer() { if (this.transmuxer) { this.transmuxer.destroy(); this.transmuxer = null; } } recoverWorkerError(data) { if (data.event === 'demuxerWorker') { var _ref, _this$levelLastLoaded3, _this$fragCurrent3; this.fragmentTracker.removeAllFragments(); this.resetTransmuxer(); this.resetStartWhenNotLoaded((_ref = (_this$levelLastLoaded3 = this.levelLastLoaded) != null ? _this$levelLastLoaded3 : (_this$fragCurrent3 = this.fragCurrent) == null ? void 0 : _this$fragCurrent3.level) != null ? 
_ref : 0); this.resetLoadingState(); } } set state(nextState) { const previousState = this._state; if (previousState !== nextState) { this._state = nextState; this.log(`${previousState}->${nextState}`); } } get state() { return this._state; } } function getSourceBuffer() { return self.SourceBuffer || self.WebKitSourceBuffer; } /** * @ignore */ function isSupported() { const mediaSource = getMediaSource(); if (!mediaSource) { return false; } const sourceBuffer = getSourceBuffer(); const isTypeSupported = mediaSource && typeof mediaSource.isTypeSupported === 'function' && mediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"'); // if SourceBuffer is exposed ensure its API is valid // Older browsers do not expose SourceBuffer globally so checking SourceBuffer.prototype is impossible const sourceBufferValidAPI = !sourceBuffer || sourceBuffer.prototype && typeof sourceBuffer.prototype.appendBuffer === 'function' && typeof sourceBuffer.prototype.remove === 'function'; return !!isTypeSupported && !!sourceBufferValidAPI; } /** * @ignore */ function changeTypeSupported() { var _sourceBuffer$prototy; const sourceBuffer = getSourceBuffer(); return typeof (sourceBuffer == null ? void 0 : (_sourceBuffer$prototy = sourceBuffer.prototype) == null ? void 0 : _sourceBuffer$prototy.changeType) === 'function'; } // ensure the worker ends up in the bundle // If the worker should not be included this gets aliased to empty.js function hasUMDWorker() { return typeof __HLS_WORKER_BUNDLE__ === 'function'; } function injectWorker() { const blob = new self.Blob([`var exports={};var module={exports:exports};function define(f){f()};define.amd=true;(${__HLS_WORKER_BUNDLE__.toString()})(true);`], { type: 'text/javascript' }); const objectURL = self.URL.createObjectURL(blob); const worker = new self.Worker(objectURL); return { worker, objectURL }; } function loadWorker(path) { const scriptURL = new self.URL(path, self.location.href).href; const worker = new self.Worker(scriptURL); return { worker, scriptURL }; } function dummyTrack(type = '', inputTimeScale = 90000) { return { type, id: -1, pid: -1, inputTimeScale, sequenceNumber: -1, samples: [], dropped: 0 }; } class BaseAudioDemuxer { constructor() { this._audioTrack = void 0; this._id3Track = void 0; this.frameIndex = 0; this.cachedData = null; this.basePTS = null; this.initPTS = null; this.lastPTS = null; } resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) { this._id3Track = { type: 'id3', id: 3, pid: -1, inputTimeScale: 90000, sequenceNumber: 0, samples: [], dropped: 0 }; } resetTimeStamp(deaultTimestamp) { this.initPTS = deaultTimestamp; this.resetContiguity(); } resetContiguity() { this.basePTS = null; this.lastPTS = null; this.frameIndex = 0; } canParse(data, offset) { return false; } appendFrame(track, data, offset) {} // feed incoming data to the front of the parsing pipeline demux(data, timeOffset) { if (this.cachedData) { data = appendUint8Array(this.cachedData, data); this.cachedData = null; } let id3Data = getID3Data(data, 0); let offset = id3Data ? id3Data.length : 0; let lastDataIndex; const track = this._audioTrack; const id3Track = this._id3Track; const timestamp = id3Data ? 
getTimeStamp(id3Data) : undefined; const length = data.length; if (this.basePTS === null || this.frameIndex === 0 && isFiniteNumber(timestamp)) { this.basePTS = initPTSFn(timestamp, timeOffset, this.initPTS); this.lastPTS = this.basePTS; } if (this.lastPTS === null) { this.lastPTS = this.basePTS; } // more expressive than alternative: id3Data?.length if (id3Data && id3Data.length > 0) { id3Track.samples.push({ pts: this.lastPTS, dts: this.lastPTS, data: id3Data, type: MetadataSchema.audioId3, duration: Number.POSITIVE_INFINITY }); } while (offset < length) { if (this.canParse(data, offset)) { const frame = this.appendFrame(track, data, offset); if (frame) { this.frameIndex++; this.lastPTS = frame.sample.pts; offset += frame.length; lastDataIndex = offset; } else { offset = length; } } else if (canParse$2(data, offset)) { // after a ID3.canParse, a call to ID3.getID3Data *should* always returns some data id3Data = getID3Data(data, offset); id3Track.samples.push({ pts: this.lastPTS, dts: this.lastPTS, data: id3Data, type: MetadataSchema.audioId3, duration: Number.POSITIVE_INFINITY }); offset += id3Data.length; lastDataIndex = offset; } else { offset++; } if (offset === length && lastDataIndex !== length) { const partialData = sliceUint8(data, lastDataIndex); if (this.cachedData) { this.cachedData = appendUint8Array(this.cachedData, partialData); } else { this.cachedData = partialData; } } } return { audioTrack: track, videoTrack: dummyTrack(), id3Track, textTrack: dummyTrack() }; } demuxSampleAes(data, keyData, timeOffset) { return Promise.reject(new Error(`[${this}] This demuxer does not support Sample-AES decryption`)); } flush(timeOffset) { // Parse cache in case of remaining frames. const cachedData = this.cachedData; if (cachedData) { this.cachedData = null; this.demux(cachedData, 0); } return { audioTrack: this._audioTrack, videoTrack: dummyTrack(), id3Track: this._id3Track, textTrack: dummyTrack() }; } destroy() {} } /** * Initialize PTS * <p> * use timestamp unless it is undefined, NaN or Infinity * </p> */ const initPTSFn = (timestamp, timeOffset, initPTS) => { if (isFiniteNumber(timestamp)) { return timestamp * 90; } const init90kHz = initPTS ? 
initPTS.baseTime * 90000 / initPTS.timescale : 0; return timeOffset * 90000 + init90kHz; }; /** * ADTS parser helper * @link https://wiki.multimedia.cx/index.php?title=ADTS */ function getAudioConfig(observer, data, offset, audioCodec) { let adtsObjectType; let adtsExtensionSamplingIndex; let adtsChannelConfig; let config; const userAgent = navigator.userAgent.toLowerCase(); const manifestCodec = audioCodec; const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350]; // byte 2 adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1; const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2; if (adtsSamplingIndex > adtsSamplingRates.length - 1) { observer.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: `invalid ADTS sampling index:${adtsSamplingIndex}` }); return; } adtsChannelConfig = (data[offset + 2] & 0x01) << 2; // byte 3 adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6; logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`); // firefox: freq less than 24kHz = AAC SBR (HE-AAC) if (/firefox/i.test(userAgent)) { if (adtsSamplingIndex >= 6) { adtsObjectType = 5; config = new Array(4); // HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies // there is a factor 2 between frame sample rate and output sample rate // multiply frequency by 2 (see table below, equivalent to substract 3) adtsExtensionSamplingIndex = adtsSamplingIndex - 3; } else { adtsObjectType = 2; config = new Array(2); adtsExtensionSamplingIndex = adtsSamplingIndex; } // Android : always use AAC } else if (userAgent.indexOf('android') !== -1) { adtsObjectType = 2; config = new Array(2); adtsExtensionSamplingIndex = adtsSamplingIndex; } else { /* for other browsers (Chrome/Vivaldi/Opera ...) always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...) */ adtsObjectType = 5; config = new Array(4); // if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz) if (audioCodec && (audioCodec.indexOf('mp4a.40.29') !== -1 || audioCodec.indexOf('mp4a.40.5') !== -1) || !audioCodec && adtsSamplingIndex >= 6) { // HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies // there is a factor 2 between frame sample rate and output sample rate // multiply frequency by 2 (see table below, equivalent to substract 3) adtsExtensionSamplingIndex = adtsSamplingIndex - 3; } else { // if (manifest codec is AAC) AND (frequency less than 24kHz AND nb channel is 1) OR (manifest codec not specified and mono audio) // Chrome fails to play back with low frequency AAC LC mono when initialized with HE-AAC. This is not a problem with stereo. 
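/* Worked example of the AudioSpecificConfig bytes assembled below, for illustration:
   with adtsObjectType = 2 (AAC-LC), adtsSamplingIndex = 4 (44100 Hz) and
   adtsChannelConfig = 2 (stereo), config[0] = (2 << 3) | ((4 & 0x0e) >> 1) = 0x12 and
   config[1] = ((4 & 0x01) << 7) | (2 << 3) = 0x10, i.e. the familiar 0x12 0x10 pair for
   "mp4a.40.2" at 44.1 kHz stereo; config[2] and config[3] are only filled in for the
   SBR case (adtsObjectType === 5) handled a little further down. */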
if (audioCodec && audioCodec.indexOf('mp4a.40.2') !== -1 && (adtsSamplingIndex >= 6 && adtsChannelConfig === 1 || /vivaldi/i.test(userAgent)) || !audioCodec && adtsChannelConfig === 1) { adtsObjectType = 2; config = new Array(2); } adtsExtensionSamplingIndex = adtsSamplingIndex; } } /* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config ISO 14496-3 (AAC).pdf - Table 1.13 — Syntax of AudioSpecificConfig() Audio Profile / Audio Object Type 0: Null 1: AAC Main 2: AAC LC (Low Complexity) 3: AAC SSR (Scalable Sample Rate) 4: AAC LTP (Long Term Prediction) 5: SBR (Spectral Band Replication) 6: AAC Scalable sampling freq 0: 96000 Hz 1: 88200 Hz 2: 64000 Hz 3: 48000 Hz 4: 44100 Hz 5: 32000 Hz 6: 24000 Hz 7: 22050 Hz 8: 16000 Hz 9: 12000 Hz 10: 11025 Hz 11: 8000 Hz 12: 7350 Hz 13: Reserved 14: Reserved 15: frequency is written explictly Channel Configurations These are the channel configurations: 0: Defined in AOT Specifc Config 1: 1 channel: front-center 2: 2 channels: front-left, front-right */ // audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1 config[0] = adtsObjectType << 3; // samplingFrequencyIndex config[0] |= (adtsSamplingIndex & 0x0e) >> 1; config[1] |= (adtsSamplingIndex & 0x01) << 7; // channelConfiguration config[1] |= adtsChannelConfig << 3; if (adtsObjectType === 5) { // adtsExtensionSamplingIndex config[1] |= (adtsExtensionSamplingIndex & 0x0e) >> 1; config[2] = (adtsExtensionSamplingIndex & 0x01) << 7; // adtsObjectType (force to 2, chrome is checking that object type is less than 5 ??? // https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc config[2] |= 2 << 2; config[3] = 0; } return { config, samplerate: adtsSamplingRates[adtsSamplingIndex], channelCount: adtsChannelConfig, codec: 'mp4a.40.' + adtsObjectType, manifestCodec }; } function isHeaderPattern$1(data, offset) { return data[offset] === 0xff && (data[offset + 1] & 0xf6) === 0xf0; } function getHeaderLength(data, offset) { return data[offset + 1] & 0x01 ? 
7 : 9; } function getFullFrameLength(data, offset) { return (data[offset + 3] & 0x03) << 11 | data[offset + 4] << 3 | (data[offset + 5] & 0xe0) >>> 5; } function canGetFrameLength(data, offset) { return offset + 5 < data.length; } function isHeader$1(data, offset) { // Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1 // Layer bits (position 14 and 15) in header should be always 0 for ADTS // More info https://wiki.multimedia.cx/index.php?title=ADTS return offset + 1 < data.length && isHeaderPattern$1(data, offset); } function canParse$1(data, offset) { return canGetFrameLength(data, offset) && isHeaderPattern$1(data, offset) && getFullFrameLength(data, offset) <= data.length - offset; } function probe$1(data, offset) { // same as isHeader but we also check that ADTS frame follows last ADTS frame // or end of data is reached if (isHeader$1(data, offset)) { // ADTS header Length const headerLength = getHeaderLength(data, offset); if (offset + headerLength >= data.length) { return false; } // ADTS frame Length const frameLength = getFullFrameLength(data, offset); if (frameLength <= headerLength) { return false; } const newOffset = offset + frameLength; return newOffset === data.length || isHeader$1(data, newOffset); } return false; } function initTrackConfig(track, observer, data, offset, audioCodec) { if (!track.samplerate) { const config = getAudioConfig(observer, data, offset, audioCodec); if (!config) { return; } track.config = config.config; track.samplerate = config.samplerate; track.channelCount = config.channelCount; track.codec = config.codec; track.manifestCodec = config.manifestCodec; logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`); } } function getFrameDuration(samplerate) { return 1024 * 90000 / samplerate; } function parseFrameHeader(data, offset) { // The protection skip bit tells us if we have 2 bytes of CRC data at the end of the ADTS header const headerLength = getHeaderLength(data, offset); if (offset + headerLength <= data.length) { // retrieve frame size const frameLength = getFullFrameLength(data, offset) - headerLength; if (frameLength > 0) { // logger.log(`AAC frame, offset/length/total/pts:${offset+headerLength}/${frameLength}/${data.byteLength}`); return { headerLength, frameLength }; } } } function appendFrame$1(track, data, offset, pts, frameIndex) { const frameDuration = getFrameDuration(track.samplerate); const stamp = pts + frameIndex * frameDuration; const header = parseFrameHeader(data, offset); let unit; if (header) { const { frameLength, headerLength } = header; const _length = headerLength + frameLength; const missing = Math.max(0, offset + _length - data.length); // logger.log(`AAC frame ${frameIndex}, pts:${stamp} length@offset/total: ${frameLength}@${offset+headerLength}/${data.byteLength} missing: ${missing}`); if (missing) { unit = new Uint8Array(_length - headerLength); unit.set(data.subarray(offset + headerLength, data.length), 0); } else { unit = data.subarray(offset + headerLength, offset + _length); } const _sample = { unit, pts: stamp }; if (!missing) { track.samples.push(_sample); } return { sample: _sample, length: _length, missing }; } // overflow incomplete header const length = data.length - offset; unit = new Uint8Array(length); unit.set(data.subarray(offset, data.length), 0); const sample = { unit, pts: stamp }; return { sample, length, missing: -1 }; } /** * AAC demuxer */ class AACDemuxer extends BaseAudioDemuxer { constructor(observer, config) { super(); 
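/* Note on the sync-word test used by isHeaderPattern$1/probe$1 above and AACDemuxer.probe
   below, for illustration: (data[offset + 1] & 0xf6) === 0xf0 accepts the second bytes
   0xF0, 0xF1, 0xF8 and 0xF9, i.e. syncword 0xFFF with the layer bits forced to 00 while the
   MPEG version bit and the protection_absent (CRC) bit are left free; probe$1 then also
   requires that the advertised frame length leads either to the end of the data or to
   another ADTS header. */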
this.observer = void 0; this.config = void 0; this.observer = observer; this.config = config; } resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) { super.resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration); this._audioTrack = { container: 'audio/adts', type: 'audio', id: 2, pid: -1, sequenceNumber: 0, segmentCodec: 'aac', samples: [], manifestCodec: audioCodec, duration: trackDuration, inputTimeScale: 90000, dropped: 0 }; } // Source for probe info - https://wiki.multimedia.cx/index.php?title=ADTS static probe(data) { if (!data) { return false; } // Check for the ADTS sync word // Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1 // Layer bits (position 14 and 15) in header should be always 0 for ADTS // More info https://wiki.multimedia.cx/index.php?title=ADTS const id3Data = getID3Data(data, 0) || []; let offset = id3Data.length; for (let length = data.length; offset < length; offset++) { if (probe$1(data, offset)) { logger.log('ADTS sync word found !'); return true; } } return false; } canParse(data, offset) { return canParse$1(data, offset); } appendFrame(track, data, offset) { initTrackConfig(track, this.observer, data, offset, track.manifestCodec); const frame = appendFrame$1(track, data, offset, this.basePTS, this.frameIndex); if (frame && frame.missing === 0) { return frame; } } } const emsgSchemePattern = /\/emsg[-/]ID3/i; class MP4Demuxer { constructor(observer, config) { this.remainderData = null; this.timeOffset = 0; this.config = void 0; this.videoTrack = void 0; this.audioTrack = void 0; this.id3Track = void 0; this.txtTrack = void 0; this.config = config; } resetTimeStamp() {} resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) { const videoTrack = this.videoTrack = dummyTrack('video', 1); const audioTrack = this.audioTrack = dummyTrack('audio', 1); const captionTrack = this.txtTrack = dummyTrack('text', 1); this.id3Track = dummyTrack('id3', 1); this.timeOffset = 0; if (!(initSegment != null && initSegment.byteLength)) { return; } const initData = parseInitSegment(initSegment); if (initData.video) { const { id, timescale, codec } = initData.video; videoTrack.id = id; videoTrack.timescale = captionTrack.timescale = timescale; videoTrack.codec = codec; } if (initData.audio) { const { id, timescale, codec } = initData.audio; audioTrack.id = id; audioTrack.timescale = timescale; audioTrack.codec = codec; } captionTrack.id = RemuxerTrackIdConfig.text; videoTrack.sampleDuration = 0; videoTrack.duration = audioTrack.duration = trackDuration; } resetContiguity() { this.remainderData = null; } static probe(data) { // ensure we find a moof box in the first 16 kB data = data.length > 16384 ? data.subarray(0, 16384) : data; return findBox(data, ['moof']).length > 0; } demux(data, timeOffset) { this.timeOffset = timeOffset; // Load all data into the avc track. The CMAF remuxer will look for the data in the samples object; the rest of the fields do not matter let videoSamples = data; const videoTrack = this.videoTrack; const textTrack = this.txtTrack; if (this.config.progressive) { // Split the bytestream into two ranges: one encompassing all data up until the start of the last moof, and everything else. // This is done to guarantee that we're sending valid data to MSE - when demuxing progressively, we have no guarantee // that the fetch loader gives us flush moof+mdat pairs. If we push jagged data to MSE, it will throw an exception. 
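/* Rough illustration of the progressive split performed below: if a chunk arrives as
   [moof#1][mdat#1][moof#2][first part of mdat#2], segmentValidRange() keeps
   [moof#1][mdat#1] as the valid range handed on for remuxing and carries
   [moof#2][partial mdat#2] over in remainderData, to be prepended to the next chunk. */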
if (this.remainderData) { videoSamples = appendUint8Array(this.remainderData, data); } const segmentedData = segmentValidRange(videoSamples); this.remainderData = segmentedData.remainder; videoTrack.samples = segmentedData.valid || new Uint8Array(); } else { videoTrack.samples = videoSamples; } const id3Track = this.extractID3Track(videoTrack, timeOffset); textTrack.samples = parseSamples(timeOffset, videoTrack); return { videoTrack, audioTrack: this.audioTrack, id3Track, textTrack: this.txtTrack }; } flush() { const timeOffset = this.timeOffset; const videoTrack = this.videoTrack; const textTrack = this.txtTrack; videoTrack.samples = this.remainderData || new Uint8Array(); this.remainderData = null; const id3Track = this.extractID3Track(videoTrack, this.timeOffset); textTrack.samples = parseSamples(timeOffset, videoTrack); return { videoTrack, audioTrack: dummyTrack(), id3Track, textTrack: dummyTrack() }; } extractID3Track(videoTrack, timeOffset) { const id3Track = this.id3Track; if (videoTrack.samples.length) { const emsgs = findBox(videoTrack.samples, ['emsg']); if (emsgs) { emsgs.forEach(data => { const emsgInfo = parseEmsg(data); if (emsgSchemePattern.test(emsgInfo.schemeIdUri)) { const pts = isFiniteNumber(emsgInfo.presentationTime) ? emsgInfo.presentationTime / emsgInfo.timeScale : timeOffset + emsgInfo.presentationTimeDelta / emsgInfo.timeScale; let duration = emsgInfo.eventDuration === 0xffffffff ? Number.POSITIVE_INFINITY : emsgInfo.eventDuration / emsgInfo.timeScale; // Safari takes anything <= 0.001 seconds and maps it to Infinity if (duration <= 0.001) { duration = Number.POSITIVE_INFINITY; } const payload = emsgInfo.payload; id3Track.samples.push({ data: payload, len: payload.byteLength, dts: pts, pts: pts, type: MetadataSchema.emsg, duration: duration }); } }); } } return id3Track; } demuxSampleAes(data, keyData, timeOffset) { return Promise.reject(new Error('The MP4 demuxer does not support SAMPLE-AES decryption')); } destroy() {} } /** * MPEG parser helper */ let chromeVersion$1 = null; const BitratesMap = [32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160]; const SamplingRateMap = [44100, 48000, 32000, 22050, 24000, 16000, 11025, 12000, 8000]; const SamplesCoefficients = [ // MPEG 2.5 [0, // Reserved 72, // Layer3 144, // Layer2 12 // Layer1 ], // Reserved [0, // Reserved 0, // Layer3 0, // Layer2 0 // Layer1 ], // MPEG 2 [0, // Reserved 72, // Layer3 144, // Layer2 12 // Layer1 ], // MPEG 1 [0, // Reserved 144, // Layer3 144, // Layer2 12 // Layer1 ]]; const BytesInSlot = [0, // Reserved 1, // Layer3 1, // Layer2 4 // Layer1 ]; function appendFrame(track, data, offset, pts, frameIndex) { // Using http://www.datavoyage.com/mpgscript/mpeghdr.htm as a reference if (offset + 24 > data.length) { return; } const header = parseHeader(data, offset); if (header && offset + header.frameLength <= data.length) { const frameDuration = header.samplesPerFrame * 90000 / header.sampleRate; const stamp = pts + frameIndex * frameDuration; const sample = { unit: data.subarray(offset, offset + header.frameLength), pts: stamp, dts: stamp }; track.config = []; track.channelCount = header.channelCount; track.samplerate = header.sampleRate; track.samples.push(sample); return { sample, length: header.frameLength, missing: 0 }; } } 
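// Illustrative sketch only (not part of the original hls.js sources, and never called by
// this bundle): how the MPEG-audio helpers in this section are typically combined. `track`
// is assumed to be a dummyTrack('audio')-style object with a `samples` array, and `basePTS`
// is assumed to be expressed in 90 kHz ticks, matching what appendFrame() above expects.
function exampleDemuxMpegAudioFrames(track, data, basePTS) {
  let offset = 0;
  let frameIndex = 0;
  while (offset + 1 < data.length) {
    // probe() (defined just below) checks for a header that is followed by another header
    // or by the end of the data
    if (probe(data, offset)) {
      const frame = appendFrame(track, data, offset, basePTS, frameIndex);
      if (frame) {
        frameIndex++;
        offset += frame.length;
        continue;
      }
    }
    offset++; // no complete frame starts here, resync byte by byte
  }
  return frameIndex; // number of samples pushed onto track.samples
}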
function parseHeader(data, offset) { const mpegVersion = data[offset + 1] >> 3 & 3; const mpegLayer = data[offset + 1] >> 1 & 3; const bitRateIndex = data[offset + 2] >> 4 & 15; const sampleRateIndex = data[offset + 2] >> 2 & 3; if (mpegVersion !== 1 && bitRateIndex !== 0 && bitRateIndex !== 15 && sampleRateIndex !== 3) { const paddingBit = data[offset + 2] >> 1 & 1; const channelMode = data[offset + 3] >> 6; const columnInBitrates = mpegVersion === 3 ? 3 - mpegLayer : mpegLayer === 3 ? 3 : 4; const bitRate = BitratesMap[columnInBitrates * 14 + bitRateIndex - 1] * 1000; const columnInSampleRates = mpegVersion === 3 ? 0 : mpegVersion === 2 ? 1 : 2; const sampleRate = SamplingRateMap[columnInSampleRates * 3 + sampleRateIndex]; const channelCount = channelMode === 3 ? 1 : 2; // If bits of channel mode are `11` then it is a single channel (Mono) const sampleCoefficient = SamplesCoefficients[mpegVersion][mpegLayer]; const bytesInSlot = BytesInSlot[mpegLayer]; const samplesPerFrame = sampleCoefficient * 8 * bytesInSlot; const frameLength = Math.floor(sampleCoefficient * bitRate / sampleRate + paddingBit) * bytesInSlot; if (chromeVersion$1 === null) { const userAgent = navigator.userAgent || ''; const result = userAgent.match(/Chrome\/(\d+)/i); chromeVersion$1 = result ? parseInt(result[1]) : 0; } const needChromeFix = !!chromeVersion$1 && chromeVersion$1 <= 87; if (needChromeFix && mpegLayer === 2 && bitRate >= 224000 && channelMode === 0) { // Work around bug in Chromium by setting channelMode to dual-channel (01) instead of stereo (00) data[offset + 3] = data[offset + 3] | 0x80; } return { sampleRate, channelCount, frameLength, samplesPerFrame }; } } function isHeaderPattern(data, offset) { return data[offset] === 0xff && (data[offset + 1] & 0xe0) === 0xe0 && (data[offset + 1] & 0x06) !== 0x00; } function isHeader(data, offset) { // Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1 // Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III) // More info http://www.mp3-tech.org/programmer/frame_header.html return offset + 1 < data.length && isHeaderPattern(data, offset); } function canParse(data, offset) { const headerSize = 4; return isHeaderPattern(data, offset) && headerSize <= data.length - offset; } function probe(data, offset) { // same as isHeader but we also check that MPEG frame follows last MPEG frame // or end of data is reached if (offset + 1 < data.length && isHeaderPattern(data, offset)) { // MPEG header Length const headerLength = 4; // MPEG frame Length const header = parseHeader(data, offset); let frameLength = headerLength; if (header != null && header.frameLength) { frameLength = header.frameLength; } const newOffset = offset + frameLength; return newOffset === data.length || isHeader(data, newOffset); } return false; } /** * Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264. 
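 * Worked example of the unsigned form ue(v) that readUEG() decodes: the bit string 00100
 * has two leading zeros, so the reader consumes 2 + 1 = 3 bits (binary 100 = 4) and
 * subtracts 1, giving 3. readEG() then maps the unsigned values 0, 1, 2, 3, 4, ... onto the
 * signed sequence 0, 1, -1, 2, -2, ..., so the same bits decode to +2 as a signed value.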
*/ class ExpGolomb { constructor(data) { this.data = void 0; this.bytesAvailable = void 0; this.word = void 0; this.bitsAvailable = void 0; this.data = data; // the number of bytes left to examine in this.data this.bytesAvailable = data.byteLength; // the current word being examined this.word = 0; // :uint // the number of bits left to examine in the current word this.bitsAvailable = 0; // :uint } // ():void loadWord() { const data = this.data; const bytesAvailable = this.bytesAvailable; const position = data.byteLength - bytesAvailable; const workingBytes = new Uint8Array(4); const availableBytes = Math.min(4, bytesAvailable); if (availableBytes === 0) { throw new Error('no bytes available'); } workingBytes.set(data.subarray(position, position + availableBytes)); this.word = new DataView(workingBytes.buffer).getUint32(0); // track the amount of this.data that has been processed this.bitsAvailable = availableBytes * 8; this.bytesAvailable -= availableBytes; } // (count:int):void skipBits(count) { let skipBytes; // :int count = Math.min(count, this.bytesAvailable * 8 + this.bitsAvailable); if (this.bitsAvailable > count) { this.word <<= count; this.bitsAvailable -= count; } else { count -= this.bitsAvailable; skipBytes = count >> 3; count -= skipBytes << 3; this.bytesAvailable -= skipBytes; this.loadWord(); this.word <<= count; this.bitsAvailable -= count; } } // (size:int):uint readBits(size) { let bits = Math.min(this.bitsAvailable, size); // :uint const valu = this.word >>> 32 - bits; // :uint if (size > 32) { logger.error('Cannot read more than 32 bits at a time'); } this.bitsAvailable -= bits; if (this.bitsAvailable > 0) { this.word <<= bits; } else if (this.bytesAvailable > 0) { this.loadWord(); } else { throw new Error('no bits available'); } bits = size - bits; if (bits > 0 && this.bitsAvailable) { return valu << bits | this.readBits(bits); } else { return valu; } } // ():uint skipLZ() { let leadingZeroCount; // :uint for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) { if ((this.word & 0x80000000 >>> leadingZeroCount) !== 0) { // the first bit of working word is 1 this.word <<= leadingZeroCount; this.bitsAvailable -= leadingZeroCount; return leadingZeroCount; } } // we exhausted word and still have not found a 1 this.loadWord(); return leadingZeroCount + this.skipLZ(); } // ():void skipUEG() { this.skipBits(1 + this.skipLZ()); } // ():void skipEG() { this.skipBits(1 + this.skipLZ()); } // ():uint readUEG() { const clz = this.skipLZ(); // :uint return this.readBits(clz + 1) - 1; } // ():int readEG() { const valu = this.readUEG(); // :int if (0x01 & valu) { // the number is odd if the low order bit is set return 1 + valu >>> 1; // add 1 to make it even, and divide by 2 } else { return -1 * (valu >>> 1); // divide by two then make it negative } } // Some convenience functions // :Boolean readBoolean() { return this.readBits(1) === 1; } // ():int readUByte() { return this.readBits(8); } // ():int readUShort() { return this.readBits(16); } // ():int readUInt() { return this.readBits(32); } /** * Advance the ExpGolomb decoder past a scaling list. The scaling * list is optionally transmitted as part of a sequence parameter * set and is not relevant to transmuxing. 
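 * Worked example: readSPS() below passes count = 16 for the 4x4 lists and 64 for the 8x8
 * lists; if the very first delta_scale read via readEG() is -8, nextScale becomes
 * (8 - 8 + 256) % 256 = 0 and the loop reads no further bits for that list.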
* @param count the number of entries in this scaling list * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1 */ skipScalingList(count) { let lastScale = 8; let nextScale = 8; let deltaScale; for (let j = 0; j < count; j++) { if (nextScale !== 0) { deltaScale = this.readEG(); nextScale = (lastScale + deltaScale + 256) % 256; } lastScale = nextScale === 0 ? lastScale : nextScale; } } /** * Read a sequence parameter set and return some interesting video * properties. A sequence parameter set is the H264 metadata that * describes the properties of upcoming video frames. * @returns an object with configuration parsed from the * sequence parameter set, including the dimensions of the * associated video frames. */ readSPS() { let frameCropLeftOffset = 0; let frameCropRightOffset = 0; let frameCropTopOffset = 0; let frameCropBottomOffset = 0; let numRefFramesInPicOrderCntCycle; let scalingListCount; let i; const readUByte = this.readUByte.bind(this); const readBits = this.readBits.bind(this); const readUEG = this.readUEG.bind(this); const readBoolean = this.readBoolean.bind(this); const skipBits = this.skipBits.bind(this); const skipEG = this.skipEG.bind(this); const skipUEG = this.skipUEG.bind(this); const skipScalingList = this.skipScalingList.bind(this); readUByte(); const profileIdc = readUByte(); // profile_idc readBits(5); // profileCompat constraint_set[0-4]_flag, u(5) skipBits(3); // reserved_zero_3bits u(3), readUByte(); // level_idc u(8) skipUEG(); // seq_parameter_set_id // some profiles have more optional data we don't need if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) { const chromaFormatIdc = readUEG(); if (chromaFormatIdc === 3) { skipBits(1); } // separate_colour_plane_flag skipUEG(); // bit_depth_luma_minus8 skipUEG(); // bit_depth_chroma_minus8 skipBits(1); // qpprime_y_zero_transform_bypass_flag if (readBoolean()) { // seq_scaling_matrix_present_flag scalingListCount = chromaFormatIdc !== 3 ? 
8 : 12; for (i = 0; i < scalingListCount; i++) { if (readBoolean()) { // seq_scaling_list_present_flag[ i ] if (i < 6) { skipScalingList(16); } else { skipScalingList(64); } } } } } skipUEG(); // log2_max_frame_num_minus4 const picOrderCntType = readUEG(); if (picOrderCntType === 0) { readUEG(); // log2_max_pic_order_cnt_lsb_minus4 } else if (picOrderCntType === 1) { skipBits(1); // delta_pic_order_always_zero_flag skipEG(); // offset_for_non_ref_pic skipEG(); // offset_for_top_to_bottom_field numRefFramesInPicOrderCntCycle = readUEG(); for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) { skipEG(); } // offset_for_ref_frame[ i ] } skipUEG(); // max_num_ref_frames skipBits(1); // gaps_in_frame_num_value_allowed_flag const picWidthInMbsMinus1 = readUEG(); const picHeightInMapUnitsMinus1 = readUEG(); const frameMbsOnlyFlag = readBits(1); if (frameMbsOnlyFlag === 0) { skipBits(1); } // mb_adaptive_frame_field_flag skipBits(1); // direct_8x8_inference_flag if (readBoolean()) { // frame_cropping_flag frameCropLeftOffset = readUEG(); frameCropRightOffset = readUEG(); frameCropTopOffset = readUEG(); frameCropBottomOffset = readUEG(); } let pixelRatio = [1, 1]; if (readBoolean()) { // vui_parameters_present_flag if (readBoolean()) { // aspect_ratio_info_present_flag const aspectRatioIdc = readUByte(); switch (aspectRatioIdc) { case 1: pixelRatio = [1, 1]; break; case 2: pixelRatio = [12, 11]; break; case 3: pixelRatio = [10, 11]; break; case 4: pixelRatio = [16, 11]; break; case 5: pixelRatio = [40, 33]; break; case 6: pixelRatio = [24, 11]; break; case 7: pixelRatio = [20, 11]; break; case 8: pixelRatio = [32, 11]; break; case 9: pixelRatio = [80, 33]; break; case 10: pixelRatio = [18, 11]; break; case 11: pixelRatio = [15, 11]; break; case 12: pixelRatio = [64, 33]; break; case 13: pixelRatio = [160, 99]; break; case 14: pixelRatio = [4, 3]; break; case 15: pixelRatio = [3, 2]; break; case 16: pixelRatio = [2, 1]; break; case 255: { pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()]; break; } } } } return { width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2), height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 
2 : 4) * (frameCropTopOffset + frameCropBottomOffset), pixelRatio: pixelRatio }; } readSliceType() { // skip NALu type this.readUByte(); // discard first_mb_in_slice this.readUEG(); // return slice_type return this.readUEG(); } } /** * SAMPLE-AES decrypter */ class SampleAesDecrypter { constructor(observer, config, keyData) { this.keyData = void 0; this.decrypter = void 0; this.keyData = keyData; this.decrypter = new Decrypter(config, { removePKCS7Padding: false }); } decryptBuffer(encryptedData) { return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer); } // AAC - encrypt all full 16 bytes blocks starting from offset 16 decryptAacSample(samples, sampleIndex, callback) { const curUnit = samples[sampleIndex].unit; if (curUnit.length <= 16) { // No encrypted portion in this sample (first 16 bytes is not // encrypted, see https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/HLS_Sample_Encryption/Encryption/Encryption.html), return; } const encryptedData = curUnit.subarray(16, curUnit.length - curUnit.length % 16); const encryptedBuffer = encryptedData.buffer.slice(encryptedData.byteOffset, encryptedData.byteOffset + encryptedData.length); this.decryptBuffer(encryptedBuffer).then(decryptedBuffer => { const decryptedData = new Uint8Array(decryptedBuffer); curUnit.set(decryptedData, 16); if (!this.decrypter.isSync()) { this.decryptAacSamples(samples, sampleIndex + 1, callback); } }); } decryptAacSamples(samples, sampleIndex, callback) { for (;; sampleIndex++) { if (sampleIndex >= samples.length) { callback(); return; } if (samples[sampleIndex].unit.length < 32) { continue; } this.decryptAacSample(samples, sampleIndex, callback); if (!this.decrypter.isSync()) { return; } } } // AVC - encrypt one 16 bytes block out of ten, starting from offset 32 getAvcEncryptedData(decodedData) { const encryptedDataLen = Math.floor((decodedData.length - 48) / 160) * 16 + 16; const encryptedData = new Int8Array(encryptedDataLen); let outputPos = 0; for (let inputPos = 32; inputPos < decodedData.length - 16; inputPos += 160, outputPos += 16) { encryptedData.set(decodedData.subarray(inputPos, inputPos + 16), outputPos); } return encryptedData; } getAvcDecryptedUnit(decodedData, decryptedData) { const uint8DecryptedData = new Uint8Array(decryptedData); let inputPos = 0; for (let outputPos = 32; outputPos < decodedData.length - 16; outputPos += 160, inputPos += 16) { decodedData.set(uint8DecryptedData.subarray(inputPos, inputPos + 16), outputPos); } return decodedData; } decryptAvcSample(samples, sampleIndex, unitIndex, callback, curUnit) { const decodedData = discardEPB(curUnit.data); const encryptedData = this.getAvcEncryptedData(decodedData); this.decryptBuffer(encryptedData.buffer).then(decryptedBuffer => { curUnit.data = this.getAvcDecryptedUnit(decodedData, decryptedBuffer); if (!this.decrypter.isSync()) { this.decryptAvcSamples(samples, sampleIndex, unitIndex + 1, callback); } }); } decryptAvcSamples(samples, sampleIndex, unitIndex, callback) { if (samples instanceof Uint8Array) { throw new Error('Cannot decrypt samples of type Uint8Array'); } for (;; sampleIndex++, unitIndex = 0) { if (sampleIndex >= samples.length) { callback(); return; } const curUnits = samples[sampleIndex].units; for (;; unitIndex++) { if (unitIndex >= curUnits.length) { break; } const curUnit = curUnits[unitIndex]; if (curUnit.data.length <= 48 || curUnit.type !== 1 && curUnit.type !== 5) { continue; } this.decryptAvcSample(samples, sampleIndex, unitIndex, callback, 
curUnit); if (!this.decrypter.isSync()) { return; } } } } } const PACKET_LENGTH = 188; class TSDemuxer { constructor(observer, config, typeSupported) { this.observer = void 0; this.config = void 0; this.typeSupported = void 0; this.sampleAes = null; this.pmtParsed = false; this.audioCodec = void 0; this.videoCodec = void 0; this._duration = 0; this._pmtId = -1; this._avcTrack = void 0; this._audioTrack = void 0; this._id3Track = void 0; this._txtTrack = void 0; this.aacOverFlow = null; this.avcSample = null; this.remainderData = null; this.observer = observer; this.config = config; this.typeSupported = typeSupported; } static probe(data) { const syncOffset = TSDemuxer.syncOffset(data); if (syncOffset > 0) { logger.warn(`MPEG2-TS detected but first sync word found @ offset ${syncOffset}`); } return syncOffset !== -1; } static syncOffset(data) { const length = data.length; let scanwindow = Math.min(PACKET_LENGTH * 5, data.length - PACKET_LENGTH) + 1; let i = 0; while (i < scanwindow) { // a TS init segment should contain at least 2 TS packets: PAT and PMT, each starting with 0x47 let foundPat = false; let packetStart = -1; let tsPackets = 0; for (let j = i; j < length; j += PACKET_LENGTH) { if (data[j] === 0x47) { tsPackets++; if (packetStart === -1) { packetStart = j; // First sync word found at offset, increase scan length (#5251) if (packetStart !== 0) { scanwindow = Math.min(packetStart + PACKET_LENGTH * 99, data.length - PACKET_LENGTH) + 1; } } if (!foundPat) { foundPat = parsePID(data, j) === 0; } // Sync word found at 0 with 3 packets, or found at offset least 2 packets up to scanwindow (#5501) if (foundPat && tsPackets > 1 && (packetStart === 0 && tsPackets > 2 || j + PACKET_LENGTH > scanwindow)) { return packetStart; } } else if (tsPackets) { // Exit if sync word found, but does not contain contiguous packets (#5501) return -1; } else { break; } } i++; } return -1; } /** * Creates a track model internal to demuxer used to drive remuxing input */ static createTrack(type, duration) { return { container: type === 'video' || type === 'audio' ? 'video/mp2t' : undefined, type, id: RemuxerTrackIdConfig[type], pid: -1, inputTimeScale: 90000, sequenceNumber: 0, samples: [], dropped: 0, duration: type === 'audio' ? duration : undefined }; } /** * Initializes a new init segment on the demuxer/remuxer interface. Needed for discontinuities/track-switches (or at stream start) * Resets all internal track instances of the demuxer. 
*/ resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) { this.pmtParsed = false; this._pmtId = -1; this._avcTrack = TSDemuxer.createTrack('video'); this._audioTrack = TSDemuxer.createTrack('audio', trackDuration); this._id3Track = TSDemuxer.createTrack('id3'); this._txtTrack = TSDemuxer.createTrack('text'); this._audioTrack.segmentCodec = 'aac'; // flush any partial content this.aacOverFlow = null; this.avcSample = null; this.remainderData = null; this.audioCodec = audioCodec; this.videoCodec = videoCodec; this._duration = trackDuration; } resetTimeStamp() {} resetContiguity() { const { _audioTrack, _avcTrack, _id3Track } = this; if (_audioTrack) { _audioTrack.pesData = null; } if (_avcTrack) { _avcTrack.pesData = null; } if (_id3Track) { _id3Track.pesData = null; } this.aacOverFlow = null; this.avcSample = null; this.remainderData = null; } demux(data, timeOffset, isSampleAes = false, flush = false) { if (!isSampleAes) { this.sampleAes = null; } let pes; const videoTrack = this._avcTrack; const audioTrack = this._audioTrack; const id3Track = this._id3Track; const textTrack = this._txtTrack; let avcId = videoTrack.pid; let avcData = videoTrack.pesData; let audioId = audioTrack.pid; let id3Id = id3Track.pid; let audioData = audioTrack.pesData; let id3Data = id3Track.pesData; let unknownPID = null; let pmtParsed = this.pmtParsed; let pmtId = this._pmtId; let len = data.length; if (this.remainderData) { data = appendUint8Array(this.remainderData, data); len = data.length; this.remainderData = null; } if (len < PACKET_LENGTH && !flush) { this.remainderData = data; return { audioTrack, videoTrack, id3Track, textTrack }; } const syncOffset = Math.max(0, TSDemuxer.syncOffset(data)); len -= (len - syncOffset) % PACKET_LENGTH; if (len < data.byteLength && !flush) { this.remainderData = new Uint8Array(data.buffer, len, data.buffer.byteLength - len); } // loop through TS packets let tsPacketErrors = 0; for (let start = syncOffset; start < len; start += PACKET_LENGTH) { if (data[start] === 0x47) { const stt = !!(data[start + 1] & 0x40); const pid = parsePID(data, start); const atf = (data[start + 3] & 0x30) >> 4; // if an adaption field is present, its length is specified by the fifth byte of the TS packet header. 
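/* Worked example of the payload offset computed below: with adaptation_field_control '01'
   (atf = 1, payload only) the payload starts at start + 4; with '11' (atf = 3) and an
   adaptation field length of 7 stored at data[start + 4] it starts at start + 5 + 7 =
   start + 12; a 183-byte adaptation field fills the rest of the 188-byte packet, which is
   the "offset === start + PACKET_LENGTH" case that makes the loop skip that packet. */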
let offset; if (atf > 1) { offset = start + 5 + data[start + 4]; // continue if there is only adaptation field if (offset === start + PACKET_LENGTH) { continue; } } else { offset = start + 4; } switch (pid) { case avcId: if (stt) { if (avcData && (pes = parsePES(avcData))) { this.parseAVCPES(videoTrack, textTrack, pes, false); } avcData = { data: [], size: 0 }; } if (avcData) { avcData.data.push(data.subarray(offset, start + PACKET_LENGTH)); avcData.size += start + PACKET_LENGTH - offset; } break; case audioId: if (stt) { if (audioData && (pes = parsePES(audioData))) { switch (audioTrack.segmentCodec) { case 'aac': this.parseAACPES(audioTrack, pes); break; case 'mp3': this.parseMPEGPES(audioTrack, pes); break; } } audioData = { data: [], size: 0 }; } if (audioData) { audioData.data.push(data.subarray(offset, start + PACKET_LENGTH)); audioData.size += start + PACKET_LENGTH - offset; } break; case id3Id: if (stt) { if (id3Data && (pes = parsePES(id3Data))) { this.parseID3PES(id3Track, pes); } id3Data = { data: [], size: 0 }; } if (id3Data) { id3Data.data.push(data.subarray(offset, start + PACKET_LENGTH)); id3Data.size += start + PACKET_LENGTH - offset; } break; case 0: if (stt) { offset += data[offset] + 1; } pmtId = this._pmtId = parsePAT(data, offset); // logger.log('PMT PID:' + this._pmtId); break; case pmtId: { if (stt) { offset += data[offset] + 1; } const parsedPIDs = parsePMT(data, offset, this.typeSupported, isSampleAes); // only update track id if track PID found while parsing PMT // this is to avoid resetting the PID to -1 in case // track PID transiently disappears from the stream // this could happen in case of transient missing audio samples for example // NOTE this is only the PID of the track as found in TS, // but we are not using this for MP4 track IDs. avcId = parsedPIDs.avc; if (avcId > 0) { videoTrack.pid = avcId; } audioId = parsedPIDs.audio; if (audioId > 0) { audioTrack.pid = audioId; audioTrack.segmentCodec = parsedPIDs.segmentCodec; } id3Id = parsedPIDs.id3; if (id3Id > 0) { id3Track.pid = id3Id; } if (unknownPID !== null && !pmtParsed) { logger.warn(`MPEG-TS PMT found at ${start} after unknown PID '${unknownPID}'. 
Backtracking to sync byte @${syncOffset} to parse all TS packets.`); unknownPID = null; // we set it to -188, the += 188 in the for loop will reset start to 0 start = syncOffset - 188; } pmtParsed = this.pmtParsed = true; break; } case 0x11: case 0x1fff: break; default: unknownPID = pid; break; } } else { tsPacketErrors++; } } if (tsPacketErrors > 0) { const error = new Error(`Found ${tsPacketErrors} TS packet/s that do not start with 0x47`); this.observer.emit(Events.ERROR, Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, error, reason: error.message }); } videoTrack.pesData = avcData; audioTrack.pesData = audioData; id3Track.pesData = id3Data; const demuxResult = { audioTrack, videoTrack, id3Track, textTrack }; if (flush) { this.extractRemainingSamples(demuxResult); } return demuxResult; } flush() { const { remainderData } = this; this.remainderData = null; let result; if (remainderData) { result = this.demux(remainderData, -1, false, true); } else { result = { videoTrack: this._avcTrack, audioTrack: this._audioTrack, id3Track: this._id3Track, textTrack: this._txtTrack }; } this.extractRemainingSamples(result); if (this.sampleAes) { return this.decrypt(result, this.sampleAes); } return result; } extractRemainingSamples(demuxResult) { const { audioTrack, videoTrack, id3Track, textTrack } = demuxResult; const avcData = videoTrack.pesData; const audioData = audioTrack.pesData; const id3Data = id3Track.pesData; // try to parse last PES packets let pes; if (avcData && (pes = parsePES(avcData))) { this.parseAVCPES(videoTrack, textTrack, pes, true); videoTrack.pesData = null; } else { // either avcData null or PES truncated, keep it for next frag parsing videoTrack.pesData = avcData; } if (audioData && (pes = parsePES(audioData))) { switch (audioTrack.segmentCodec) { case 'aac': this.parseAACPES(audioTrack, pes); break; case 'mp3': this.parseMPEGPES(audioTrack, pes); break; } audioTrack.pesData = null; } else { if (audioData != null && audioData.size) { logger.log('last AAC PES packet truncated,might overlap between fragments'); } // either audioData null or PES truncated, keep it for next frag parsing audioTrack.pesData = audioData; } if (id3Data && (pes = parsePES(id3Data))) { this.parseID3PES(id3Track, pes); id3Track.pesData = null; } else { // either id3Data null or PES truncated, keep it for next frag parsing id3Track.pesData = id3Data; } } demuxSampleAes(data, keyData, timeOffset) { const demuxResult = this.demux(data, timeOffset, true, !this.config.progressive); const sampleAes = this.sampleAes = new SampleAesDecrypter(this.observer, this.config, keyData); return this.decrypt(demuxResult, sampleAes); } decrypt(demuxResult, sampleAes) { return new Promise(resolve => { const { audioTrack, videoTrack } = demuxResult; if (audioTrack.samples && audioTrack.segmentCodec === 'aac') { sampleAes.decryptAacSamples(audioTrack.samples, 0, () => { if (videoTrack.samples) { sampleAes.decryptAvcSamples(videoTrack.samples, 0, 0, () => { resolve(demuxResult); }); } else { resolve(demuxResult); } }); } else if (videoTrack.samples) { sampleAes.decryptAvcSamples(videoTrack.samples, 0, 0, () => { resolve(demuxResult); }); } }); } destroy() { this._duration = 0; } parseAVCPES(track, textTrack, pes, last) { const units = this.parseAVCNALu(track, pes.data); let avcSample = this.avcSample; let push; let spsfound = false; // free pes.data to save up some memory pes.data = null; // if new NAL units found and last sample still there, let's push ... 
// this helps parsing streams with missing AUD (only do this if AUD never found) if (avcSample && units.length && !track.audFound) { pushAccessUnit(avcSample, track); avcSample = this.avcSample = createAVCSample(false, pes.pts, pes.dts, ''); } units.forEach(unit => { var _avcSample2; switch (unit.type) { // NDR case 1: { let iskey = false; push = true; const data = unit.data; // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...) if (spsfound && data.length > 4) { // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR const sliceType = new ExpGolomb(data).readSliceType(); // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples. // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice. // I slice: A slice that is not an SI slice that is decoded using intra prediction only. // if (sliceType === 2 || sliceType === 7) { if (sliceType === 2 || sliceType === 4 || sliceType === 7 || sliceType === 9) { iskey = true; } } if (iskey) { var _avcSample; // if we have non-keyframe data already, that cannot belong to the same frame as a keyframe, so force a push if ((_avcSample = avcSample) != null && _avcSample.frame && !avcSample.key) { pushAccessUnit(avcSample, track); avcSample = this.avcSample = null; } } if (!avcSample) { avcSample = this.avcSample = createAVCSample(true, pes.pts, pes.dts, ''); } avcSample.frame = true; avcSample.key = iskey; break; // IDR } case 5: push = true; // handle PES not starting with AUD // if we have non-keyframe data already, that cannot belong to the same frame as a keyframe, so force a push if ((_avcSample2 = avcSample) != null && _avcSample2.frame && !avcSample.key) { pushAccessUnit(avcSample, track); avcSample = this.avcSample = null; } if (!avcSample) { avcSample = this.avcSample = createAVCSample(true, pes.pts, pes.dts, ''); } avcSample.key = true; avcSample.frame = true; break; // SEI case 6: { push = true; parseSEIMessageFromNALu(unit.data, 1, pes.pts, textTrack.samples); break; // SPS } case 7: push = true; spsfound = true; if (!track.sps) { const sps = unit.data; const expGolombDecoder = new ExpGolomb(sps); const config = expGolombDecoder.readSPS(); track.width = config.width; track.height = config.height; track.pixelRatio = config.pixelRatio; track.sps = [sps]; track.duration = this._duration; const codecarray = sps.subarray(1, 4); let codecstring = 'avc1.'; for (let i = 0; i < 3; i++) { let h = codecarray[i].toString(16); if (h.length < 2) { h = '0' + h; } codecstring += h; } track.codec = codecstring; } break; // PPS case 8: push = true; if (!track.pps) { track.pps = [unit.data]; } break; // AUD case 9: push = false; track.audFound = true; if (avcSample) { pushAccessUnit(avcSample, track); } avcSample = this.avcSample = createAVCSample(false, pes.pts, pes.dts, ''); break; // Filler Data case 12: push = true; break; default: push = false; if (avcSample) { avcSample.debug += 'unknown NAL ' + unit.type + ' '; } break; } if (avcSample && push) { const units = avcSample.units; units.push(unit); } }); // if last PES packet, push samples if (last && avcSample) { pushAccessUnit(avcSample, track); this.avcSample = null; } } getLastNalUnit(samples) { var _avcSample3; let avcSample = this.avcSample; let lastUnit; // try to fallback to previous sample if current one is empty if 
(!avcSample || avcSample.units.length === 0) { avcSample = samples[samples.length - 1]; } if ((_avcSample3 = avcSample) != null && _avcSample3.units) { const units = avcSample.units; lastUnit = units[units.length - 1]; } return lastUnit; } parseAVCNALu(track, array) { const len = array.byteLength; let state = track.naluState || 0; const lastState = state; const units = []; let i = 0; let value; let overflow; let unitType; let lastUnitStart = -1; let lastUnitType = 0; // logger.log('PES:' + Hex.hexDump(array)); if (state === -1) { // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet lastUnitStart = 0; // NALu type is value read from offset 0 lastUnitType = array[0] & 0x1f; state = 0; i = 1; } while (i < len) { value = array[i++]; // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case if (!state) { state = value ? 0 : 1; continue; } if (state === 1) { state = value ? 0 : 2; continue; } // here we have state either equal to 2 or 3 if (!value) { state = 3; } else if (value === 1) { if (lastUnitStart >= 0) { const unit = { data: array.subarray(lastUnitStart, i - state - 1), type: lastUnitType }; // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength); units.push(unit); } else { // lastUnitStart is undefined => this is the first start code found in this PES packet // first check if start code delimiter is overlapping between 2 PES packets, // ie it started in last packet (lastState not zero) // and ended at the beginning of this PES packet (i <= 4 - lastState) const lastUnit = this.getLastNalUnit(track.samples); if (lastUnit) { if (lastState && i <= 4 - lastState) { // start delimiter overlapping between PES packets // strip start delimiter bytes from the end of last NAL unit // check if lastUnit had a state different from zero if (lastUnit.state) { // strip last bytes lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState); } } // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit. overflow = i - state - 1; if (overflow > 0) { // logger.log('first NALU found with overflow:' + overflow); const tmp = new Uint8Array(lastUnit.data.byteLength + overflow); tmp.set(lastUnit.data, 0); tmp.set(array.subarray(0, overflow), lastUnit.data.byteLength); lastUnit.data = tmp; lastUnit.state = 0; } } } // check if we can read unit type if (i < len) { unitType = array[i] & 0x1f; // logger.log('find NALU @ offset:' + i + ',type:' + unitType); lastUnitStart = i; lastUnitType = unitType; state = 0; } else { // not enough byte to read unit type. 
let's read it on next PES parsing state = -1; } } else { state = 0; } } if (lastUnitStart >= 0 && state >= 0) { const unit = { data: array.subarray(lastUnitStart, len), type: lastUnitType, state: state }; units.push(unit); // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state); } // no NALu found if (units.length === 0) { // append pes.data to previous NAL unit const lastUnit = this.getLastNalUnit(track.samples); if (lastUnit) { const tmp = new Uint8Array(lastUnit.data.byteLength + array.byteLength); tmp.set(lastUnit.data, 0); tmp.set(array, lastUnit.data.byteLength); lastUnit.data = tmp; } } track.naluState = state; return units; } parseAACPES(track, pes) { let startOffset = 0; const aacOverFlow = this.aacOverFlow; let data = pes.data; if (aacOverFlow) { this.aacOverFlow = null; const frameMissingBytes = aacOverFlow.missing; const sampleLength = aacOverFlow.sample.unit.byteLength; // logger.log(`AAC: append overflowing ${sampleLength} bytes to beginning of new PES`); if (frameMissingBytes === -1) { const tmp = new Uint8Array(sampleLength + data.byteLength); tmp.set(aacOverFlow.sample.unit, 0); tmp.set(data, sampleLength); data = tmp; } else { const frameOverflowBytes = sampleLength - frameMissingBytes; aacOverFlow.sample.unit.set(data.subarray(0, frameMissingBytes), frameOverflowBytes); track.samples.push(aacOverFlow.sample); startOffset = aacOverFlow.missing; } } // look for ADTS header (0xFFFx) let offset; let len; for (offset = startOffset, len = data.length; offset < len - 1; offset++) { if (isHeader$1(data, offset)) { break; } } // if ADTS header does not start straight from the beginning of the PES payload, raise an error if (offset !== startOffset) { let reason; const recoverable = offset < len - 1; if (recoverable) { reason = `AAC PES did not start with ADTS header,offset:${offset}`; } else { reason = 'No ADTS header found in AAC PES'; } const error = new Error(reason); logger.warn(`parsing error: ${reason}`); this.observer.emit(Events.ERROR, Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, levelRetry: recoverable, error, reason }); if (!recoverable) { return; } } initTrackConfig(track, this.observer, data, offset, this.audioCodec); let pts; if (pes.pts !== undefined) { pts = pes.pts; } else if (aacOverFlow) { // if last AAC frame is overflowing, we should ensure timestamps are contiguous: // first sample PTS should be equal to last sample PTS + frameDuration const frameDuration = getFrameDuration(track.samplerate); pts = aacOverFlow.sample.pts + frameDuration; } else { logger.warn('[tsdemuxer]: AAC PES unknown PTS'); return; } // scan for aac samples let frameIndex = 0; let frame; while (offset < len) { frame = appendFrame$1(track, data, offset, pts, frameIndex); offset += frame.length; if (!frame.missing) { frameIndex++; for (; offset < len - 1; offset++) { if (isHeader$1(data, offset)) { break; } } } else { this.aacOverFlow = frame; break; } } } parseMPEGPES(track, pes) { const data = pes.data; const length = data.length; let frameIndex = 0; let offset = 0; const pts = pes.pts; if (pts === undefined) { logger.warn('[tsdemuxer]: MPEG PES unknown PTS'); return; } while (offset < length) { if (isHeader(data, offset)) { const frame = appendFrame(track, data, offset, pts, frameIndex); if (frame) { offset += frame.length; frameIndex++; } else { // logger.log('Unable to parse Mpeg audio frame'); break; } } else { // nothing found, keep looking offset++; } } } parseID3PES(id3Track, pes) 
{ if (pes.pts === undefined) { logger.warn('[tsdemuxer]: ID3 PES unknown PTS'); return; } const id3Sample = _extends({}, pes, { type: this._avcTrack ? MetadataSchema.emsg : MetadataSchema.audioId3, duration: Number.POSITIVE_INFINITY }); id3Track.samples.push(id3Sample); } } function createAVCSample(key, pts, dts, debug) { return { key, frame: false, pts, dts, units: [], debug, length: 0 }; } function parsePID(data, offset) { // pid is a 13-bit field starting at the last bit of TS[1] return ((data[offset + 1] & 0x1f) << 8) + data[offset + 2]; } function parsePAT(data, offset) { // skip the PSI header and parse the first PMT entry return (data[offset + 10] & 0x1f) << 8 | data[offset + 11]; } function parsePMT(data, offset, typeSupported, isSampleAes) { const result = { audio: -1, avc: -1, id3: -1, segmentCodec: 'aac' }; const sectionLength = (data[offset + 1] & 0x0f) << 8 | data[offset + 2]; const tableEnd = offset + 3 + sectionLength - 4; // to determine where the table is, we have to figure out how // long the program info descriptors are const programInfoLength = (data[offset + 10] & 0x0f) << 8 | data[offset + 11]; // advance the offset to the first entry in the mapping table offset += 12 + programInfoLength; while (offset < tableEnd) { const pid = parsePID(data, offset); switch (data[offset]) { case 0xcf: // SAMPLE-AES AAC if (!isSampleAes) { logger.log('ADTS AAC with AES-128-CBC frame encryption found in unencrypted stream'); break; } /* falls through */ case 0x0f: // ISO/IEC 13818-7 ADTS AAC (MPEG-2 lower bit-rate audio) // logger.log('AAC PID:' + pid); if (result.audio === -1) { result.audio = pid; } break; // Packetized metadata (ID3) case 0x15: // logger.log('ID3 PID:' + pid); if (result.id3 === -1) { result.id3 = pid; } break; case 0xdb: // SAMPLE-AES AVC if (!isSampleAes) { logger.log('H.264 with AES-128-CBC slice encryption found in unencrypted stream'); break; } /* falls through */ case 0x1b: // ITU-T Rec. H.264 and ISO/IEC 14496-10 (lower bit-rate video) // logger.log('AVC PID:' + pid); if (result.avc === -1) { result.avc = pid; } break; // ISO/IEC 11172-3 (MPEG-1 audio) // or ISO/IEC 13818-3 (MPEG-2 halved sample rate audio) case 0x03: case 0x04: // logger.log('MPEG PID:' + pid); if (typeSupported.mpeg !== true && typeSupported.mp3 !== true) { logger.log('MPEG audio found, not supported in this browser'); } else if (result.audio === -1) { result.audio = pid; result.segmentCodec = 'mp3'; } break; case 0x24: logger.warn('Unsupported HEVC stream type found'); break; } // move to the next table entry // skip past the elementary stream descriptors, if present offset += ((data[offset + 3] & 0x0f) << 8 | data[offset + 4]) + 5; } return result; } function parsePES(stream) { let i = 0; let frag; let pesLen; let pesHdrLen; let pesPts; let pesDts; const data = stream.data; // safety check if (!stream || stream.size === 0) { return null; } // we might need up to 19 bytes to read PES header // if first chunk of data is less than 19 bytes, let's merge it with following ones until we get 19 bytes // usually only one merge is needed (and this is rare ...) 
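  /* Reading aid (not executed): PES header byte offsets that explain the "19 bytes" above —
       frag[0..2]   packet_start_code_prefix, must be 0x000001 ("pesPrefix" below)
       frag[3]      stream_id
       frag[4..5]   PES_packet_length ("pesLen" below)
       frag[6]      marker / scrambling / priority flags
       frag[7]      PTS_DTS_flags in the two top bits ("pesFlags" below)
       frag[8]      PES_header_data_length ("pesHdrLen" below)
       frag[9..13]  33-bit PTS, present when the 0x80 flag is set
       frag[14..18] 33-bit DTS, present when the 0x40 flag is also set
     so a header carrying both PTS and DTS needs 9 + 5 + 5 = 19 bytes. */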
while (data[0].length < 19 && data.length > 1) { const newData = new Uint8Array(data[0].length + data[1].length); newData.set(data[0]); newData.set(data[1], data[0].length); data[0] = newData; data.splice(1, 1); } // retrieve PTS/DTS from first fragment frag = data[0]; const pesPrefix = (frag[0] << 16) + (frag[1] << 8) + frag[2]; if (pesPrefix === 1) { pesLen = (frag[4] << 8) + frag[5]; // if PES parsed length is not zero and greater than total received length, stop parsing. PES might be truncated // minus 6 : PES header size if (pesLen && pesLen > stream.size - 6) { return null; } const pesFlags = frag[7]; if (pesFlags & 0xc0) { /* PES header described here : http://dvd.sourceforge.net/dvdinfo/pes-hdr.html as PTS / DTS is 33 bit we cannot use bitwise operator in JS, as Bitwise operators treat their operands as a sequence of 32 bits */ pesPts = (frag[9] & 0x0e) * 536870912 + // 1 << 29 (frag[10] & 0xff) * 4194304 + // 1 << 22 (frag[11] & 0xfe) * 16384 + // 1 << 14 (frag[12] & 0xff) * 128 + // 1 << 7 (frag[13] & 0xfe) / 2; if (pesFlags & 0x40) { pesDts = (frag[14] & 0x0e) * 536870912 + // 1 << 29 (frag[15] & 0xff) * 4194304 + // 1 << 22 (frag[16] & 0xfe) * 16384 + // 1 << 14 (frag[17] & 0xff) * 128 + // 1 << 7 (frag[18] & 0xfe) / 2; if (pesPts - pesDts > 60 * 90000) { logger.warn(`${Math.round((pesPts - pesDts) / 90000)}s delta between PTS and DTS, align them`); pesPts = pesDts; } } else { pesDts = pesPts; } } pesHdrLen = frag[8]; // 9 bytes : 6 bytes for PES header + 3 bytes for PES extension let payloadStartOffset = pesHdrLen + 9; if (stream.size <= payloadStartOffset) { return null; } stream.size -= payloadStartOffset; // reassemble PES packet const pesData = new Uint8Array(stream.size); for (let j = 0, dataLen = data.length; j < dataLen; j++) { frag = data[j]; let len = frag.byteLength; if (payloadStartOffset) { if (payloadStartOffset > len) { // trim full frag if PES header bigger than frag payloadStartOffset -= len; continue; } else { // trim partial frag if PES header smaller than frag frag = frag.subarray(payloadStartOffset); len -= payloadStartOffset; payloadStartOffset = 0; } } pesData.set(frag, i); i += len; } if (pesLen) { // payload size : remove PES header + PES extension pesLen -= pesHdrLen + 3; } return { data: pesData, pts: pesPts, dts: pesDts, len: pesLen }; } return null; } function pushAccessUnit(avcSample, avcTrack) { if (avcSample.units.length && avcSample.frame) { // if sample does not have PTS/DTS, patch with last sample PTS/DTS if (avcSample.pts === undefined) { const samples = avcTrack.samples; const nbSamples = samples.length; if (nbSamples) { const lastSample = samples[nbSamples - 1]; avcSample.pts = lastSample.pts; avcSample.dts = lastSample.dts; } else { // dropping samples, no timestamp found avcTrack.dropped++; return; } } avcTrack.samples.push(avcSample); } if (avcSample.debug.length) { logger.log(avcSample.pts + '/' + avcSample.dts + ':' + avcSample.debug); } } /** * MP3 demuxer */ class MP3Demuxer extends BaseAudioDemuxer { resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) { super.resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration); this._audioTrack = { container: 'audio/mpeg', type: 'audio', id: 2, pid: -1, sequenceNumber: 0, segmentCodec: 'mp3', samples: [], manifestCodec: audioCodec, duration: trackDuration, inputTimeScale: 90000, dropped: 0 }; } static probe(data) { if (!data) { return false; } // check if data contains ID3 timestamp and MPEG sync word // Look for MPEG header | 1111 1111 | 111X XYZX | where X can be 
either 0 or 1 and Y or Z should be 1 // Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III) // More info http://www.mp3-tech.org/programmer/frame_header.html const id3Data = getID3Data(data, 0) || []; let offset = id3Data.length; for (let length = data.length; offset < length; offset++) { if (probe(data, offset)) { logger.log('MPEG Audio sync word found !'); return true; } } return false; } canParse(data, offset) { return canParse(data, offset); } appendFrame(track, data, offset) { if (this.basePTS === null) { return; } return appendFrame(track, data, offset, this.basePTS, this.frameIndex); } } /** * AAC helper */ class AAC$1 { static getSilentFrame(codec, channelCount) { switch (codec) { case 'mp4a.40.2': if (channelCount === 1) { return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x23, 0x80]); } else if (channelCount === 2) { return new Uint8Array([0x21, 0x00, 0x49, 0x90, 0x02, 0x19, 0x00, 0x23, 0x80]); } else if (channelCount === 3) { return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x8e]); } else if (channelCount === 4) { return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x80, 0x2c, 0x80, 0x08, 0x02, 0x38]); } else if (channelCount === 5) { return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x38]); } else if (channelCount === 6) { return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x00, 0xb2, 0x00, 0x20, 0x08, 0xe0]); } break; // handle HE-AAC below (mp4a.40.5 / mp4a.40.29) default: if (channelCount === 1) { // ffmpeg -y -f lavfi -i "aevalsrc=0:d=0.05" -c:a libfdk_aac -profile:a aac_he -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x4e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x1c, 0x6, 0xf1, 0xc1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]); } else if (channelCount === 2) { // ffmpeg -y -f lavfi -i "aevalsrc=0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]); } else if (channelCount === 3) { // ffmpeg -y -f lavfi -i "aevalsrc=0|0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]); } break; } return undefined; } } /** * Generate MP4 Box */ const UINT32_MAX = Math.pow(2, 32) - 1; class MP4 { static init() { MP4.types = { avc1: [], 
// codingname avcC: [], btrt: [], dinf: [], dref: [], esds: [], ftyp: [], hdlr: [], mdat: [], mdhd: [], mdia: [], mfhd: [], minf: [], moof: [], moov: [], mp4a: [], '.mp3': [], mvex: [], mvhd: [], pasp: [], sdtp: [], stbl: [], stco: [], stsc: [], stsd: [], stsz: [], stts: [], tfdt: [], tfhd: [], traf: [], trak: [], trun: [], trex: [], tkhd: [], vmhd: [], smhd: [] }; let i; for (i in MP4.types) { if (MP4.types.hasOwnProperty(i)) { MP4.types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)]; } } const videoHdlr = new Uint8Array([0x00, // version 0 0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, 0x00, // pre_defined 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide' 0x00, 0x00, 0x00, 0x00, // reserved 0x00, 0x00, 0x00, 0x00, // reserved 0x00, 0x00, 0x00, 0x00, // reserved 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler' ]); const audioHdlr = new Uint8Array([0x00, // version 0 0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, 0x00, // pre_defined 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun' 0x00, 0x00, 0x00, 0x00, // reserved 0x00, 0x00, 0x00, 0x00, // reserved 0x00, 0x00, 0x00, 0x00, // reserved 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler' ]); MP4.HDLR_TYPES = { video: videoHdlr, audio: audioHdlr }; const dref = new Uint8Array([0x00, // version 0 0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, 0x01, // entry_count 0x00, 0x00, 0x00, 0x0c, // entry_size 0x75, 0x72, 0x6c, 0x20, // 'url' type 0x00, // version 0 0x00, 0x00, 0x01 // entry_flags ]); const stco = new Uint8Array([0x00, // version 0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, 0x00 // entry_count ]); MP4.STTS = MP4.STSC = MP4.STCO = stco; MP4.STSZ = new Uint8Array([0x00, // version 0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, 0x00, // sample_size 0x00, 0x00, 0x00, 0x00 // sample_count ]); MP4.VMHD = new Uint8Array([0x00, // version 0x00, 0x00, 0x01, // flags 0x00, 0x00, // graphicsmode 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor ]); MP4.SMHD = new Uint8Array([0x00, // version 0x00, 0x00, 0x00, // flags 0x00, 0x00, // balance 0x00, 0x00 // reserved ]); MP4.STSD = new Uint8Array([0x00, // version 0 0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, 0x01]); // entry_count const majorBrand = new Uint8Array([105, 115, 111, 109]); // isom const avc1Brand = new Uint8Array([97, 118, 99, 49]); // avc1 const minorVersion = new Uint8Array([0, 0, 0, 1]); MP4.FTYP = MP4.box(MP4.types.ftyp, majorBrand, minorVersion, majorBrand, avc1Brand); MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, dref)); } static box(type, ...payload) { let size = 8; let i = payload.length; const len = i; // calculate the total size we need to allocate while (i--) { size += payload[i].byteLength; } const result = new Uint8Array(size); result[0] = size >> 24 & 0xff; result[1] = size >> 16 & 0xff; result[2] = size >> 8 & 0xff; result[3] = size & 0xff; result.set(type, 4); // copy the payload into the result for (i = 0, size = 8; i < len; i++) { // copy payload[i] array @ offset size result.set(payload[i], size); size += payload[i].byteLength; } return result; } static hdlr(type) { return MP4.box(MP4.types.hdlr, MP4.HDLR_TYPES[type]); } static mdat(data) { return MP4.box(MP4.types.mdat, data); } static mdhd(timescale, duration) { duration *= timescale; const upperWordDuration = Math.floor(duration / (UINT32_MAX + 1)); const lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1)); return MP4.box(MP4.types.mdhd, new Uint8Array([0x01, // version 1 0x00, 
0x00, 0x00, // flags 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // creation_time 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, // modification_time timescale >> 24 & 0xff, timescale >> 16 & 0xff, timescale >> 8 & 0xff, timescale & 0xff, // timescale upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x55, 0xc4, // 'und' language (undetermined) 0x00, 0x00])); } static mdia(track) { return MP4.box(MP4.types.mdia, MP4.mdhd(track.timescale, track.duration), MP4.hdlr(track.type), MP4.minf(track)); } static mfhd(sequenceNumber) { return MP4.box(MP4.types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags sequenceNumber >> 24, sequenceNumber >> 16 & 0xff, sequenceNumber >> 8 & 0xff, sequenceNumber & 0xff // sequence_number ])); } static minf(track) { if (track.type === 'audio') { return MP4.box(MP4.types.minf, MP4.box(MP4.types.smhd, MP4.SMHD), MP4.DINF, MP4.stbl(track)); } else { return MP4.box(MP4.types.minf, MP4.box(MP4.types.vmhd, MP4.VMHD), MP4.DINF, MP4.stbl(track)); } } static moof(sn, baseMediaDecodeTime, track) { return MP4.box(MP4.types.moof, MP4.mfhd(sn), MP4.traf(track, baseMediaDecodeTime)); } static moov(tracks) { let i = tracks.length; const boxes = []; while (i--) { boxes[i] = MP4.trak(tracks[i]); } return MP4.box.apply(null, [MP4.types.moov, MP4.mvhd(tracks[0].timescale, tracks[0].duration)].concat(boxes).concat(MP4.mvex(tracks))); } static mvex(tracks) { let i = tracks.length; const boxes = []; while (i--) { boxes[i] = MP4.trex(tracks[i]); } return MP4.box.apply(null, [MP4.types.mvex, ...boxes]); } static mvhd(timescale, duration) { duration *= timescale; const upperWordDuration = Math.floor(duration / (UINT32_MAX + 1)); const lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1)); const bytes = new Uint8Array([0x01, // version 1 0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // creation_time 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, // modification_time timescale >> 24 & 0xff, timescale >> 16 & 0xff, timescale >> 8 & 0xff, timescale & 0xff, // timescale upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x00, 0x01, 0x00, 0x00, // 1.0 rate 0x01, 0x00, // 1.0 volume 0x00, 0x00, // reserved 0x00, 0x00, 0x00, 0x00, // reserved 0x00, 0x00, 0x00, 0x00, // reserved 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined 0xff, 0xff, 0xff, 0xff // next_track_ID ]); return MP4.box(MP4.types.mvhd, bytes); } static sdtp(track) { const samples = track.samples || []; const bytes = new Uint8Array(4 + samples.length); let i; let flags; // leave the full box header (4 bytes) all zero // write the sample table for (i = 0; i < samples.length; i++) { flags = samples[i].flags; bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy; } return MP4.box(MP4.types.sdtp, bytes); } static stbl(track) { return MP4.box(MP4.types.stbl, 
MP4.stsd(track), MP4.box(MP4.types.stts, MP4.STTS), MP4.box(MP4.types.stsc, MP4.STSC), MP4.box(MP4.types.stsz, MP4.STSZ), MP4.box(MP4.types.stco, MP4.STCO)); } static avc1(track) { let sps = []; let pps = []; let i; let data; let len; // assemble the SPSs for (i = 0; i < track.sps.length; i++) { data = track.sps[i]; len = data.byteLength; sps.push(len >>> 8 & 0xff); sps.push(len & 0xff); // SPS sps = sps.concat(Array.prototype.slice.call(data)); } // assemble the PPSs for (i = 0; i < track.pps.length; i++) { data = track.pps[i]; len = data.byteLength; pps.push(len >>> 8 & 0xff); pps.push(len & 0xff); pps = pps.concat(Array.prototype.slice.call(data)); } const avcc = MP4.box(MP4.types.avcC, new Uint8Array([0x01, // version sps[3], // profile sps[4], // profile compat sps[5], // level 0xfc | 3, // lengthSizeMinusOne, hard-coded to 4 bytes 0xe0 | track.sps.length // 3bit reserved (111) + numOfSequenceParameterSets ].concat(sps).concat([track.pps.length // numOfPictureParameterSets ]).concat(pps))); // "PPS" const width = track.width; const height = track.height; const hSpacing = track.pixelRatio[0]; const vSpacing = track.pixelRatio[1]; return MP4.box(MP4.types.avc1, new Uint8Array([0x00, 0x00, 0x00, // reserved 0x00, 0x00, 0x00, // reserved 0x00, 0x01, // data_reference_index 0x00, 0x00, // pre_defined 0x00, 0x00, // reserved 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined width >> 8 & 0xff, width & 0xff, // width height >> 8 & 0xff, height & 0xff, // height 0x00, 0x48, 0x00, 0x00, // horizresolution 0x00, 0x48, 0x00, 0x00, // vertresolution 0x00, 0x00, 0x00, 0x00, // reserved 0x00, 0x01, // frame_count 0x12, 0x64, 0x61, 0x69, 0x6c, // dailymotion/hls.js 0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname 0x00, 0x18, // depth = 24 0x11, 0x11]), // pre_defined = -1 avcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate 0x00, 0x2d, 0xc6, 0xc0])), // avgBitrate MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24, // hSpacing hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24, // vSpacing vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff]))); } static esds(track) { const configlen = track.config.length; return new Uint8Array([0x00, // version 0 0x00, 0x00, 0x00, // flags 0x03, // descriptor_type 0x17 + configlen, // length 0x00, 0x01, // es_id 0x00, // stream_priority 0x04, // descriptor_type 0x0f + configlen, // length 0x40, // codec : mpeg4_audio 0x15, // stream_type 0x00, 0x00, 0x00, // buffer_size 0x00, 0x00, 0x00, 0x00, // maxBitrate 0x00, 0x00, 0x00, 0x00, // avgBitrate 0x05 // descriptor_type ].concat([configlen]).concat(track.config).concat([0x06, 0x01, 0x02])); // GASpecificConfig)); // length + audio config descriptor } static mp4a(track) { const samplerate = track.samplerate; return MP4.box(MP4.types.mp4a, new Uint8Array([0x00, 0x00, 0x00, // reserved 0x00, 0x00, 0x00, // reserved 0x00, 0x01, // data_reference_index 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved 0x00, track.channelCount, // channelcount 0x00, 0x10, // sampleSize:16bits 0x00, 0x00, 0x00, 0x00, // reserved2 samplerate >> 8 & 0xff, samplerate & 0xff, // 0x00, 0x00]), MP4.box(MP4.types.esds, MP4.esds(track))); } static mp3(track) { const samplerate = track.samplerate; return MP4.box(MP4.types['.mp3'], new Uint8Array([0x00, 0x00, 0x00, // 
reserved 0x00, 0x00, 0x00, // reserved 0x00, 0x01, // data_reference_index 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved 0x00, track.channelCount, // channelcount 0x00, 0x10, // sampleSize:16bits 0x00, 0x00, 0x00, 0x00, // reserved2 samplerate >> 8 & 0xff, samplerate & 0xff, // 0x00, 0x00])); } static stsd(track) { if (track.type === 'audio') { if (track.segmentCodec === 'mp3' && track.codec === 'mp3') { return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp3(track)); } return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track)); } else { return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track)); } } static tkhd(track) { const id = track.id; const duration = track.duration * track.timescale; const width = track.width; const height = track.height; const upperWordDuration = Math.floor(duration / (UINT32_MAX + 1)); const lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1)); return MP4.box(MP4.types.tkhd, new Uint8Array([0x01, // version 1 0x00, 0x00, 0x07, // flags 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // creation_time 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, // modification_time id >> 24 & 0xff, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff, // track_ID 0x00, 0x00, 0x00, 0x00, // reserved upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved 0x00, 0x00, // layer 0x00, 0x00, // alternate_group 0x00, 0x00, // non-audio track volume 0x00, 0x00, // reserved 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix width >> 8 & 0xff, width & 0xff, 0x00, 0x00, // width height >> 8 & 0xff, height & 0xff, 0x00, 0x00 // height ])); } static traf(track, baseMediaDecodeTime) { const sampleDependencyTable = MP4.sdtp(track); const id = track.id; const upperWordBaseMediaDecodeTime = Math.floor(baseMediaDecodeTime / (UINT32_MAX + 1)); const lowerWordBaseMediaDecodeTime = Math.floor(baseMediaDecodeTime % (UINT32_MAX + 1)); return MP4.box(MP4.types.traf, MP4.box(MP4.types.tfhd, new Uint8Array([0x00, // version 0 0x00, 0x00, 0x00, // flags id >> 24, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff // track_ID ])), MP4.box(MP4.types.tfdt, new Uint8Array([0x01, // version 1 0x00, 0x00, 0x00, // flags upperWordBaseMediaDecodeTime >> 24, upperWordBaseMediaDecodeTime >> 16 & 0xff, upperWordBaseMediaDecodeTime >> 8 & 0xff, upperWordBaseMediaDecodeTime & 0xff, lowerWordBaseMediaDecodeTime >> 24, lowerWordBaseMediaDecodeTime >> 16 & 0xff, lowerWordBaseMediaDecodeTime >> 8 & 0xff, lowerWordBaseMediaDecodeTime & 0xff])), MP4.trun(track, sampleDependencyTable.length + 16 + // tfhd 20 + // tfdt 8 + // traf header 16 + // mfhd 8 + // moof header 8), // mdat header sampleDependencyTable); } /** * Generate a track box. 
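   *
   * Sketch of the track fields this box tree reads (the literal values below are
   * illustrative assumptions only; `spsNalu` / `ppsNalu` stand for raw SPS and PPS
   * NAL units as Uint8Arrays):
   * @example
   * // video: tkhd/mdhd use id, timescale, duration, width and height;
   * // stsd -> avc1 additionally uses sps, pps and pixelRatio.
   * const initSeg = MP4.initSegment([{
   *   id: 1, type: 'video', timescale: 90000, duration: 0,
   *   width: 1280, height: 720, pixelRatio: [1, 1],
   *   sps: [spsNalu], pps: [ppsNalu]
   * }]);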
* @param track a track definition */ static trak(track) { track.duration = track.duration || 0xffffffff; return MP4.box(MP4.types.trak, MP4.tkhd(track), MP4.mdia(track)); } static trex(track) { const id = track.id; return MP4.box(MP4.types.trex, new Uint8Array([0x00, // version 0 0x00, 0x00, 0x00, // flags id >> 24, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff, // track_ID 0x00, 0x00, 0x00, 0x01, // default_sample_description_index 0x00, 0x00, 0x00, 0x00, // default_sample_duration 0x00, 0x00, 0x00, 0x00, // default_sample_size 0x00, 0x01, 0x00, 0x01 // default_sample_flags ])); } static trun(track, offset) { const samples = track.samples || []; const len = samples.length; const arraylen = 12 + 16 * len; const array = new Uint8Array(arraylen); let i; let sample; let duration; let size; let flags; let cts; offset += 8 + arraylen; array.set([track.type === 'video' ? 0x01 : 0x00, // version 1 for video with signed-int sample_composition_time_offset 0x00, 0x0f, 0x01, // flags len >>> 24 & 0xff, len >>> 16 & 0xff, len >>> 8 & 0xff, len & 0xff, // sample_count offset >>> 24 & 0xff, offset >>> 16 & 0xff, offset >>> 8 & 0xff, offset & 0xff // data_offset ], 0); for (i = 0; i < len; i++) { sample = samples[i]; duration = sample.duration; size = sample.size; flags = sample.flags; cts = sample.cts; array.set([duration >>> 24 & 0xff, duration >>> 16 & 0xff, duration >>> 8 & 0xff, duration & 0xff, // sample_duration size >>> 24 & 0xff, size >>> 16 & 0xff, size >>> 8 & 0xff, size & 0xff, // sample_size flags.isLeading << 2 | flags.dependsOn, flags.isDependedOn << 6 | flags.hasRedundancy << 4 | flags.paddingValue << 1 | flags.isNonSync, flags.degradPrio & 0xf0 << 8, flags.degradPrio & 0x0f, // sample_flags cts >>> 24 & 0xff, cts >>> 16 & 0xff, cts >>> 8 & 0xff, cts & 0xff // sample_composition_time_offset ], 12 + 16 * i); } return MP4.box(MP4.types.trun, array); } static initSegment(tracks) { if (!MP4.types) { MP4.init(); } const movie = MP4.moov(tracks); const result = new Uint8Array(MP4.FTYP.byteLength + movie.byteLength); result.set(MP4.FTYP); result.set(movie, MP4.FTYP.byteLength); return result; } } MP4.types = void 0; MP4.HDLR_TYPES = void 0; MP4.STTS = void 0; MP4.STSC = void 0; MP4.STCO = void 0; MP4.STSZ = void 0; MP4.VMHD = void 0; MP4.SMHD = void 0; MP4.STSD = void 0; MP4.FTYP = void 0; MP4.DINF = void 0; const MPEG_TS_CLOCK_FREQ_HZ = 90000; function toTimescaleFromBase(baseTime, destScale, srcBase = 1, round = false) { const result = baseTime * destScale * srcBase; // equivalent to `(value * scale) / (1 / base)` return round ? 
Math.round(result) : result; } function toTimescaleFromScale(baseTime, destScale, srcScale = 1, round = false) { return toTimescaleFromBase(baseTime, destScale, 1 / srcScale, round); } function toMsFromMpegTsClock(baseTime, round = false) { return toTimescaleFromBase(baseTime, 1000, 1 / MPEG_TS_CLOCK_FREQ_HZ, round); } function toMpegTsClockFromTimescale(baseTime, srcScale = 1) { return toTimescaleFromBase(baseTime, MPEG_TS_CLOCK_FREQ_HZ, 1 / srcScale); } const MAX_SILENT_FRAME_DURATION = 10 * 1000; // 10 seconds const AAC_SAMPLES_PER_FRAME = 1024; const MPEG_AUDIO_SAMPLE_PER_FRAME = 1152; let chromeVersion = null; let safariWebkitVersion = null; class MP4Remuxer { constructor(observer, config, typeSupported, vendor = '') { this.observer = void 0; this.config = void 0; this.typeSupported = void 0; this.ISGenerated = false; this._initPTS = null; this._initDTS = null; this.nextAvcDts = null; this.nextAudioPts = null; this.videoSampleDuration = null; this.isAudioContiguous = false; this.isVideoContiguous = false; this.observer = observer; this.config = config; this.typeSupported = typeSupported; this.ISGenerated = false; if (chromeVersion === null) { const userAgent = navigator.userAgent || ''; const result = userAgent.match(/Chrome\/(\d+)/i); chromeVersion = result ? parseInt(result[1]) : 0; } if (safariWebkitVersion === null) { const result = navigator.userAgent.match(/Safari\/(\d+)/i); safariWebkitVersion = result ? parseInt(result[1]) : 0; } } destroy() {} resetTimeStamp(defaultTimeStamp) { logger.log('[mp4-remuxer]: initPTS & initDTS reset'); this._initPTS = this._initDTS = defaultTimeStamp; } resetNextTimestamp() { logger.log('[mp4-remuxer]: reset next timestamp'); this.isVideoContiguous = false; this.isAudioContiguous = false; } resetInitSegment() { logger.log('[mp4-remuxer]: ISGenerated flag reset'); this.ISGenerated = false; } getVideoStartPts(videoSamples) { let rolloverDetected = false; const startPTS = videoSamples.reduce((minPTS, sample) => { const delta = sample.pts - minPTS; if (delta < -4294967296) { // 2^32, see PTSNormalize for reasoning, but we're hitting a rollover here, and we don't want that to impact the timeOffset calculation rolloverDetected = true; return normalizePts(minPTS, sample.pts); } else if (delta > 0) { return minPTS; } else { return sample.pts; } }, videoSamples[0].pts); if (rolloverDetected) { logger.debug('PTS rollover detected'); } return startPTS; } remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, flush, playlistType) { let video; let audio; let initSegment; let text; let id3; let independent; let audioTimeOffset = timeOffset; let videoTimeOffset = timeOffset; // If we're remuxing audio and video progressively, wait until we've received enough samples for each track before proceeding. // This is done to synchronize the audio and video streams. We know if the current segment will have samples if the "pid" // parameter is greater than -1. The pid is set when the PMT is parsed, which contains the tracks list. // However, if the initSegment has already been generated, or we've reached the end of a segment (flush), // then we can remux one track without waiting for the other. 
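    /* Reading aid (not executed): the gating below in condensed form — a track pair is
       remuxed once each present track has enough samples, or an init segment was already
       generated, or we are flushing:

         canRemuxAvc = (!hasAudio || enoughAudioSamples) &&
                       (!hasVideo || enoughVideoSamples) || this.ISGenerated || flush;

       The object returned at the end of remux() is { audio, video, initSegment,
       independent, text, id3 }; the audio/video entries each carry a moof box (data1),
       an mdat box (data2) and start/end PTS/DTS converted to seconds
       (see remuxVideo / remuxAudio below). */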
const hasAudio = audioTrack.pid > -1; const hasVideo = videoTrack.pid > -1; const length = videoTrack.samples.length; const enoughAudioSamples = audioTrack.samples.length > 0; const enoughVideoSamples = flush && length > 0 || length > 1; const canRemuxAvc = (!hasAudio || enoughAudioSamples) && (!hasVideo || enoughVideoSamples) || this.ISGenerated || flush; if (canRemuxAvc) { if (!this.ISGenerated) { initSegment = this.generateIS(audioTrack, videoTrack, timeOffset, accurateTimeOffset); } const isVideoContiguous = this.isVideoContiguous; let firstKeyFrameIndex = -1; let firstKeyFramePTS; if (enoughVideoSamples) { firstKeyFrameIndex = findKeyframeIndex(videoTrack.samples); if (!isVideoContiguous && this.config.forceKeyFrameOnDiscontinuity) { independent = true; if (firstKeyFrameIndex > 0) { logger.warn(`[mp4-remuxer]: Dropped ${firstKeyFrameIndex} out of ${length} video samples due to a missing keyframe`); const startPTS = this.getVideoStartPts(videoTrack.samples); videoTrack.samples = videoTrack.samples.slice(firstKeyFrameIndex); videoTrack.dropped += firstKeyFrameIndex; videoTimeOffset += (videoTrack.samples[0].pts - startPTS) / videoTrack.inputTimeScale; firstKeyFramePTS = videoTimeOffset; } else if (firstKeyFrameIndex === -1) { logger.warn(`[mp4-remuxer]: No keyframe found out of ${length} video samples`); independent = false; } } } if (this.ISGenerated) { if (enoughAudioSamples && enoughVideoSamples) { // timeOffset is expected to be the offset of the first timestamp of this fragment (first DTS) // if first audio DTS is not aligned with first video DTS then we need to take that into account // when providing timeOffset to remuxAudio / remuxVideo. if we don't do that, there might be a permanent / small // drift between audio and video streams const startPTS = this.getVideoStartPts(videoTrack.samples); const tsDelta = normalizePts(audioTrack.samples[0].pts, startPTS) - startPTS; const audiovideoTimestampDelta = tsDelta / videoTrack.inputTimeScale; audioTimeOffset += Math.max(0, audiovideoTimestampDelta); videoTimeOffset += Math.max(0, -audiovideoTimestampDelta); } // Purposefully remuxing audio before video, so that remuxVideo can use nextAudioPts, which is calculated in remuxAudio. if (enoughAudioSamples) { // if initSegment was generated without audio samples, regenerate it again if (!audioTrack.samplerate) { logger.warn('[mp4-remuxer]: regenerate InitSegment as audio detected'); initSegment = this.generateIS(audioTrack, videoTrack, timeOffset, accurateTimeOffset); } audio = this.remuxAudio(audioTrack, audioTimeOffset, this.isAudioContiguous, accurateTimeOffset, hasVideo || enoughVideoSamples || playlistType === PlaylistLevelType.AUDIO ? videoTimeOffset : undefined); if (enoughVideoSamples) { const audioTrackLength = audio ? 
audio.endPTS - audio.startPTS : 0; // if initSegment was generated without video samples, regenerate it again if (!videoTrack.inputTimeScale) { logger.warn('[mp4-remuxer]: regenerate InitSegment as video detected'); initSegment = this.generateIS(audioTrack, videoTrack, timeOffset, accurateTimeOffset); } video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, audioTrackLength); } } else if (enoughVideoSamples) { video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, 0); } if (video) { video.firstKeyFrame = firstKeyFrameIndex; video.independent = firstKeyFrameIndex !== -1; video.firstKeyFramePTS = firstKeyFramePTS; } } } // Allow ID3 and text to remux, even if more audio/video samples are required if (this.ISGenerated && this._initPTS && this._initDTS) { if (id3Track.samples.length) { id3 = flushTextTrackMetadataCueSamples(id3Track, timeOffset, this._initPTS, this._initDTS); } if (textTrack.samples.length) { text = flushTextTrackUserdataCueSamples(textTrack, timeOffset, this._initPTS); } } return { audio, video, initSegment, independent, text, id3 }; } generateIS(audioTrack, videoTrack, timeOffset, accurateTimeOffset) { const audioSamples = audioTrack.samples; const videoSamples = videoTrack.samples; const typeSupported = this.typeSupported; const tracks = {}; const _initPTS = this._initPTS; let computePTSDTS = !_initPTS || accurateTimeOffset; let container = 'audio/mp4'; let initPTS; let initDTS; let timescale; if (computePTSDTS) { initPTS = initDTS = Infinity; } if (audioTrack.config && audioSamples.length) { // let's use audio sampling rate as MP4 time scale. // rationale is that there is a integer nb of audio frames per audio sample (1024 for AAC) // using audio sampling rate here helps having an integer MP4 frame duration // this avoids potential rounding issue and AV sync issue audioTrack.timescale = audioTrack.samplerate; switch (audioTrack.segmentCodec) { case 'mp3': if (typeSupported.mpeg) { // Chrome and Safari container = 'audio/mpeg'; audioTrack.codec = ''; } else if (typeSupported.mp3) { // Firefox audioTrack.codec = 'mp3'; } break; } tracks.audio = { id: 'audio', container: container, codec: audioTrack.codec, initSegment: audioTrack.segmentCodec === 'mp3' && typeSupported.mpeg ? new Uint8Array(0) : MP4.initSegment([audioTrack]), metadata: { channelCount: audioTrack.channelCount } }; if (computePTSDTS) { timescale = audioTrack.inputTimeScale; if (!_initPTS || timescale !== _initPTS.timescale) { // remember first PTS of this demuxing context. 
for audio, PTS = DTS initPTS = initDTS = audioSamples[0].pts - Math.round(timescale * timeOffset); } else { computePTSDTS = false; } } } if (videoTrack.sps && videoTrack.pps && videoSamples.length) { // let's use input time scale as MP4 video timescale // we use input time scale straight away to avoid rounding issues on frame duration / cts computation videoTrack.timescale = videoTrack.inputTimeScale; tracks.video = { id: 'main', container: 'video/mp4', codec: videoTrack.codec, initSegment: MP4.initSegment([videoTrack]), metadata: { width: videoTrack.width, height: videoTrack.height } }; if (computePTSDTS) { timescale = videoTrack.inputTimeScale; if (!_initPTS || timescale !== _initPTS.timescale) { const startPTS = this.getVideoStartPts(videoSamples); const startOffset = Math.round(timescale * timeOffset); initDTS = Math.min(initDTS, normalizePts(videoSamples[0].dts, startPTS) - startOffset); initPTS = Math.min(initPTS, startPTS - startOffset); } else { computePTSDTS = false; } } } if (Object.keys(tracks).length) { this.ISGenerated = true; if (computePTSDTS) { this._initPTS = { baseTime: initPTS, timescale: timescale }; this._initDTS = { baseTime: initDTS, timescale: timescale }; } else { initPTS = timescale = undefined; } return { tracks, initPTS, timescale }; } } remuxVideo(track, timeOffset, contiguous, audioTrackLength) { const timeScale = track.inputTimeScale; const inputSamples = track.samples; const outputSamples = []; const nbSamples = inputSamples.length; const initPTS = this._initPTS; let nextAvcDts = this.nextAvcDts; let offset = 8; let mp4SampleDuration = this.videoSampleDuration; let firstDTS; let lastDTS; let minPTS = Number.POSITIVE_INFINITY; let maxPTS = Number.NEGATIVE_INFINITY; let sortSamples = false; // if parsed fragment is contiguous with last one, let's use last DTS value as reference if (!contiguous || nextAvcDts === null) { const pts = timeOffset * timeScale; const cts = inputSamples[0].pts - normalizePts(inputSamples[0].dts, inputSamples[0].pts); // if not contiguous, let's use target timeOffset nextAvcDts = pts - cts; } // PTS is coded on 33bits, and can loop from -2^32 to 2^32 // PTSNormalize will make PTS/DTS value monotonic, we use last known DTS value as reference value const initTime = initPTS.baseTime * timeScale / initPTS.timescale; for (let i = 0; i < nbSamples; i++) { const sample = inputSamples[i]; sample.pts = normalizePts(sample.pts - initTime, nextAvcDts); sample.dts = normalizePts(sample.dts - initTime, nextAvcDts); if (sample.dts < inputSamples[i > 0 ? i - 1 : i].dts) { sortSamples = true; } } // sort video samples by DTS then PTS then demux id order if (sortSamples) { inputSamples.sort(function (a, b) { const deltadts = a.dts - b.dts; const deltapts = a.pts - b.pts; return deltadts || deltapts; }); } // Get first/last DTS firstDTS = inputSamples[0].dts; lastDTS = inputSamples[inputSamples.length - 1].dts; // Sample duration (as expected by trun MP4 boxes), should be the delta between sample DTS // set this constant duration as being the avg delta between consecutive DTS. const inputDuration = lastDTS - firstDTS; const averageSampleDuration = inputDuration ? 
Math.round(inputDuration / (nbSamples - 1)) : mp4SampleDuration || track.inputTimeScale / 30; // if fragment are contiguous, detect hole/overlapping between fragments if (contiguous) { // check timestamp continuity across consecutive fragments (this is to remove inter-fragment gap/hole) const delta = firstDTS - nextAvcDts; const foundHole = delta > averageSampleDuration; const foundOverlap = delta < -1; if (foundHole || foundOverlap) { if (foundHole) { logger.warn(`AVC: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected, filling it`); } else { logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected`); } if (!foundOverlap || nextAvcDts >= inputSamples[0].pts) { firstDTS = nextAvcDts; const firstPTS = inputSamples[0].pts - delta; inputSamples[0].dts = firstDTS; inputSamples[0].pts = firstPTS; logger.log(`Video: First PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`); } } } firstDTS = Math.max(0, firstDTS); let nbNalu = 0; let naluLen = 0; for (let i = 0; i < nbSamples; i++) { // compute total/avc sample length and nb of NAL units const sample = inputSamples[i]; const units = sample.units; const nbUnits = units.length; let sampleLen = 0; for (let j = 0; j < nbUnits; j++) { sampleLen += units[j].data.length; } naluLen += sampleLen; nbNalu += nbUnits; sample.length = sampleLen; // ensure sample monotonic DTS sample.dts = Math.max(sample.dts, firstDTS); minPTS = Math.min(sample.pts, minPTS); maxPTS = Math.max(sample.pts, maxPTS); } lastDTS = inputSamples[nbSamples - 1].dts; /* concatenate the video data and construct the mdat in place (need 8 more bytes to fill length and mpdat type) */ const mdatSize = naluLen + 4 * nbNalu + 8; let mdat; try { mdat = new Uint8Array(mdatSize); } catch (err) { this.observer.emit(Events.ERROR, Events.ERROR, { type: ErrorTypes.MUX_ERROR, details: ErrorDetails.REMUX_ALLOC_ERROR, fatal: false, error: err, bytes: mdatSize, reason: `fail allocating video mdat ${mdatSize}` }); return; } const view = new DataView(mdat.buffer); view.setUint32(0, mdatSize); mdat.set(MP4.types.mdat, 4); let stretchedLastFrame = false; let minDtsDelta = Number.POSITIVE_INFINITY; let minPtsDelta = Number.POSITIVE_INFINITY; let maxDtsDelta = Number.NEGATIVE_INFINITY; let maxPtsDelta = Number.NEGATIVE_INFINITY; for (let i = 0; i < nbSamples; i++) { const avcSample = inputSamples[i]; const avcSampleUnits = avcSample.units; let mp4SampleLength = 0; // convert NALU bitstream to MP4 format (prepend NALU with size field) for (let j = 0, nbUnits = avcSampleUnits.length; j < nbUnits; j++) { const unit = avcSampleUnits[j]; const unitData = unit.data; const unitDataLen = unit.data.byteLength; view.setUint32(offset, unitDataLen); offset += 4; mdat.set(unitData, offset); offset += unitDataLen; mp4SampleLength += 4 + unitDataLen; } // expected sample duration is the Decoding Timestamp diff of consecutive samples let ptsDelta; if (i < nbSamples - 1) { mp4SampleDuration = inputSamples[i + 1].dts - avcSample.dts; ptsDelta = inputSamples[i + 1].pts - avcSample.pts; } else { const config = this.config; const lastFrameDuration = i > 0 ? avcSample.dts - inputSamples[i - 1].dts : averageSampleDuration; ptsDelta = i > 0 ? 
avcSample.pts - inputSamples[i - 1].pts : averageSampleDuration; if (config.stretchShortVideoTrack && this.nextAudioPts !== null) { // In some cases, a segment's audio track duration may exceed the video track duration. // Since we've already remuxed audio, and we know how long the audio track is, we look to // see if the delta to the next segment is longer than maxBufferHole. // If so, playback would potentially get stuck, so we artificially inflate // the duration of the last frame to minimize any potential gap between segments. const gapTolerance = Math.floor(config.maxBufferHole * timeScale); const deltaToFrameEnd = (audioTrackLength ? minPTS + audioTrackLength * timeScale : this.nextAudioPts) - avcSample.pts; if (deltaToFrameEnd > gapTolerance) { // We subtract lastFrameDuration from deltaToFrameEnd to try to prevent any video // frame overlap. maxBufferHole should be >> lastFrameDuration anyway. mp4SampleDuration = deltaToFrameEnd - lastFrameDuration; if (mp4SampleDuration < 0) { mp4SampleDuration = lastFrameDuration; } else { stretchedLastFrame = true; } logger.log(`[mp4-remuxer]: It is approximately ${deltaToFrameEnd / 90} ms to the next segment; using duration ${mp4SampleDuration / 90} ms for the last video frame.`); } else { mp4SampleDuration = lastFrameDuration; } } else { mp4SampleDuration = lastFrameDuration; } } const compositionTimeOffset = Math.round(avcSample.pts - avcSample.dts); minDtsDelta = Math.min(minDtsDelta, mp4SampleDuration); maxDtsDelta = Math.max(maxDtsDelta, mp4SampleDuration); minPtsDelta = Math.min(minPtsDelta, ptsDelta); maxPtsDelta = Math.max(maxPtsDelta, ptsDelta); outputSamples.push(new Mp4Sample(avcSample.key, mp4SampleDuration, mp4SampleLength, compositionTimeOffset)); } if (outputSamples.length) { if (chromeVersion) { if (chromeVersion < 70) { // Chrome workaround, mark first sample as being a Random Access Point (keyframe) to avoid sourcebuffer append issue // https://code.google.com/p/chromium/issues/detail?id=229412 const flags = outputSamples[0].flags; flags.dependsOn = 2; flags.isNonSync = 0; } } else if (safariWebkitVersion) { // Fix for "CNN special report, with CC" in test-streams (Safari browser only) // Ignore DTS when frame durations are irregular. Safari MSE does not handle this leading to gaps. if (maxPtsDelta - minPtsDelta < maxDtsDelta - minDtsDelta && averageSampleDuration / maxDtsDelta < 0.025 && outputSamples[0].cts === 0) { logger.warn('Found irregular gaps in sample duration. Using PTS instead of DTS to determine MP4 sample duration.'); let dts = firstDTS; for (let i = 0, len = outputSamples.length; i < len; i++) { const nextDts = dts + outputSamples[i].duration; const pts = dts + outputSamples[i].cts; if (i < len - 1) { const nextPts = nextDts + outputSamples[i + 1].cts; outputSamples[i].duration = nextPts - pts; } else { outputSamples[i].duration = i ? outputSamples[i - 1].duration : averageSampleDuration; } outputSamples[i].cts = 0; dts = nextDts; } } } } // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale) mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? 
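// the two branches above are browser workarounds, roughly:
//   Chrome < 70 : force the first sample's flags to look like a keyframe (dependsOn = 2,
//                 isNonSync = 0) so SourceBuffer.appendBuffer does not reject the segment
//   Safari      : when sample durations are irregular, recompute durations from PTS and zero
//                 the cts offsets, since Safari's MSE otherwise leaves gaps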
averageSampleDuration : mp4SampleDuration; this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration; this.videoSampleDuration = mp4SampleDuration; this.isVideoContiguous = true; const moof = MP4.moof(track.sequenceNumber++, firstDTS, _extends({}, track, { samples: outputSamples })); const type = 'video'; const data = { data1: moof, data2: mdat, startPTS: minPTS / timeScale, endPTS: (maxPTS + mp4SampleDuration) / timeScale, startDTS: firstDTS / timeScale, endDTS: nextAvcDts / timeScale, type, hasAudio: false, hasVideo: true, nb: outputSamples.length, dropped: track.dropped }; track.samples = []; track.dropped = 0; return data; } remuxAudio(track, timeOffset, contiguous, accurateTimeOffset, videoTimeOffset) { const inputTimeScale = track.inputTimeScale; const mp4timeScale = track.samplerate ? track.samplerate : inputTimeScale; const scaleFactor = inputTimeScale / mp4timeScale; const mp4SampleDuration = track.segmentCodec === 'aac' ? AAC_SAMPLES_PER_FRAME : MPEG_AUDIO_SAMPLE_PER_FRAME; const inputSampleDuration = mp4SampleDuration * scaleFactor; const initPTS = this._initPTS; const rawMPEG = track.segmentCodec === 'mp3' && this.typeSupported.mpeg; const outputSamples = []; const alignedWithVideo = videoTimeOffset !== undefined; let inputSamples = track.samples; let offset = rawMPEG ? 0 : 8; let nextAudioPts = this.nextAudioPts || -1; // window.audioSamples ? window.audioSamples.push(inputSamples.map(s => s.pts)) : (window.audioSamples = [inputSamples.map(s => s.pts)]); // for audio samples, also consider consecutive fragments as being contiguous (even if a level switch occurs), // for sake of clarity: // consecutive fragments are frags with // - less than 100ms gaps between new time offset (if accurate) and next expected PTS OR // - less than 20 audio frames distance // contiguous fragments are consecutive fragments from same quality level (same level, new SN = old SN + 1) // this helps ensuring audio continuity // and this also avoids audio glitches/cut when switching quality, or reporting wrong duration on first audio frame const timeOffsetMpegTS = timeOffset * inputTimeScale; const initTime = initPTS.baseTime * inputTimeScale / initPTS.timescale; this.isAudioContiguous = contiguous = contiguous || inputSamples.length && nextAudioPts > 0 && (accurateTimeOffset && Math.abs(timeOffsetMpegTS - nextAudioPts) < 9000 || Math.abs(normalizePts(inputSamples[0].pts - initTime, timeOffsetMpegTS) - nextAudioPts) < 20 * inputSampleDuration); // compute normalized PTS inputSamples.forEach(function (sample) { sample.pts = normalizePts(sample.pts - initTime, timeOffsetMpegTS); }); if (!contiguous || nextAudioPts < 0) { // filter out sample with negative PTS that are not playable anyway // if we don't remove these negative samples, they will shift all audio samples forward. 
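// rough numbers behind the contiguity test above (illustrative, assuming a 90 kHz input timescale
// and 44.1 kHz AAC): 9000 ticks / 90000 = 0.1 s, i.e. the "less than 100 ms gap" rule, and one AAC
// frame is 1024 * (90000 / 44100) ~ 2090 ticks, so the 20-frame rule tolerates roughly 41800 ticks
// (~464 ms) of drift before the fragment is treated as non-contiguous.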
// leading to audio overlap between current / next fragment inputSamples = inputSamples.filter(sample => sample.pts >= 0); // in case all samples have negative PTS, and have been filtered out, return now if (!inputSamples.length) { return; } if (videoTimeOffset === 0) { // Set the start to 0 to match video so that start gaps larger than inputSampleDuration are filled with silence nextAudioPts = 0; } else if (accurateTimeOffset && !alignedWithVideo) { // When not seeking, not live, and LevelDetails.PTSKnown, use fragment start as predicted next audio PTS nextAudioPts = Math.max(0, timeOffsetMpegTS); } else { // if frags are not contiguous and if we cant trust time offset, let's use first sample PTS as next audio PTS nextAudioPts = inputSamples[0].pts; } } // If the audio track is missing samples, the frames seem to get "left-shifted" within the // resulting mp4 segment, causing sync issues and leaving gaps at the end of the audio segment. // In an effort to prevent this from happening, we inject frames here where there are gaps. // When possible, we inject a silent frame; when that's not possible, we duplicate the last // frame. if (track.segmentCodec === 'aac') { const maxAudioFramesDrift = this.config.maxAudioFramesDrift; for (let i = 0, nextPts = nextAudioPts; i < inputSamples.length; i++) { // First, let's see how far off this frame is from where we expect it to be const sample = inputSamples[i]; const pts = sample.pts; const delta = pts - nextPts; const duration = Math.abs(1000 * delta / inputTimeScale); // When remuxing with video, if we're overlapping by more than a duration, drop this sample to stay in sync if (delta <= -maxAudioFramesDrift * inputSampleDuration && alignedWithVideo) { if (i === 0) { logger.warn(`Audio frame @ ${(pts / inputTimeScale).toFixed(3)}s overlaps nextAudioPts by ${Math.round(1000 * delta / inputTimeScale)} ms.`); this.nextAudioPts = nextAudioPts = nextPts = pts; } } // eslint-disable-line brace-style // Insert missing frames if: // 1: We're more than maxAudioFramesDrift frame away // 2: Not more than MAX_SILENT_FRAME_DURATION away // 3: currentTime (aka nextPtsNorm) is not 0 // 4: remuxing with video (videoTimeOffset !== undefined) else if (delta >= maxAudioFramesDrift * inputSampleDuration && duration < MAX_SILENT_FRAME_DURATION && alignedWithVideo) { let missing = Math.round(delta / inputSampleDuration); // Adjust nextPts so that silent samples are aligned with media pts. This will prevent media samples from // later being shifted if nextPts is based on timeOffset and delta is not a multiple of inputSampleDuration. 
nextPts = pts - missing * inputSampleDuration; if (nextPts < 0) { missing--; nextPts += inputSampleDuration; } if (i === 0) { this.nextAudioPts = nextAudioPts = nextPts; } logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`); for (let j = 0; j < missing; j++) { const newStamp = Math.max(nextPts, 0); let fillFrame = AAC$1.getSilentFrame(track.manifestCodec || track.codec, track.channelCount); if (!fillFrame) { logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.'); fillFrame = sample.unit.subarray(); } inputSamples.splice(i, 0, { unit: fillFrame, pts: newStamp }); nextPts += inputSampleDuration; i++; } } sample.pts = nextPts; nextPts += inputSampleDuration; } } let firstPTS = null; let lastPTS = null; let mdat; let mdatSize = 0; let sampleLength = inputSamples.length; while (sampleLength--) { mdatSize += inputSamples[sampleLength].unit.byteLength; } for (let j = 0, _nbSamples = inputSamples.length; j < _nbSamples; j++) { const audioSample = inputSamples[j]; const unit = audioSample.unit; let pts = audioSample.pts; if (lastPTS !== null) { // If we have more than one sample, set the duration of the sample to the "real" duration; the PTS diff with // the previous sample const prevSample = outputSamples[j - 1]; prevSample.duration = Math.round((pts - lastPTS) / scaleFactor); } else { if (contiguous && track.segmentCodec === 'aac') { // set PTS/DTS to expected PTS/DTS pts = nextAudioPts; } // remember first PTS of our audioSamples firstPTS = pts; if (mdatSize > 0) { /* concatenate the audio data and construct the mdat in place (need 8 more bytes to fill length and mdat type) */ mdatSize += offset; try { mdat = new Uint8Array(mdatSize); } catch (err) { this.observer.emit(Events.ERROR, Events.ERROR, { type: ErrorTypes.MUX_ERROR, details: ErrorDetails.REMUX_ALLOC_ERROR, fatal: false, error: err, bytes: mdatSize, reason: `fail allocating audio mdat ${mdatSize}` }); return; } if (!rawMPEG) { const view = new DataView(mdat.buffer); view.setUint32(0, mdatSize); mdat.set(MP4.types.mdat, 4); } } else { // no audio samples return; } } mdat.set(unit, offset); const unitLen = unit.byteLength; offset += unitLen; // Default the sample's duration to the computed mp4SampleDuration, which will either be 1024 for AAC or 1152 for MPEG // In the case that we have 1 sample, this will be the duration. If we have more than one sample, the duration // becomes the PTS diff with the previous sample outputSamples.push(new Mp4Sample(true, mp4SampleDuration, unitLen, 0)); lastPTS = pts; } // We could end up with no audio samples if all input samples were overlapping with the previously remuxed ones const nbSamples = outputSamples.length; if (!nbSamples) { return; } // The next audio sample PTS should be equal to last sample PTS + duration const lastSample = outputSamples[outputSamples.length - 1]; this.nextAudioPts = nextAudioPts = lastPTS + scaleFactor * lastSample.duration; // Set the track samples from inputSamples to outputSamples before remuxing const moof = rawMPEG ? new Uint8Array(0) : MP4.moof(track.sequenceNumber++, firstPTS / scaleFactor, _extends({}, track, { samples: outputSamples })); // Clear the track samples. 
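// rough audio timing numbers for the packing loop above (illustrative, assuming 90 kHz input and
// 44.1 kHz AAC): two frames 2090 input ticks apart get a duration of
// Math.round(2090 / (90000 / 44100)) ~ 1024 audio-timescale units (one AAC frame), and the next
// expected audio PTS becomes lastPTS + scaleFactor * lastDuration, i.e. roughly 2090 input ticks
// past the last frame.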
This also clears the samples array in the demuxer, since the reference is shared track.samples = []; const start = firstPTS / inputTimeScale; const end = nextAudioPts / inputTimeScale; const type = 'audio'; const audioData = { data1: moof, data2: mdat, startPTS: start, endPTS: end, startDTS: start, endDTS: end, type, hasAudio: true, hasVideo: false, nb: nbSamples }; this.isAudioContiguous = true; return audioData; } remuxEmptyAudio(track, timeOffset, contiguous, videoData) { const inputTimeScale = track.inputTimeScale; const mp4timeScale = track.samplerate ? track.samplerate : inputTimeScale; const scaleFactor = inputTimeScale / mp4timeScale; const nextAudioPts = this.nextAudioPts; // sync with video's timestamp const initDTS = this._initDTS; const init90kHz = initDTS.baseTime * 90000 / initDTS.timescale; const startDTS = (nextAudioPts !== null ? nextAudioPts : videoData.startDTS * inputTimeScale) + init90kHz; const endDTS = videoData.endDTS * inputTimeScale + init90kHz; // one sample's duration value const frameDuration = scaleFactor * AAC_SAMPLES_PER_FRAME; // samples count of this segment's duration const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration); // silent frame const silentFrame = AAC$1.getSilentFrame(track.manifestCodec || track.codec, track.channelCount); logger.warn('[mp4-remuxer]: remux empty Audio'); // Can't remux if we can't generate a silent frame... if (!silentFrame) { logger.trace('[mp4-remuxer]: Unable to remuxEmptyAudio since we were unable to get a silent frame for given audio codec'); return; } const samples = []; for (let i = 0; i < nbSamples; i++) { const stamp = startDTS + i * frameDuration; samples.push({ unit: silentFrame, pts: stamp, dts: stamp }); } track.samples = samples; return this.remuxAudio(track, timeOffset, contiguous, false); } } function normalizePts(value, reference) { let offset; if (reference === null) { return value; } if (reference < value) { // - 2^33 offset = -8589934592; } else { // + 2^33 offset = 8589934592; } /* PTS is 33bit (from 0 to 2^33 -1) if diff between value and reference is bigger than half of the amplitude (2^32) then it means that PTS looping occured. 
fill the gap */ while (Math.abs(value - reference) > 4294967296) { value += offset; } return value; } function findKeyframeIndex(samples) { for (let i = 0; i < samples.length; i++) { if (samples[i].key) { return i; } } return -1; } function flushTextTrackMetadataCueSamples(track, timeOffset, initPTS, initDTS) { const length = track.samples.length; if (!length) { return; } const inputTimeScale = track.inputTimeScale; for (let index = 0; index < length; index++) { const sample = track.samples[index]; // setting id3 pts, dts to relative time // using this._initPTS and this._initDTS to calculate relative time sample.pts = normalizePts(sample.pts - initPTS.baseTime * inputTimeScale / initPTS.timescale, timeOffset * inputTimeScale) / inputTimeScale; sample.dts = normalizePts(sample.dts - initDTS.baseTime * inputTimeScale / initDTS.timescale, timeOffset * inputTimeScale) / inputTimeScale; } const samples = track.samples; track.samples = []; return { samples }; } function flushTextTrackUserdataCueSamples(track, timeOffset, initPTS) { const length = track.samples.length; if (!length) { return; } const inputTimeScale = track.inputTimeScale; for (let index = 0; index < length; index++) { const sample = track.samples[index]; // setting text pts, dts to relative time // using this._initPTS and this._initDTS to calculate relative time sample.pts = normalizePts(sample.pts - initPTS.baseTime * inputTimeScale / initPTS.timescale, timeOffset * inputTimeScale) / inputTimeScale; } track.samples.sort((a, b) => a.pts - b.pts); const samples = track.samples; track.samples = []; return { samples }; } class Mp4Sample { constructor(isKeyframe, duration, size, cts) { this.size = void 0; this.duration = void 0; this.cts = void 0; this.flags = void 0; this.duration = duration; this.size = size; this.cts = cts; this.flags = new Mp4SampleFlags(isKeyframe); } } class Mp4SampleFlags { constructor(isKeyframe) { this.isLeading = 0; this.isDependedOn = 0; this.hasRedundancy = 0; this.degradPrio = 0; this.dependsOn = 1; this.isNonSync = 1; this.dependsOn = isKeyframe ? 2 : 1; this.isNonSync = isKeyframe ? 
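// the flags set in this constructor map onto the ISO BMFF trun sample-flags semantics, roughly:
//   keyframe     -> dependsOn = 2, isNonSync = 0 (sync sample, depends on no other sample)
//   other frames -> dependsOn = 1, isNonSync = 1 (depends on other samples, not a sync sample)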
0 : 1; } } class PassThroughRemuxer { constructor() { this.emitInitSegment = false; this.audioCodec = void 0; this.videoCodec = void 0; this.initData = void 0; this.initPTS = null; this.initTracks = void 0; this.lastEndTime = null; } destroy() {} resetTimeStamp(defaultInitPTS) { this.initPTS = defaultInitPTS; this.lastEndTime = null; } resetNextTimestamp() { this.lastEndTime = null; } resetInitSegment(initSegment, audioCodec, videoCodec, decryptdata) { this.audioCodec = audioCodec; this.videoCodec = videoCodec; this.generateInitSegment(patchEncyptionData(initSegment, decryptdata)); this.emitInitSegment = true; } generateInitSegment(initSegment) { let { audioCodec, videoCodec } = this; if (!(initSegment != null && initSegment.byteLength)) { this.initTracks = undefined; this.initData = undefined; return; } const initData = this.initData = parseInitSegment(initSegment); // Get codec from initSegment or fallback to default if (!audioCodec) { audioCodec = getParsedTrackCodec(initData.audio, ElementaryStreamTypes.AUDIO); } if (!videoCodec) { videoCodec = getParsedTrackCodec(initData.video, ElementaryStreamTypes.VIDEO); } const tracks = {}; if (initData.audio && initData.video) { tracks.audiovideo = { container: 'video/mp4', codec: audioCodec + ',' + videoCodec, initSegment, id: 'main' }; } else if (initData.audio) { tracks.audio = { container: 'audio/mp4', codec: audioCodec, initSegment, id: 'audio' }; } else if (initData.video) { tracks.video = { container: 'video/mp4', codec: videoCodec, initSegment, id: 'main' }; } else { logger.warn('[passthrough-remuxer.ts]: initSegment does not contain moov or trak boxes.'); } this.initTracks = tracks; } remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset) { var _initData, _initData2; let { initPTS, lastEndTime } = this; const result = { audio: undefined, video: undefined, text: textTrack, id3: id3Track, initSegment: undefined }; // If we haven't yet set a lastEndDTS, or it was reset, set it to the provided timeOffset. We want to use the // lastEndDTS over timeOffset whenever possible; during progressive playback, the media source will not update // the media duration (which is what timeOffset is provided as) before we need to process the next chunk. if (!isFiniteNumber(lastEndTime)) { lastEndTime = this.lastEndTime = timeOffset || 0; } // The binary segment data is added to the videoTrack in the mp4demuxer. We don't check to see if the data is only // audio or video (or both); adding it to video was an arbitrary choice. const data = videoTrack.samples; if (!(data != null && data.length)) { return result; } const initSegment = { initPTS: undefined, timescale: 1 }; let initData = this.initData; if (!((_initData = initData) != null && _initData.length)) { this.generateInitSegment(data); initData = this.initData; } if (!((_initData2 = initData) != null && _initData2.length)) { // We can't remux if the initSegment could not be generated logger.warn('[passthrough-remuxer.ts]: Failed to generate initSegment.'); return result; } if (this.emitInitSegment) { initSegment.tracks = this.initTracks; this.emitInitSegment = false; } const duration = getDuration(data, initData); const startDTS = getStartDTS(initData, data); const decodeTime = startDTS === null ? 
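// passthrough remuxing in a nutshell (descriptive note): fmp4 payloads are forwarded untouched
// instead of being rebuilt; the init segment is parsed only to discover tracks/codecs, and initPTS
// is derived from the first decode time (decodeTime - timeOffset, on a timescale of 1 second) so
// that startPTS/endPTS can be reported in playlist time.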
timeOffset : startDTS; if (isInvalidInitPts(initPTS, decodeTime, timeOffset, duration) || initSegment.timescale !== initPTS.timescale && accurateTimeOffset) { initSegment.initPTS = decodeTime - timeOffset; if (initPTS && initPTS.timescale === 1) { logger.warn(`Adjusting initPTS by ${initSegment.initPTS - initPTS.baseTime}`); } this.initPTS = initPTS = { baseTime: initSegment.initPTS, timescale: 1 }; } const startTime = audioTrack ? decodeTime - initPTS.baseTime / initPTS.timescale : lastEndTime; const endTime = startTime + duration; offsetStartDTS(initData, data, initPTS.baseTime / initPTS.timescale); if (duration > 0) { this.lastEndTime = endTime; } else { logger.warn('Duration parsed from mp4 should be greater than zero'); this.resetNextTimestamp(); } const hasAudio = !!initData.audio; const hasVideo = !!initData.video; let type = ''; if (hasAudio) { type += 'audio'; } if (hasVideo) { type += 'video'; } const track = { data1: data, startPTS: startTime, startDTS: startTime, endPTS: endTime, endDTS: endTime, type, hasAudio, hasVideo, nb: 1, dropped: 0 }; result.audio = track.type === 'audio' ? track : undefined; result.video = track.type !== 'audio' ? track : undefined; result.initSegment = initSegment; result.id3 = flushTextTrackMetadataCueSamples(id3Track, timeOffset, initPTS, initPTS); if (textTrack.samples.length) { result.text = flushTextTrackUserdataCueSamples(textTrack, timeOffset, initPTS); } return result; } } function isInvalidInitPts(initPTS, startDTS, timeOffset, duration) { if (initPTS === null) { return true; } // InitPTS is invalid when distance from program would be more than segment duration or a minimum of one second const minDuration = Math.max(duration, 1); const startTime = startDTS - initPTS.baseTime / initPTS.timescale; return Math.abs(startTime - timeOffset) > minDuration; } function getParsedTrackCodec(track, type) { const parsedCodec = track == null ? void 0 : track.codec; if (parsedCodec && parsedCodec.length > 4) { return parsedCodec; } // Since mp4-tools cannot parse full codec string (see 'TODO: Parse codec details'... 
in mp4-tools) // Provide defaults based on codec type // This allows for some playback of some fmp4 playlists without CODECS defined in manifest if (parsedCodec === 'hvc1' || parsedCodec === 'hev1') { return 'hvc1.1.6.L120.90'; } if (parsedCodec === 'av01') { return 'av01.0.04M.08'; } if (parsedCodec === 'avc1' || type === ElementaryStreamTypes.VIDEO) { return 'avc1.42e01e'; } return 'mp4a.40.5'; } let now; // performance.now() not available on WebWorker, at least on Safari Desktop try { now = self.performance.now.bind(self.performance); } catch (err) { logger.debug('Unable to use Performance API on this environment'); now = typeof self !== 'undefined' && self.Date.now; } const muxConfig = [{ demux: MP4Demuxer, remux: PassThroughRemuxer }, { demux: TSDemuxer, remux: MP4Remuxer }, { demux: AACDemuxer, remux: MP4Remuxer }, { demux: MP3Demuxer, remux: MP4Remuxer }]; class Transmuxer$1 { constructor(observer, typeSupported, config, vendor, id) { this.async = false; this.observer = void 0; this.typeSupported = void 0; this.config = void 0; this.vendor = void 0; this.id = void 0; this.demuxer = void 0; this.remuxer = void 0; this.decrypter = void 0; this.probe = void 0; this.decryptionPromise = null; this.transmuxConfig = void 0; this.currentTransmuxState = void 0; this.observer = observer; this.typeSupported = typeSupported; this.config = config; this.vendor = vendor; this.id = id; } configure(transmuxConfig) { this.transmuxConfig = transmuxConfig; if (this.decrypter) { this.decrypter.reset(); } } push(data, decryptdata, chunkMeta, state) { const stats = chunkMeta.transmuxing; stats.executeStart = now(); let uintData = new Uint8Array(data); const { currentTransmuxState, transmuxConfig } = this; if (state) { this.currentTransmuxState = state; } const { contiguous, discontinuity, trackSwitch, accurateTimeOffset, timeOffset, initSegmentChange } = state || currentTransmuxState; const { audioCodec, videoCodec, defaultInitPts, duration, initSegmentData } = transmuxConfig; const keyData = getEncryptionType(uintData, decryptdata); if (keyData && keyData.method === 'AES-128') { const decrypter = this.getDecrypter(); // Software decryption is synchronous; webCrypto is not if (decrypter.isSync()) { // Software decryption is progressive. Progressive decryption may not return a result on each call. 
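// decryption path summary (descriptive note): AES-128 segments are either decrypted synchronously
// in software (progressive, so a call may yield nothing until flush; low-latency parts are flushed
// immediately) or asynchronously via WebCrypto, in which case the resulting promise re-enters
// push() with the decrypted payload and no key data.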
Any cached // data is handled in the flush() call let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer); // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress const loadingParts = chunkMeta.part > -1; if (loadingParts) { decryptedData = decrypter.flush(); } if (!decryptedData) { stats.executeEnd = now(); return emptyResult(chunkMeta); } uintData = new Uint8Array(decryptedData); } else { this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => { // Calling push here is important; if flush() is called while this is still resolving, this ensures that // the decrypted data has been transmuxed const result = this.push(decryptedData, null, chunkMeta); this.decryptionPromise = null; return result; }); return this.decryptionPromise; } } const resetMuxers = this.needsProbing(discontinuity, trackSwitch); if (resetMuxers) { const error = this.configureTransmuxer(uintData); if (error) { logger.warn(`[transmuxer] ${error.message}`); this.observer.emit(Events.ERROR, Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, error, reason: error.message }); stats.executeEnd = now(); return emptyResult(chunkMeta); } } if (discontinuity || trackSwitch || initSegmentChange || resetMuxers) { this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration, decryptdata); } if (discontinuity || initSegmentChange || resetMuxers) { this.resetInitialTimestamp(defaultInitPts); } if (!contiguous) { this.resetContiguity(); } const result = this.transmux(uintData, keyData, timeOffset, accurateTimeOffset, chunkMeta); const currentState = this.currentTransmuxState; currentState.contiguous = true; currentState.discontinuity = false; currentState.trackSwitch = false; stats.executeEnd = now(); return result; } // Due to data caching, flush calls can produce more than one TransmuxerResult (hence the Array type) flush(chunkMeta) { const stats = chunkMeta.transmuxing; stats.executeStart = now(); const { decrypter, currentTransmuxState, decryptionPromise } = this; if (decryptionPromise) { // Upon resolution, the decryption promise calls push() and returns its TransmuxerResult up the stack. Therefore // only flushing is required for async decryption return decryptionPromise.then(() => { return this.flush(chunkMeta); }); } const transmuxResults = []; const { timeOffset } = currentTransmuxState; if (decrypter) { // The decrypter may have data cached, which needs to be demuxed. 
In this case we'll have two TransmuxResults // This happens in the case that we receive only 1 push call for a segment (either for non-progressive downloads, // or for progressive downloads with small segments) const decryptedData = decrypter.flush(); if (decryptedData) { // Push always returns a TransmuxerResult if decryptdata is null transmuxResults.push(this.push(decryptedData, null, chunkMeta)); } } const { demuxer, remuxer } = this; if (!demuxer || !remuxer) { // If probing failed, then Hls.js has been given content its not able to handle stats.executeEnd = now(); return [emptyResult(chunkMeta)]; } const demuxResultOrPromise = demuxer.flush(timeOffset); if (isPromise(demuxResultOrPromise)) { // Decrypt final SAMPLE-AES samples return demuxResultOrPromise.then(demuxResult => { this.flushRemux(transmuxResults, demuxResult, chunkMeta); return transmuxResults; }); } this.flushRemux(transmuxResults, demuxResultOrPromise, chunkMeta); return transmuxResults; } flushRemux(transmuxResults, demuxResult, chunkMeta) { const { audioTrack, videoTrack, id3Track, textTrack } = demuxResult; const { accurateTimeOffset, timeOffset } = this.currentTransmuxState; logger.log(`[transmuxer.ts]: Flushed fragment ${chunkMeta.sn}${chunkMeta.part > -1 ? ' p: ' + chunkMeta.part : ''} of level ${chunkMeta.level}`); const remuxResult = this.remuxer.remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, true, this.id); transmuxResults.push({ remuxResult, chunkMeta }); chunkMeta.transmuxing.executeEnd = now(); } resetInitialTimestamp(defaultInitPts) { const { demuxer, remuxer } = this; if (!demuxer || !remuxer) { return; } demuxer.resetTimeStamp(defaultInitPts); remuxer.resetTimeStamp(defaultInitPts); } resetContiguity() { const { demuxer, remuxer } = this; if (!demuxer || !remuxer) { return; } demuxer.resetContiguity(); remuxer.resetNextTimestamp(); } resetInitSegment(initSegmentData, audioCodec, videoCodec, trackDuration, decryptdata) { const { demuxer, remuxer } = this; if (!demuxer || !remuxer) { return; } demuxer.resetInitSegment(initSegmentData, audioCodec, videoCodec, trackDuration); remuxer.resetInitSegment(initSegmentData, audioCodec, videoCodec, decryptdata); } destroy() { if (this.demuxer) { this.demuxer.destroy(); this.demuxer = undefined; } if (this.remuxer) { this.remuxer.destroy(); this.remuxer = undefined; } } transmux(data, keyData, timeOffset, accurateTimeOffset, chunkMeta) { let result; if (keyData && keyData.method === 'SAMPLE-AES') { result = this.transmuxSampleAes(data, keyData, timeOffset, accurateTimeOffset, chunkMeta); } else { result = this.transmuxUnencrypted(data, timeOffset, accurateTimeOffset, chunkMeta); } return result; } transmuxUnencrypted(data, timeOffset, accurateTimeOffset, chunkMeta) { const { audioTrack, videoTrack, id3Track, textTrack } = this.demuxer.demux(data, timeOffset, false, !this.config.progressive); const remuxResult = this.remuxer.remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, false, this.id); return { remuxResult, chunkMeta }; } transmuxSampleAes(data, decryptData, timeOffset, accurateTimeOffset, chunkMeta) { return this.demuxer.demuxSampleAes(data, decryptData, timeOffset).then(demuxResult => { const remuxResult = this.remuxer.remux(demuxResult.audioTrack, demuxResult.videoTrack, demuxResult.id3Track, demuxResult.textTrack, timeOffset, accurateTimeOffset, false, this.id); return { remuxResult, chunkMeta }; }); } configureTransmuxer(data) { const { config, observer, typeSupported, vendor } = this; // 
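// probe order used here (descriptive note): the first demuxer whose static probe() accepts the
// payload wins,
//   fmp4 -> MP4Demuxer + PassThroughRemuxer
//   ts   -> TSDemuxer  + MP4Remuxer
//   aac  -> AACDemuxer + MP4Remuxer
//   mp3  -> MP3Demuxer + MP4Remuxer
// existing demuxer/remuxer instances are reused when they already match the probed classes.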
probe for content type let mux; for (let i = 0, len = muxConfig.length; i < len; i++) { if (muxConfig[i].demux.probe(data)) { mux = muxConfig[i]; break; } } if (!mux) { return new Error('Failed to find demuxer by probing fragment data'); } // so let's check that current remuxer and demuxer are still valid const demuxer = this.demuxer; const remuxer = this.remuxer; const Remuxer = mux.remux; const Demuxer = mux.demux; if (!remuxer || !(remuxer instanceof Remuxer)) { this.remuxer = new Remuxer(observer, config, typeSupported, vendor); } if (!demuxer || !(demuxer instanceof Demuxer)) { this.demuxer = new Demuxer(observer, config, typeSupported); this.probe = Demuxer.probe; } } needsProbing(discontinuity, trackSwitch) { // in case of continuity change, or track switch // we might switch from content type (AAC container to TS container, or TS to fmp4 for example) return !this.demuxer || !this.remuxer || discontinuity || trackSwitch; } getDecrypter() { let decrypter = this.decrypter; if (!decrypter) { decrypter = this.decrypter = new Decrypter(this.config); } return decrypter; } } function getEncryptionType(data, decryptData) { let encryptionType = null; if (data.byteLength > 0 && decryptData != null && decryptData.key != null && decryptData.iv !== null && decryptData.method != null) { encryptionType = decryptData; } return encryptionType; } const emptyResult = chunkMeta => ({ remuxResult: {}, chunkMeta }); function isPromise(p) { return 'then' in p && p.then instanceof Function; } class TransmuxConfig { constructor(audioCodec, videoCodec, initSegmentData, duration, defaultInitPts) { this.audioCodec = void 0; this.videoCodec = void 0; this.initSegmentData = void 0; this.duration = void 0; this.defaultInitPts = void 0; this.audioCodec = audioCodec; this.videoCodec = videoCodec; this.initSegmentData = initSegmentData; this.duration = duration; this.defaultInitPts = defaultInitPts || null; } } class TransmuxState { constructor(discontinuity, contiguous, accurateTimeOffset, trackSwitch, timeOffset, initSegmentChange) { this.discontinuity = void 0; this.contiguous = void 0; this.accurateTimeOffset = void 0; this.trackSwitch = void 0; this.timeOffset = void 0; this.initSegmentChange = void 0; this.discontinuity = discontinuity; this.contiguous = contiguous; this.accurateTimeOffset = accurateTimeOffset; this.trackSwitch = trackSwitch; this.timeOffset = timeOffset; this.initSegmentChange = initSegmentChange; } } var eventemitter3 = {exports: {}}; (function (module) { var has = Object.prototype.hasOwnProperty , prefix = '~'; /** * Constructor to create a storage for our `EE` objects. * An `Events` instance is a plain object whose properties are event names. * * @constructor * @private */ function Events() {} // // We try to not inherit from `Object.prototype`. In some engines creating an // instance in this way is faster than calling `Object.create(null)` directly. // If `Object.create(null)` is not supported we prefix the event names with a // character to make sure that the built-in object properties are not // overridden or used as an attack vector. // if (Object.create) { Events.prototype = Object.create(null); // // This hack is needed because the `__proto__` property is still inherited in // some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5. // if (!new Events().__proto__) prefix = false; } /** * Representation of a single event listener. * * @param {Function} fn The listener function. * @param {*} context The context to invoke the listener with. 
* @param {Boolean} [once=false] Specify if the listener is a one-time listener. * @constructor * @private */ function EE(fn, context, once) { this.fn = fn; this.context = context; this.once = once || false; } /** * Add a listener for a given event. * * @param {EventEmitter} emitter Reference to the `EventEmitter` instance. * @param {(String|Symbol)} event The event name. * @param {Function} fn The listener function. * @param {*} context The context to invoke the listener with. * @param {Boolean} once Specify if the listener is a one-time listener. * @returns {EventEmitter} * @private */ function addListener(emitter, event, fn, context, once) { if (typeof fn !== 'function') { throw new TypeError('The listener must be a function'); } var listener = new EE(fn, context || emitter, once) , evt = prefix ? prefix + event : event; if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++; else if (!emitter._events[evt].fn) emitter._events[evt].push(listener); else emitter._events[evt] = [emitter._events[evt], listener]; return emitter; } /** * Clear event by name. * * @param {EventEmitter} emitter Reference to the `EventEmitter` instance. * @param {(String|Symbol)} evt The Event name. * @private */ function clearEvent(emitter, evt) { if (--emitter._eventsCount === 0) emitter._events = new Events(); else delete emitter._events[evt]; } /** * Minimal `EventEmitter` interface that is molded against the Node.js * `EventEmitter` interface. * * @constructor * @public */ function EventEmitter() { this._events = new Events(); this._eventsCount = 0; } /** * Return an array listing the events for which the emitter has registered * listeners. * * @returns {Array} * @public */ EventEmitter.prototype.eventNames = function eventNames() { var names = [] , events , name; if (this._eventsCount === 0) return names; for (name in (events = this._events)) { if (has.call(events, name)) names.push(prefix ? name.slice(1) : name); } if (Object.getOwnPropertySymbols) { return names.concat(Object.getOwnPropertySymbols(events)); } return names; }; /** * Return the listeners registered for a given event. * * @param {(String|Symbol)} event The event name. * @returns {Array} The registered listeners. * @public */ EventEmitter.prototype.listeners = function listeners(event) { var evt = prefix ? prefix + event : event , handlers = this._events[evt]; if (!handlers) return []; if (handlers.fn) return [handlers.fn]; for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) { ee[i] = handlers[i].fn; } return ee; }; /** * Return the number of listeners listening to a given event. * * @param {(String|Symbol)} event The event name. * @returns {Number} The number of listeners. * @public */ EventEmitter.prototype.listenerCount = function listenerCount(event) { var evt = prefix ? prefix + event : event , listeners = this._events[evt]; if (!listeners) return 0; if (listeners.fn) return 1; return listeners.length; }; /** * Calls each of the listeners registered for a given event. * * @param {(String|Symbol)} event The event name. * @returns {Boolean} `true` if the event had listeners, else `false`. * @public */ EventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) { var evt = prefix ? 
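// a minimal usage sketch of this vendored eventemitter3 build (illustrative only, not executed):
//   const bus = new EventEmitter();
//   const onFrag = (data) => console.log('frag', data);
//   bus.on('fragLoaded', onFrag);
//   bus.emit('fragLoaded', { sn: 1 }); // listeners are keyed internally as '~fragLoaded' when prefixing is enabled
//   bus.off('fragLoaded', onFrag);     // off is an alias of removeListener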
prefix + event : event; if (!this._events[evt]) return false; var listeners = this._events[evt] , len = arguments.length , args , i; if (listeners.fn) { if (listeners.once) this.removeListener(event, listeners.fn, undefined, true); switch (len) { case 1: return listeners.fn.call(listeners.context), true; case 2: return listeners.fn.call(listeners.context, a1), true; case 3: return listeners.fn.call(listeners.context, a1, a2), true; case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true; case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true; case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true; } for (i = 1, args = new Array(len -1); i < len; i++) { args[i - 1] = arguments[i]; } listeners.fn.apply(listeners.context, args); } else { var length = listeners.length , j; for (i = 0; i < length; i++) { if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true); switch (len) { case 1: listeners[i].fn.call(listeners[i].context); break; case 2: listeners[i].fn.call(listeners[i].context, a1); break; case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break; case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break; default: if (!args) for (j = 1, args = new Array(len -1); j < len; j++) { args[j - 1] = arguments[j]; } listeners[i].fn.apply(listeners[i].context, args); } } } return true; }; /** * Add a listener for a given event. * * @param {(String|Symbol)} event The event name. * @param {Function} fn The listener function. * @param {*} [context=this] The context to invoke the listener with. * @returns {EventEmitter} `this`. * @public */ EventEmitter.prototype.on = function on(event, fn, context) { return addListener(this, event, fn, context, false); }; /** * Add a one-time listener for a given event. * * @param {(String|Symbol)} event The event name. * @param {Function} fn The listener function. * @param {*} [context=this] The context to invoke the listener with. * @returns {EventEmitter} `this`. * @public */ EventEmitter.prototype.once = function once(event, fn, context) { return addListener(this, event, fn, context, true); }; /** * Remove the listeners of a given event. * * @param {(String|Symbol)} event The event name. * @param {Function} fn Only remove the listeners that match this function. * @param {*} context Only remove the listeners that have this context. * @param {Boolean} once Only remove one-time listeners. * @returns {EventEmitter} `this`. * @public */ EventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) { var evt = prefix ? prefix + event : event; if (!this._events[evt]) return this; if (!fn) { clearEvent(this, evt); return this; } var listeners = this._events[evt]; if (listeners.fn) { if ( listeners.fn === fn && (!once || listeners.once) && (!context || listeners.context === context) ) { clearEvent(this, evt); } } else { for (var i = 0, events = [], length = listeners.length; i < length; i++) { if ( listeners[i].fn !== fn || (once && !listeners[i].once) || (context && listeners[i].context !== context) ) { events.push(listeners[i]); } } // // Reset the array, or remove it completely if we have no more listeners. // if (events.length) this._events[evt] = events.length === 1 ? events[0] : events; else clearEvent(this, evt); } return this; }; /** * Remove all listeners, or those of the specified event. * * @param {(String|Symbol)} [event] The event name. * @returns {EventEmitter} `this`. 
* @public */ EventEmitter.prototype.removeAllListeners = function removeAllListeners(event) { var evt; if (event) { evt = prefix ? prefix + event : event; if (this._events[evt]) clearEvent(this, evt); } else { this._events = new Events(); this._eventsCount = 0; } return this; }; // // Alias methods names because people roll like that. // EventEmitter.prototype.off = EventEmitter.prototype.removeListener; EventEmitter.prototype.addListener = EventEmitter.prototype.on; // // Expose the prefix. // EventEmitter.prefixed = prefix; // // Allow `EventEmitter` to be imported as module namespace. // EventEmitter.EventEmitter = EventEmitter; // // Expose the module. // { module.exports = EventEmitter; } } (eventemitter3)); var eventemitter3Exports = eventemitter3.exports; var EventEmitter = /*@__PURE__*/getDefaultExportFromCjs(eventemitter3Exports); const MediaSource$1 = getMediaSource() || { isTypeSupported: () => false }; class TransmuxerInterface { constructor(hls, id, onTransmuxComplete, onFlush) { this.error = null; this.hls = void 0; this.id = void 0; this.observer = void 0; this.frag = null; this.part = null; this.useWorker = void 0; this.workerContext = null; this.onwmsg = void 0; this.transmuxer = null; this.onTransmuxComplete = void 0; this.onFlush = void 0; const config = hls.config; this.hls = hls; this.id = id; this.useWorker = !!config.enableWorker; this.onTransmuxComplete = onTransmuxComplete; this.onFlush = onFlush; const forwardMessage = (ev, data) => { data = data || {}; data.frag = this.frag; data.id = this.id; if (ev === Events.ERROR) { this.error = data.error; } this.hls.trigger(ev, data); }; // forward events to main thread this.observer = new EventEmitter(); this.observer.on(Events.FRAG_DECRYPTED, forwardMessage); this.observer.on(Events.ERROR, forwardMessage); const typeSupported = { mp4: MediaSource$1.isTypeSupported('video/mp4'), mpeg: MediaSource$1.isTypeSupported('audio/mpeg'), mp3: MediaSource$1.isTypeSupported('audio/mp4; codecs="mp3"') }; // navigator.vendor is not always available in Web Worker // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator const vendor = navigator.vendor; if (this.useWorker && typeof Worker !== 'undefined') { const canCreateWorker = config.workerPath || hasUMDWorker(); if (canCreateWorker) { try { if (config.workerPath) { logger.log(`loading Web Worker ${config.workerPath} for "${id}"`); this.workerContext = loadWorker(config.workerPath); } else { logger.log(`injecting Web Worker for "${id}"`); this.workerContext = injectWorker(); } this.onwmsg = ev => this.onWorkerMessage(ev); const { worker } = this.workerContext; worker.addEventListener('message', this.onwmsg); worker.onerror = event => { const error = new Error(`${event.message} (${event.filename}:${event.lineno})`); config.enableWorker = false; logger.warn(`Error in "${id}" Web Worker, fallback to inline`); this.hls.trigger(Events.ERROR, { type: ErrorTypes.OTHER_ERROR, details: ErrorDetails.INTERNAL_EXCEPTION, fatal: false, event: 'demuxerWorker', error }); }; worker.postMessage({ cmd: 'init', typeSupported: typeSupported, vendor: vendor, id: id, config: JSON.stringify(config) }); } catch (err) { logger.warn(`Error setting up "${id}" Web Worker, fallback to inline`, err); this.resetWorker(); this.error = null; this.transmuxer = new Transmuxer$1(this.observer, typeSupported, config, vendor, id); } return; } } this.transmuxer = new Transmuxer$1(this.observer, typeSupported, config, vendor, id); } resetWorker() { if (this.workerContext) { const { worker, 
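// worker bootstrap summary (descriptive note): when enableWorker is set and Worker is available,
// the transmuxer runs in a Web Worker, either loaded from config.workerPath or injected inline;
// the 'init' message carries the MSE type-support probe (mp4/mpeg/mp3), the vendor string and a
// JSON-serialized config. Worker errors disable enableWorker and are reported upstream, and any
// setup failure falls back to the in-thread Transmuxer$1.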
objectURL } = this.workerContext; if (objectURL) { // revoke the Object URL that was used to create transmuxer worker, so as not to leak it self.URL.revokeObjectURL(objectURL); } worker.removeEventListener('message', this.onwmsg); worker.onerror = null; worker.terminate(); this.workerContext = null; } } destroy() { if (this.workerContext) { this.resetWorker(); this.onwmsg = undefined; } else { const transmuxer = this.transmuxer; if (transmuxer) { transmuxer.destroy(); this.transmuxer = null; } } const observer = this.observer; if (observer) { observer.removeAllListeners(); } this.frag = null; // @ts-ignore this.observer = null; // @ts-ignore this.hls = null; } push(data, initSegmentData, audioCodec, videoCodec, frag, part, duration, accurateTimeOffset, chunkMeta, defaultInitPTS) { var _frag$initSegment, _lastFrag$initSegment; chunkMeta.transmuxing.start = self.performance.now(); const { transmuxer } = this; const timeOffset = part ? part.start : frag.start; // TODO: push "clear-lead" decrypt data for unencrypted fragments in streams with encrypted ones const decryptdata = frag.decryptdata; const lastFrag = this.frag; const discontinuity = !(lastFrag && frag.cc === lastFrag.cc); const trackSwitch = !(lastFrag && chunkMeta.level === lastFrag.level); const snDiff = lastFrag ? chunkMeta.sn - lastFrag.sn : -1; const partDiff = this.part ? chunkMeta.part - this.part.index : -1; const progressive = snDiff === 0 && chunkMeta.id > 1 && chunkMeta.id === (lastFrag == null ? void 0 : lastFrag.stats.chunkCount); const contiguous = !trackSwitch && (snDiff === 1 || snDiff === 0 && (partDiff === 1 || progressive && partDiff <= 0)); const now = self.performance.now(); if (trackSwitch || snDiff || frag.stats.parsing.start === 0) { frag.stats.parsing.start = now; } if (part && (partDiff || !contiguous)) { part.stats.parsing.start = now; } const initSegmentChange = !(lastFrag && ((_frag$initSegment = frag.initSegment) == null ? void 0 : _frag$initSegment.url) === ((_lastFrag$initSegment = lastFrag.initSegment) == null ? void 0 : _lastFrag$initSegment.url)); const state = new TransmuxState(discontinuity, contiguous, accurateTimeOffset, trackSwitch, timeOffset, initSegmentChange); if (!contiguous || discontinuity || initSegmentChange) { logger.log(`[transmuxer-interface, ${frag.type}]: Starting new transmux session for sn: ${chunkMeta.sn} p: ${chunkMeta.part} level: ${chunkMeta.level} id: ${chunkMeta.id} discontinuity: ${discontinuity} trackSwitch: ${trackSwitch} contiguous: ${contiguous} accurateTimeOffset: ${accurateTimeOffset} timeOffset: ${timeOffset} initSegmentChange: ${initSegmentChange}`); const config = new TransmuxConfig(audioCodec, videoCodec, initSegmentData, duration, defaultInitPTS); this.configureTransmuxer(config); } this.frag = frag; this.part = part; // Frags with sn of 'initSegment' are not transmuxed if (this.workerContext) { // post fragment payload as transferable objects for ArrayBuffer (no copy) this.workerContext.worker.postMessage({ cmd: 'demux', data, decryptdata, chunkMeta, state }, data instanceof ArrayBuffer ? 
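// transfer semantics for this postMessage call (illustrative only, not executed): ArrayBuffer
// payloads are moved to the worker rather than copied, e.g.
//   const buf = new ArrayBuffer(1024);
//   worker.postMessage({ cmd: 'demux', data: buf }, [buf]); // buf.byteLength is 0 afterwards in this thread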
[data] : []); } else if (transmuxer) { const transmuxResult = transmuxer.push(data, decryptdata, chunkMeta, state); if (isPromise(transmuxResult)) { transmuxer.async = true; transmuxResult.then(data => { this.handleTransmuxComplete(data); }).catch(error => { this.transmuxerError(error, chunkMeta, 'transmuxer-interface push error'); }); } else { transmuxer.async = false; this.handleTransmuxComplete(transmuxResult); } } } flush(chunkMeta) { chunkMeta.transmuxing.start = self.performance.now(); const { transmuxer } = this; if (this.workerContext) { this.workerContext.worker.postMessage({ cmd: 'flush', chunkMeta }); } else if (transmuxer) { let transmuxResult = transmuxer.flush(chunkMeta); const asyncFlush = isPromise(transmuxResult); if (asyncFlush || transmuxer.async) { if (!isPromise(transmuxResult)) { transmuxResult = Promise.resolve(transmuxResult); } transmuxResult.then(data => { this.handleFlushResult(data, chunkMeta); }).catch(error => { this.transmuxerError(error, chunkMeta, 'transmuxer-interface flush error'); }); } else { this.handleFlushResult(transmuxResult, chunkMeta); } } } transmuxerError(error, chunkMeta, reason) { if (!this.hls) { return; } this.error = error; this.hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, chunkMeta, fatal: false, error, err: error, reason }); } handleFlushResult(results, chunkMeta) { results.forEach(result => { this.handleTransmuxComplete(result); }); this.onFlush(chunkMeta); } onWorkerMessage(ev) { const data = ev.data; const hls = this.hls; switch (data.event) { case 'init': { var _this$workerContext; const objectURL = (_this$workerContext = this.workerContext) == null ? void 0 : _this$workerContext.objectURL; if (objectURL) { // revoke the Object URL that was used to create transmuxer worker, so as not to leak it self.URL.revokeObjectURL(objectURL); } break; } case 'transmuxComplete': { this.handleTransmuxComplete(data.data); break; } case 'flush': { this.onFlush(data.data); break; } // pass logs from the worker thread to the main logger case 'workerLog': if (logger[data.data.logType]) { logger[data.data.logType](data.data.message); } break; default: { data.data = data.data || {}; data.data.frag = this.frag; data.data.id = this.id; hls.trigger(data.event, data.data); break; } } } configureTransmuxer(config) { const { transmuxer } = this; if (this.workerContext) { this.workerContext.worker.postMessage({ cmd: 'configure', config }); } else if (transmuxer) { transmuxer.configure(config); } } handleTransmuxComplete(result) { result.chunkMeta.transmuxing.end = self.performance.now(); this.onTransmuxComplete(result); } } const STALL_MINIMUM_DURATION_MS = 250; const MAX_START_GAP_JUMP = 2.0; const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1; const SKIP_BUFFER_RANGE_START = 0.05; class GapController { constructor(config, media, fragmentTracker, hls) { this.config = void 0; this.media = null; this.fragmentTracker = void 0; this.hls = void 0; this.nudgeRetry = 0; this.stallReported = false; this.stalled = null; this.moved = false; this.seeking = false; this.config = config; this.media = media; this.fragmentTracker = fragmentTracker; this.hls = hls; } destroy() { this.media = null; // @ts-ignore this.hls = this.fragmentTracker = null; } /** * Checks if the playhead is stuck within a gap, and if so, attempts to free it. * A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range). 
* * @param lastCurrentTime - Previously read playhead position */ poll(lastCurrentTime, activeFrag) { const { config, media, stalled } = this; if (media === null) { return; } const { currentTime, seeking } = media; const seeked = this.seeking && !seeking; const beginSeek = !this.seeking && seeking; this.seeking = seeking; // The playhead is moving, no-op if (currentTime !== lastCurrentTime) { this.moved = true; if (stalled !== null) { // The playhead is now moving, but was previously stalled if (this.stallReported) { const _stalledDuration = self.performance.now() - stalled; logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`); this.stallReported = false; } this.stalled = null; this.nudgeRetry = 0; } return; } // Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek if (beginSeek || seeked) { this.stalled = null; return; } // The playhead should not be moving if (media.paused && !seeking || media.ended || media.playbackRate === 0 || !BufferHelper.getBuffered(media).length) { return; } const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0); const isBuffered = bufferInfo.len > 0; const nextStart = bufferInfo.nextStart || 0; // There is no playable buffer (seeked, waiting for buffer) if (!isBuffered && !nextStart) { return; } if (seeking) { // Waiting for seeking in a buffered range to complete const hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP; // Next buffered range is too far ahead to jump to while still seeking const noBufferGap = !nextStart || activeFrag && activeFrag.start <= currentTime || nextStart - currentTime > MAX_START_GAP_JUMP && !this.fragmentTracker.getPartialFragment(currentTime); if (hasEnoughBuffer || noBufferGap) { return; } // Reset moved state when seeking to a point in or before a gap this.moved = false; } // Skip start gaps if we haven't played, but the last poll detected the start of a stall // The addition poll gives the browser a chance to jump the gap for us if (!this.moved && this.stalled !== null) { var _level$details; // Jump start gaps within jump threshold const startJump = Math.max(nextStart, bufferInfo.start || 0) - currentTime; // When joining a live stream with audio tracks, account for live playlist window sliding by allowing // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment // that begins over 1 target duration after the video start position. const level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null; const isLive = level == null ? void 0 : (_level$details = level.details) == null ? void 0 : _level$details.live; const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP; const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime); if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) { this._trySkipBufferHole(partialOrGap); return; } } // Start tracking stall time const tnow = self.performance.now(); if (stalled === null) { this.stalled = tnow; return; } const stalledDuration = tnow - stalled; if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) { // Report stalling after trying to fix this._reportStall(bufferInfo); if (!this.media) { return; } } const bufferedWithHoles = BufferHelper.bufferInfo(media, currentTime, config.maxBufferHole); this._tryFixBufferStall(bufferedWithHoles, stalledDuration); } /** * Detects and attempts to fix known buffer stalling issues. 
* @param bufferInfo - The properties of the current buffer. * @param stalledDurationMs - The amount of time Hls.js has been stalling for. * @private */ _tryFixBufferStall(bufferInfo, stalledDurationMs) { const { config, fragmentTracker, media } = this; if (media === null) { return; } const currentTime = media.currentTime; const partial = fragmentTracker.getPartialFragment(currentTime); if (partial) { // Try to skip over the buffer hole caused by a partial fragment // This method isn't limited by the size of the gap between buffered ranges const targetTime = this._trySkipBufferHole(partial); // we return here in this case, meaning // the branch below only executes when we haven't seeked to a new position if (targetTime || !this.media) { return; } } // if we haven't had to skip over a buffer hole of a partial fragment // we may just have to "nudge" the playlist as the browser decoding/rendering engine // needs to cross some sort of threshold covering all source-buffers content // to start playing properly. if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) { logger.warn('Trying to nudge playhead over buffer-hole'); // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds // We only try to jump the hole if it's under the configured size // Reset stalled so to rearm watchdog timer this.stalled = null; this._tryNudgeBuffer(); } } /** * Triggers a BUFFER_STALLED_ERROR event, but only once per stall period. * @param bufferLen - The playhead distance from the end of the current buffer segment. * @private */ _reportStall(bufferInfo) { const { hls, media, stallReported } = this; if (!stallReported && media) { // Report stalled error once this.stallReported = true; const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`); logger.warn(error.message); hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_STALLED_ERROR, fatal: false, error, buffer: bufferInfo.len }); } } /** * Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments * @param partial - The partial fragment found at the current time (where playback is stalling). * @private */ _trySkipBufferHole(partial) { const { config, hls, media } = this; if (media === null) { return 0; } // Check if currentTime is between unbuffered regions of partial fragments const currentTime = media.currentTime; const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0); const startTime = currentTime < bufferInfo.start ? 
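// rough numbers for the hole skip performed below (illustrative): with SKIP_BUFFER_RANGE_START = 0.05
// and SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1, a stall at currentTime = 10.00 with the next buffered range
// starting at 10.30 seeks to Math.max(10.30 + 0.05, 10.00 + 0.1) = 10.35 s, just past the hole.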
bufferInfo.start : bufferInfo.nextStart; if (startTime) { const bufferStarved = bufferInfo.len <= config.maxBufferHole; const waiting = bufferInfo.len > 0 && bufferInfo.len < 1 && media.readyState < 3; const gapLength = startTime - currentTime; if (gapLength > 0 && (bufferStarved || waiting)) { // Only allow large gaps to be skipped if it is a start gap, or all fragments in skip range are partial if (gapLength > config.maxBufferHole) { const { fragmentTracker } = this; let startGap = false; if (currentTime === 0) { const startFrag = fragmentTracker.getAppendedFrag(0, PlaylistLevelType.MAIN); if (startFrag && startTime < startFrag.end) { startGap = true; } } if (!startGap) { const startProvisioned = partial || fragmentTracker.getAppendedFrag(currentTime, PlaylistLevelType.MAIN); if (startProvisioned) { let moreToLoad = false; let pos = startProvisioned.end; while (pos < startTime) { const provisioned = fragmentTracker.getPartialFragment(pos); if (provisioned) { pos += provisioned.duration; } else { moreToLoad = true; break; } } if (moreToLoad) { return 0; } } } } const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS); logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`); this.moved = true; this.stalled = null; media.currentTime = targetTime; if (partial && !partial.gap) { const error = new Error(`fragment loaded with buffer holes, seeking from ${currentTime} to ${targetTime}`); hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_SEEK_OVER_HOLE, fatal: false, error, reason: error.message, frag: partial }); } return targetTime; } } return 0; } /** * Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount. * @private */ _tryNudgeBuffer() { const { config, hls, media, nudgeRetry } = this; if (media === null) { return; } const currentTime = media.currentTime; this.nudgeRetry++; if (nudgeRetry < config.nudgeMaxRetry) { const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset; // playback stalled in buffered area ... 
let's nudge currentTime to try to overcome this const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`); logger.warn(error.message); media.currentTime = targetTime; hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_NUDGE_ON_STALL, error, fatal: false }); } else { const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`); logger.error(error.message); hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_STALLED_ERROR, error, fatal: true }); } } } const TICK_INTERVAL$2 = 100; // how often to tick in ms class StreamController extends BaseStreamController { constructor(hls, fragmentTracker, keyLoader) { super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN); this.audioCodecSwap = false; this.gapController = null; this.level = -1; this._forceStartLoad = false; this.altAudio = false; this.audioOnly = false; this.fragPlaying = null; this.onvplaying = null; this.onvseeked = null; this.fragLastKbps = 0; this.couldBacktrack = false; this.backtrackFragment = null; this.audioCodecSwitch = false; this.videoBuffer = null; this._registerListeners(); } _registerListeners() { const { hls } = this; hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this); hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this); hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this); hls.on(Events.ERROR, this.onError, this); hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this); hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this); hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this); hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this); hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this); hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this); } _unregisterListeners() { const { hls } = this; hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this); hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this); hls.off(Events.ERROR, this.onError, this); hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this); hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this); hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this); hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this); hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this); hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this); } onHandlerDestroying() { this._unregisterListeners(); this.onMediaDetaching(); } startLoad(startPosition) { if (this.levels) { const { lastCurrentTime, hls } = this; this.stopLoad(); this.setInterval(TICK_INTERVAL$2); this.level = -1; if (!this.startFragRequested) { // determine load level let startLevel = hls.startLevel; if (startLevel === -1) { if (hls.config.testBandwidth && this.levels.length > 1) { // -1 : guess start Level by doing a bitrate test by loading first fragment of lowest quality level startLevel = 0; 
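/*
 * Illustrative sketch (not part of the library): the start-level selection being
 * made here, reduced to a small helper so the three branches are easier to
 * follow. The names pickStartLevel, cfg and levelCount are hypothetical.
 *
 *   function pickStartLevel(configuredStartLevel, cfg, levelCount, nextAutoLevel) {
 *     if (configuredStartLevel !== -1) {
 *       // an explicitly requested hls.startLevel wins
 *       return { level: configuredStartLevel, bitrateTest: false };
 *     }
 *     if (cfg.testBandwidth && levelCount > 1) {
 *       // probe: fetch the first fragment of the lowest level just to measure
 *       // bandwidth (see _loadBitrateTestFrag below); it is never buffered
 *       return { level: 0, bitrateTest: true };
 *     }
 *     return { level: nextAutoLevel, bitrateTest: false };
 *   }
 */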
this.bitrateTest = true; } else { startLevel = hls.nextAutoLevel; } } // set new level to playlist loader : this will trigger start level load // hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded this.level = hls.nextLoadLevel = startLevel; this.loadedmetadata = false; } // if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime if (lastCurrentTime > 0 && startPosition === -1) { this.log(`Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(3)}`); startPosition = lastCurrentTime; } this.state = State.IDLE; this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition; this.tick(); } else { this._forceStartLoad = true; this.state = State.STOPPED; } } stopLoad() { this._forceStartLoad = false; super.stopLoad(); } doTick() { switch (this.state) { case State.WAITING_LEVEL: { var _levels$level; const { levels, level } = this; const details = levels == null ? void 0 : (_levels$level = levels[level]) == null ? void 0 : _levels$level.details; if (details && (!details.live || this.levelLastLoaded === this.level)) { if (this.waitForCdnTuneIn(details)) { break; } this.state = State.IDLE; break; } else if (this.hls.nextLoadLevel !== this.level) { this.state = State.IDLE; break; } break; } case State.FRAG_LOADING_WAITING_RETRY: { var _this$media; const now = self.performance.now(); const retryDate = this.retryDate; // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading if (!retryDate || now >= retryDate || (_this$media = this.media) != null && _this$media.seeking) { this.resetStartWhenNotLoaded(this.level); this.state = State.IDLE; } } break; } if (this.state === State.IDLE) { this.doTickIdle(); } this.onTickEnd(); } onTickEnd() { super.onTickEnd(); this.checkBuffer(); this.checkFragmentChanged(); } doTickIdle() { const { hls, levelLastLoaded, levels, media } = this; const { config, nextLoadLevel: level } = hls; // if start level not parsed yet OR // if video not attached AND start fragment already requested OR start frag prefetch not enabled // exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment if (levelLastLoaded === null || !media && (this.startFragRequested || !config.startFragPrefetch)) { return; } // If the "main" level is audio-only but we are loading an alternate track in the same group, do not load anything if (this.altAudio && this.audioOnly) { return; } if (!(levels != null && levels[level])) { return; } const levelInfo = levels[level]; // if buffer length is less than maxBufLen try to load a new fragment const bufferInfo = this.getMainFwdBufferInfo(); if (bufferInfo === null) { return; } const lastDetails = this.getLevelDetails(); if (lastDetails && this._streamEnded(bufferInfo, lastDetails)) { const data = {}; if (this.altAudio) { data.type = 'video'; } this.hls.trigger(Events.BUFFER_EOS, data); this.state = State.ENDED; return; } // set next load level : this will trigger a playlist load if needed if (hls.loadLevel !== level && hls.manualLevel === -1) { this.log(`Adapting to level ${level} from level ${this.level}`); } this.level = hls.nextLoadLevel = level; const levelDetails = levelInfo.details; // if level info not retrieved yet, switch state and wait for level retrieval // if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load // a useless and outdated fragment (that might even introduce load error if it is already out of 
the live playlist) if (!levelDetails || this.state === State.WAITING_LEVEL || levelDetails.live && this.levelLastLoaded !== level) { this.level = level; this.state = State.WAITING_LEVEL; return; } const bufferLen = bufferInfo.len; // compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s const maxBufLen = this.getMaxBufferLength(levelInfo.maxBitrate); // Stay idle if we are still with buffer margins if (bufferLen >= maxBufLen) { return; } if (this.backtrackFragment && this.backtrackFragment.start > bufferInfo.end) { this.backtrackFragment = null; } const targetBufferTime = this.backtrackFragment ? this.backtrackFragment.start : bufferInfo.end; let frag = this.getNextFragment(targetBufferTime, levelDetails); // Avoid backtracking by loading an earlier segment in streams with segments that do not start with a key frame (flagged by `couldBacktrack`) if (this.couldBacktrack && !this.fragPrevious && frag && frag.sn !== 'initSegment' && this.fragmentTracker.getState(frag) !== FragmentState.OK) { var _this$backtrackFragme; const backtrackSn = ((_this$backtrackFragme = this.backtrackFragment) != null ? _this$backtrackFragme : frag).sn; const fragIdx = backtrackSn - levelDetails.startSN; const backtrackFrag = levelDetails.fragments[fragIdx - 1]; if (backtrackFrag && frag.cc === backtrackFrag.cc) { frag = backtrackFrag; this.fragmentTracker.removeFragment(backtrackFrag); } } else if (this.backtrackFragment && bufferInfo.len) { this.backtrackFragment = null; } // Avoid loop loading by using nextLoadPosition set for backtracking and skipping consecutive GAP tags if (frag && this.isLoopLoading(frag, targetBufferTime)) { const gapStart = frag.gap; if (!gapStart) { // Cleanup the fragment tracker before trying to find the next unbuffered fragment const type = this.audioOnly && !this.altAudio ? ElementaryStreamTypes.AUDIO : ElementaryStreamTypes.VIDEO; const mediaBuffer = (type === ElementaryStreamTypes.VIDEO ? 
this.videoBuffer : this.mediaBuffer) || this.media; if (mediaBuffer) { this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN); } } frag = this.getNextFragmentLoopLoading(frag, levelDetails, bufferInfo, PlaylistLevelType.MAIN, maxBufLen); } if (!frag) { return; } if (frag.initSegment && !frag.initSegment.data && !this.bitrateTest) { frag = frag.initSegment; } this.loadFragment(frag, levelInfo, targetBufferTime); } loadFragment(frag, level, targetBufferTime) { // Check if fragment is not loaded const fragState = this.fragmentTracker.getState(frag); this.fragCurrent = frag; if (fragState === FragmentState.NOT_LOADED || fragState === FragmentState.PARTIAL) { if (frag.sn === 'initSegment') { this._loadInitSegment(frag, level); } else if (this.bitrateTest) { this.log(`Fragment ${frag.sn} of level ${frag.level} is being downloaded to test bitrate and will not be buffered`); this._loadBitrateTestFrag(frag, level); } else { this.startFragRequested = true; super.loadFragment(frag, level, targetBufferTime); } } else { this.clearTrackerIfNeeded(frag); } } getBufferedFrag(position) { return this.fragmentTracker.getBufferedFrag(position, PlaylistLevelType.MAIN); } followingBufferedFrag(frag) { if (frag) { // try to get range of next fragment (500ms after this range) return this.getBufferedFrag(frag.end + 0.5); } return null; } /* on immediate level switch : - pause playback if playing - cancel any pending load request - and trigger a buffer flush */ immediateLevelSwitch() { this.abortCurrentFrag(); this.flushMainBuffer(0, Number.POSITIVE_INFINITY); } /** * try to switch ASAP without breaking video playback: * in order to ensure smooth but quick level switching, * we need to find the next flushable buffer range * we should take into account new segment fetch time */ nextLevelSwitch() { const { levels, media } = this; // ensure that media is defined and that metadata are available (to retrieve currentTime) if (media != null && media.readyState) { let fetchdelay; const fragPlayingCurrent = this.getAppendedFrag(media.currentTime); if (fragPlayingCurrent && fragPlayingCurrent.start > 1) { // flush buffer preceding current fragment (flush until current fragment start offset) // minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ... this.flushMainBuffer(0, fragPlayingCurrent.start - 1); } const levelDetails = this.getLevelDetails(); if (levelDetails != null && levelDetails.live) { const bufferInfo = this.getMainFwdBufferInfo(); // Do not flush in live stream with low buffer if (!bufferInfo || bufferInfo.len < levelDetails.targetduration * 2) { return; } } if (!media.paused && levels) { // add a safety delay of 1s const nextLevelId = this.hls.nextLoadLevel; const nextLevel = levels[nextLevelId]; const fragLastKbps = this.fragLastKbps; if (fragLastKbps && this.fragCurrent) { fetchdelay = this.fragCurrent.duration * nextLevel.maxBitrate / (1000 * fragLastKbps) + 1; } else { fetchdelay = 0; } } else { fetchdelay = 0; } // this.log('fetchdelay:'+fetchdelay); // find buffer range that will be reached once new fragment will be fetched const bufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay); if (bufferedFrag) { // we can flush buffer range following this one without stalling playback const nextBufferedFrag = this.followingBufferedFrag(bufferedFrag); if (nextBufferedFrag) { // if we are here, we can also cancel any loading/demuxing in progress, as they are useless this.abortCurrentFrag(); // start flush position is in next buffered frag. 
Leave some padding for non-independent segments and smoother playback. const maxStart = nextBufferedFrag.maxStartPTS ? nextBufferedFrag.maxStartPTS : nextBufferedFrag.start; const fragDuration = nextBufferedFrag.duration; const startPts = Math.max(bufferedFrag.end, maxStart + Math.min(Math.max(fragDuration - this.config.maxFragLookUpTolerance, fragDuration * 0.5), fragDuration * 0.75)); this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY); } } } } abortCurrentFrag() { const fragCurrent = this.fragCurrent; this.fragCurrent = null; this.backtrackFragment = null; if (fragCurrent) { fragCurrent.abortRequests(); this.fragmentTracker.removeFragment(fragCurrent); } switch (this.state) { case State.KEY_LOADING: case State.FRAG_LOADING: case State.FRAG_LOADING_WAITING_RETRY: case State.PARSING: case State.PARSED: this.state = State.IDLE; break; } this.nextLoadPosition = this.getLoadPosition(); } flushMainBuffer(startOffset, endOffset) { super.flushMainBuffer(startOffset, endOffset, this.altAudio ? 'video' : null); } onMediaAttached(event, data) { super.onMediaAttached(event, data); const media = data.media; this.onvplaying = this.onMediaPlaying.bind(this); this.onvseeked = this.onMediaSeeked.bind(this); media.addEventListener('playing', this.onvplaying); media.addEventListener('seeked', this.onvseeked); this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls); } onMediaDetaching() { const { media } = this; if (media && this.onvplaying && this.onvseeked) { media.removeEventListener('playing', this.onvplaying); media.removeEventListener('seeked', this.onvseeked); this.onvplaying = this.onvseeked = null; this.videoBuffer = null; } this.fragPlaying = null; if (this.gapController) { this.gapController.destroy(); this.gapController = null; } super.onMediaDetaching(); } onMediaPlaying() { // tick to speed up FRAG_CHANGED triggering this.tick(); } onMediaSeeked() { const media = this.media; const currentTime = media ? media.currentTime : null; if (isFiniteNumber(currentTime)) { this.log(`Media seeked to ${currentTime.toFixed(3)}`); } // If seeked was issued before buffer was appended do not tick immediately const bufferInfo = this.getMainFwdBufferInfo(); if (bufferInfo === null || bufferInfo.len === 0) { this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? 
bufferInfo.len : 'empty'})`); return; } // tick to speed up FRAG_CHANGED triggering this.tick(); } onManifestLoading() { // reset buffer on manifest loading this.log('Trigger BUFFER_RESET'); this.hls.trigger(Events.BUFFER_RESET, undefined); this.fragmentTracker.removeAllFragments(); this.couldBacktrack = false; this.startPosition = this.lastCurrentTime = 0; this.levels = this.fragPlaying = this.backtrackFragment = null; this.altAudio = this.audioOnly = false; } onManifestParsed(event, data) { let aac = false; let heaac = false; let codec; data.levels.forEach(level => { // detect if we have different kind of audio codecs used amongst playlists codec = level.audioCodec; if (codec) { if (codec.indexOf('mp4a.40.2') !== -1) { aac = true; } if (codec.indexOf('mp4a.40.5') !== -1) { heaac = true; } } }); this.audioCodecSwitch = aac && heaac && !changeTypeSupported(); if (this.audioCodecSwitch) { this.log('Both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC'); } this.levels = data.levels; this.startFragRequested = false; } onLevelLoading(event, data) { const { levels } = this; if (!levels || this.state !== State.IDLE) { return; } const level = levels[data.level]; if (!level.details || level.details.live && this.levelLastLoaded !== data.level || this.waitForCdnTuneIn(level.details)) { this.state = State.WAITING_LEVEL; } } onLevelLoaded(event, data) { var _curLevel$details; const { levels } = this; const newLevelId = data.level; const newDetails = data.details; const duration = newDetails.totalduration; if (!levels) { this.warn(`Levels were reset while loading level ${newLevelId}`); return; } this.log(`Level ${newLevelId} loaded [${newDetails.startSN},${newDetails.endSN}]${newDetails.lastPartSn ? `[part-${newDetails.lastPartSn}-${newDetails.lastPartIndex}]` : ''}, cc [${newDetails.startCC}, ${newDetails.endCC}] duration:${duration}`); const curLevel = levels[newLevelId]; const fragCurrent = this.fragCurrent; if (fragCurrent && (this.state === State.FRAG_LOADING || this.state === State.FRAG_LOADING_WAITING_RETRY)) { if ((fragCurrent.level !== data.level || fragCurrent.urlId !== curLevel.urlId) && fragCurrent.loader) { this.abortCurrentFrag(); } } let sliding = 0; if (newDetails.live || (_curLevel$details = curLevel.details) != null && _curLevel$details.live) { this.checkLiveUpdate(newDetails); if (newDetails.deltaUpdateFailed) { return; } sliding = this.alignPlaylists(newDetails, curLevel.details); } // override level info curLevel.details = newDetails; this.levelLastLoaded = newLevelId; this.hls.trigger(Events.LEVEL_UPDATED, { details: newDetails, level: newLevelId }); // only switch back to IDLE state if we were waiting for level to start downloading a new fragment if (this.state === State.WAITING_LEVEL) { if (this.waitForCdnTuneIn(newDetails)) { // Wait for Low-Latency CDN Tune-in return; } this.state = State.IDLE; } if (!this.startFragRequested) { this.setStartPosition(newDetails, sliding); } else if (newDetails.live) { this.synchronizeToLiveEdge(newDetails); } // trigger handler right now this.tick(); } _handleFragmentLoadProgress(data) { var _frag$initSegment; const { frag, part, payload } = data; const { levels } = this; if (!levels) { this.warn(`Levels were reset while fragment load was in progress. 
Fragment ${frag.sn} of level ${frag.level} will not be buffered`); return; } const currentLevel = levels[frag.level]; const details = currentLevel.details; if (!details) { this.warn(`Dropping fragment ${frag.sn} of level ${frag.level} after level details were reset`); this.fragmentTracker.removeFragment(frag); return; } const videoCodec = currentLevel.videoCodec; // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live) const accurateTimeOffset = details.PTSKnown || !details.live; const initSegmentData = (_frag$initSegment = frag.initSegment) == null ? void 0 : _frag$initSegment.data; const audioCodec = this._getAudioCodec(currentLevel); // transmux the MPEG-TS data to ISO-BMFF segments // this.log(`Transmuxing ${frag.sn} of [${details.startSN} ,${details.endSN}],level ${frag.level}, cc ${frag.cc}`); const transmuxer = this.transmuxer = this.transmuxer || new TransmuxerInterface(this.hls, PlaylistLevelType.MAIN, this._handleTransmuxComplete.bind(this), this._handleTransmuxerFlush.bind(this)); const partIndex = part ? part.index : -1; const partial = partIndex !== -1; const chunkMeta = new ChunkMetadata(frag.level, frag.sn, frag.stats.chunkCount, payload.byteLength, partIndex, partial); const initPTS = this.initPTS[frag.cc]; transmuxer.push(payload, initSegmentData, audioCodec, videoCodec, frag, part, details.totalduration, accurateTimeOffset, chunkMeta, initPTS); } onAudioTrackSwitching(event, data) { // if any URL found on new audio track, it is an alternate audio track const fromAltAudio = this.altAudio; const altAudio = !!data.url; // if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered // don't do anything if we switch to alt audio: audio stream controller is handling it. 
// we will just have to change buffer scheduling on audioTrackSwitched if (!altAudio) { if (this.mediaBuffer !== this.media) { this.log('Switching on main audio, use media.buffered to schedule main fragment loading'); this.mediaBuffer = this.media; const fragCurrent = this.fragCurrent; // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch if (fragCurrent) { this.log('Switching to main audio track, cancel main fragment load'); fragCurrent.abortRequests(); this.fragmentTracker.removeFragment(fragCurrent); } // destroy transmuxer to force init segment generation (following audio switch) this.resetTransmuxer(); // switch to IDLE state to load new fragment this.resetLoadingState(); } else if (this.audioOnly) { // Reset audio transmuxer so when switching back to main audio we're not still appending where we left off this.resetTransmuxer(); } const hls = this.hls; // If switching from alt to main audio, flush all audio and trigger track switched if (fromAltAudio) { hls.trigger(Events.BUFFER_FLUSHING, { startOffset: 0, endOffset: Number.POSITIVE_INFINITY, type: null }); this.fragmentTracker.removeAllFragments(); } hls.trigger(Events.AUDIO_TRACK_SWITCHED, data); } } onAudioTrackSwitched(event, data) { const trackId = data.id; const altAudio = !!this.hls.audioTracks[trackId].url; if (altAudio) { const videoBuffer = this.videoBuffer; // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered if (videoBuffer && this.mediaBuffer !== videoBuffer) { this.log('Switching on alternate audio, use video.buffered to schedule main fragment loading'); this.mediaBuffer = videoBuffer; } } this.altAudio = altAudio; this.tick(); } onBufferCreated(event, data) { const tracks = data.tracks; let mediaTrack; let name; let alternate = false; for (const type in tracks) { const track = tracks[type]; if (track.id === 'main') { name = type; mediaTrack = track; // keep video source buffer reference if (type === 'video') { const videoTrack = tracks[type]; if (videoTrack) { this.videoBuffer = videoTrack.buffer; } } } else { alternate = true; } } if (alternate && mediaTrack) { this.log(`Alternate track found, use ${name}.buffered to schedule main fragment loading`); this.mediaBuffer = mediaTrack.buffer; } else { this.mediaBuffer = this.media; } } onFragBuffered(event, data) { const { frag, part } = data; if (frag && frag.type !== PlaylistLevelType.MAIN) { return; } if (this.fragContextChanged(frag)) { // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion // Avoid setting state back to IDLE, since that will interfere with a level switch this.warn(`Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${frag.level} finished buffering, but was aborted. state: ${this.state}`); if (this.state === State.PARSED) { this.state = State.IDLE; } return; } const stats = part ? 
part.stats : frag.stats; this.fragLastKbps = Math.round(8 * stats.total / (stats.buffering.end - stats.loading.first)); if (frag.sn !== 'initSegment') { this.fragPrevious = frag; } this.fragBufferedComplete(frag, part); } onError(event, data) { var _data$context; if (data.fatal) { this.state = State.ERROR; return; } switch (data.details) { case ErrorDetails.FRAG_GAP: case ErrorDetails.FRAG_PARSING_ERROR: case ErrorDetails.FRAG_DECRYPT_ERROR: case ErrorDetails.FRAG_LOAD_ERROR: case ErrorDetails.FRAG_LOAD_TIMEOUT: case ErrorDetails.KEY_LOAD_ERROR: case ErrorDetails.KEY_LOAD_TIMEOUT: this.onFragmentOrKeyLoadError(PlaylistLevelType.MAIN, data); break; case ErrorDetails.LEVEL_LOAD_ERROR: case ErrorDetails.LEVEL_LOAD_TIMEOUT: case ErrorDetails.LEVEL_PARSING_ERROR: // in case of non fatal error while loading level, if level controller is not retrying to load level, switch back to IDLE if (!data.levelRetry && this.state === State.WAITING_LEVEL && ((_data$context = data.context) == null ? void 0 : _data$context.type) === PlaylistContextType.LEVEL) { this.state = State.IDLE; } break; case ErrorDetails.BUFFER_FULL_ERROR: if (!data.parent || data.parent !== 'main') { return; } if (this.reduceLengthAndFlushBuffer(data)) { this.flushMainBuffer(0, Number.POSITIVE_INFINITY); } break; case ErrorDetails.INTERNAL_EXCEPTION: this.recoverWorkerError(data); break; } } // Checks the health of the buffer and attempts to resolve playback stalls. checkBuffer() { const { media, gapController } = this; if (!media || !gapController || !media.readyState) { // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0) return; } if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) { // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null; gapController.poll(this.lastCurrentTime, activeFrag); } this.lastCurrentTime = media.currentTime; } onFragLoadEmergencyAborted() { this.state = State.IDLE; // if loadedmetadata is not set, it means that we are emergency switch down on first frag // in that case, reset startFragRequested flag if (!this.loadedmetadata) { this.startFragRequested = false; this.nextLoadPosition = this.startPosition; } this.tickImmediate(); } onBufferFlushed(event, { type }) { if (type !== ElementaryStreamTypes.AUDIO || this.audioOnly && !this.altAudio) { const mediaBuffer = (type === ElementaryStreamTypes.VIDEO ? this.videoBuffer : this.mediaBuffer) || this.media; this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN); } } onLevelsUpdated(event, data) { this.levels = data.levels; } swapAudioCodec() { this.audioCodecSwap = !this.audioCodecSwap; } /** * Seeks to the set startPosition if not equal to the mediaElement's current time. */ seekToStartPos() { const { media } = this; if (!media) { return; } const currentTime = media.currentTime; let startPosition = this.startPosition; // only adjust currentTime if different from startPosition or if startPosition not buffered // at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered if (startPosition >= 0 && currentTime < startPosition) { if (media.seeking) { this.log(`could not seek to ${startPosition}, already seeking at ${currentTime}`); return; } const buffered = BufferHelper.getBuffered(media); const bufferStart = buffered.length ? 
buffered.start(0) : 0; const delta = bufferStart - startPosition; if (delta > 0 && (delta < this.config.maxBufferHole || delta < this.config.maxFragLookUpTolerance)) { this.log(`adjusting start position by ${delta} to match buffer start`); startPosition += delta; this.startPosition = startPosition; } this.log(`seek to target start position ${startPosition} from current time ${currentTime}`); media.currentTime = startPosition; } } _getAudioCodec(currentLevel) { let audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec; if (this.audioCodecSwap && audioCodec) { this.log('Swapping audio codec'); if (audioCodec.indexOf('mp4a.40.5') !== -1) { audioCodec = 'mp4a.40.2'; } else { audioCodec = 'mp4a.40.5'; } } return audioCodec; } _loadBitrateTestFrag(frag, level) { frag.bitrateTest = true; this._doFragLoad(frag, level).then(data => { const { hls } = this; if (!data || this.fragContextChanged(frag)) { return; } level.fragmentError = 0; this.state = State.IDLE; this.startFragRequested = false; this.bitrateTest = false; const stats = frag.stats; // Bitrate tests fragments are neither parsed nor buffered stats.parsing.start = stats.parsing.end = stats.buffering.start = stats.buffering.end = self.performance.now(); hls.trigger(Events.FRAG_LOADED, data); frag.bitrateTest = false; }); } _handleTransmuxComplete(transmuxResult) { var _id3$samples; const id = 'main'; const { hls } = this; const { remuxResult, chunkMeta } = transmuxResult; const context = this.getCurrentContext(chunkMeta); if (!context) { this.resetWhenMissingContext(chunkMeta); return; } const { frag, part, level } = context; const { video, text, id3, initSegment } = remuxResult; const { details } = level; // The audio-stream-controller handles audio buffering if Hls.js is playing an alternate audio track const audio = this.altAudio ? undefined : remuxResult.audio; // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level. // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed. if (this.fragContextChanged(frag)) { this.fragmentTracker.removeFragment(frag); return; } this.state = State.PARSING; if (initSegment) { if (initSegment != null && initSegment.tracks) { const mapFragment = frag.initSegment || frag; this._bufferInitSegment(level, initSegment.tracks, mapFragment, chunkMeta); hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, { frag: mapFragment, id, tracks: initSegment.tracks }); } // This would be nice if Number.isFinite acted as a typeguard, but it doesn't. 
See: https://github.com/Microsoft/TypeScript/issues/10038 const initPTS = initSegment.initPTS; const timescale = initSegment.timescale; if (isFiniteNumber(initPTS)) { this.initPTS[frag.cc] = { baseTime: initPTS, timescale }; hls.trigger(Events.INIT_PTS_FOUND, { frag, id, initPTS, timescale }); } } // Avoid buffering if backtracking this fragment if (video && details && frag.sn !== 'initSegment') { const prevFrag = details.fragments[frag.sn - 1 - details.startSN]; const isFirstFragment = frag.sn === details.startSN; const isFirstInDiscontinuity = !prevFrag || frag.cc > prevFrag.cc; if (remuxResult.independent !== false) { const { startPTS, endPTS, startDTS, endDTS } = video; if (part) { part.elementaryStreams[video.type] = { startPTS, endPTS, startDTS, endDTS }; } else { if (video.firstKeyFrame && video.independent && chunkMeta.id === 1 && !isFirstInDiscontinuity) { this.couldBacktrack = true; } if (video.dropped && video.independent) { // Backtrack if dropped frames create a gap after currentTime const bufferInfo = this.getMainFwdBufferInfo(); const targetBufferTime = (bufferInfo ? bufferInfo.end : this.getLoadPosition()) + this.config.maxBufferHole; const startTime = video.firstKeyFramePTS ? video.firstKeyFramePTS : startPTS; if (!isFirstFragment && targetBufferTime < startTime - this.config.maxBufferHole && !isFirstInDiscontinuity) { this.backtrack(frag); return; } else if (isFirstInDiscontinuity) { // Mark segment with a gap to avoid loop loading frag.gap = true; } // Set video stream start to fragment start so that truncated samples do not distort the timeline, and mark it partial frag.setElementaryStreamInfo(video.type, frag.start, endPTS, frag.start, endDTS, true); } } frag.setElementaryStreamInfo(video.type, startPTS, endPTS, startDTS, endDTS); if (this.backtrackFragment) { this.backtrackFragment = frag; } this.bufferFragmentData(video, frag, part, chunkMeta, isFirstFragment || isFirstInDiscontinuity); } else if (isFirstFragment || isFirstInDiscontinuity) { // Mark segment with a gap to avoid loop loading frag.gap = true; } else { this.backtrack(frag); return; } } if (audio) { const { startPTS, endPTS, startDTS, endDTS } = audio; if (part) { part.elementaryStreams[ElementaryStreamTypes.AUDIO] = { startPTS, endPTS, startDTS, endDTS }; } frag.setElementaryStreamInfo(ElementaryStreamTypes.AUDIO, startPTS, endPTS, startDTS, endDTS); this.bufferFragmentData(audio, frag, part, chunkMeta); } if (details && id3 != null && (_id3$samples = id3.samples) != null && _id3$samples.length) { const emittedID3 = { id, frag, details, samples: id3.samples }; hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3); } if (details && text) { const emittedText = { id, frag, details, samples: text.samples }; hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText); } } _bufferInitSegment(currentLevel, tracks, frag, chunkMeta) { if (this.state !== State.PARSING) { return; } this.audioOnly = !!tracks.audio && !tracks.video; // if audio track is expected to come from audio stream controller, discard any coming from main if (this.altAudio && !this.audioOnly) { delete tracks.audio; } // include levelCodec in audio and video tracks const { audio, video, audiovideo } = tracks; if (audio) { let audioCodec = currentLevel.audioCodec; const ua = navigator.userAgent.toLowerCase(); if (this.audioCodecSwitch) { if (audioCodec) { if (audioCodec.indexOf('mp4a.40.5') !== -1) { audioCodec = 'mp4a.40.2'; } else { audioCodec = 'mp4a.40.5'; } } // In the case that AAC and HE-AAC audio codecs are signalled in manifest, // force 
HE-AAC, as it seems that most browsers prefers it. // don't force HE-AAC if mono stream, or in Firefox if (audio.metadata.channelCount !== 1 && ua.indexOf('firefox') === -1) { audioCodec = 'mp4a.40.5'; } } // HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise if (ua.indexOf('android') !== -1 && audio.container !== 'audio/mpeg') { // Exclude mpeg audio audioCodec = 'mp4a.40.2'; this.log(`Android: force audio codec to ${audioCodec}`); } if (currentLevel.audioCodec && currentLevel.audioCodec !== audioCodec) { this.log(`Swapping manifest audio codec "${currentLevel.audioCodec}" for "${audioCodec}"`); } audio.levelCodec = audioCodec; audio.id = 'main'; this.log(`Init audio buffer, container:${audio.container}, codecs[selected/level/parsed]=[${audioCodec || ''}/${currentLevel.audioCodec || ''}/${audio.codec}]`); } if (video) { video.levelCodec = currentLevel.videoCodec; video.id = 'main'; this.log(`Init video buffer, container:${video.container}, codecs[level/parsed]=[${currentLevel.videoCodec || ''}/${video.codec}]`); } if (audiovideo) { this.log(`Init audiovideo buffer, container:${audiovideo.container}, codecs[level/parsed]=[${currentLevel.attrs.CODECS || ''}/${audiovideo.codec}]`); } this.hls.trigger(Events.BUFFER_CODECS, tracks); // loop through tracks that are going to be provided to bufferController Object.keys(tracks).forEach(trackName => { const track = tracks[trackName]; const initSegment = track.initSegment; if (initSegment != null && initSegment.byteLength) { this.hls.trigger(Events.BUFFER_APPENDING, { type: trackName, data: initSegment, frag, part: null, chunkMeta, parent: frag.type }); } }); // trigger handler right now this.tick(); } getMainFwdBufferInfo() { return this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, PlaylistLevelType.MAIN); } backtrack(frag) { this.couldBacktrack = true; // Causes findFragments to backtrack through fragments to find the keyframe this.backtrackFragment = frag; this.resetTransmuxer(); this.flushBufferGap(frag); this.fragmentTracker.removeFragment(frag); this.fragPrevious = null; this.nextLoadPosition = frag.start; this.state = State.IDLE; } checkFragmentChanged() { const video = this.media; let fragPlayingCurrent = null; if (video && video.readyState > 1 && video.seeking === false) { const currentTime = video.currentTime; /* if video element is in seeked state, currentTime can only increase. (assuming that playback rate is positive ...) As sometimes currentTime jumps back to zero after a media decode error, check this, to avoid seeking back to wrong position after a media decode error */ if (BufferHelper.isBuffered(video, currentTime)) { fragPlayingCurrent = this.getAppendedFrag(currentTime); } else if (BufferHelper.isBuffered(video, currentTime + 0.1)) { /* ensure that FRAG_CHANGED event is triggered at startup, when first video frame is displayed and playback is paused. 
add a tolerance of 100ms, in case current position is not buffered, check if current pos+100ms is buffered and use that buffer range for FRAG_CHANGED event reporting */ fragPlayingCurrent = this.getAppendedFrag(currentTime + 0.1); } if (fragPlayingCurrent) { this.backtrackFragment = null; const fragPlaying = this.fragPlaying; const fragCurrentLevel = fragPlayingCurrent.level; if (!fragPlaying || fragPlayingCurrent.sn !== fragPlaying.sn || fragPlaying.level !== fragCurrentLevel || fragPlayingCurrent.urlId !== fragPlaying.urlId) { this.fragPlaying = fragPlayingCurrent; this.hls.trigger(Events.FRAG_CHANGED, { frag: fragPlayingCurrent }); if (!fragPlaying || fragPlaying.level !== fragCurrentLevel) { this.hls.trigger(Events.LEVEL_SWITCHED, { level: fragCurrentLevel }); } } } } } get nextLevel() { const frag = this.nextBufferedFrag; if (frag) { return frag.level; } return -1; } get currentFrag() { const media = this.media; if (media) { return this.fragPlaying || this.getAppendedFrag(media.currentTime); } return null; } get currentProgramDateTime() { const media = this.media; if (media) { const currentTime = media.currentTime; const frag = this.currentFrag; if (frag && isFiniteNumber(currentTime) && isFiniteNumber(frag.programDateTime)) { const epocMs = frag.programDateTime + (currentTime - frag.start) * 1000; return new Date(epocMs); } } return null; } get currentLevel() { const frag = this.currentFrag; if (frag) { return frag.level; } return -1; } get nextBufferedFrag() { const frag = this.currentFrag; if (frag) { return this.followingBufferedFrag(frag); } return null; } get forceStartLoad() { return this._forceStartLoad; } } /* * compute an Exponential Weighted moving average * - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average * - heavily inspired from shaka-player */ class EWMA { // About half of the estimated value will be from the last |halfLife| samples by weight. constructor(halfLife, estimate = 0, weight = 0) { this.halfLife = void 0; this.alpha_ = void 0; this.estimate_ = void 0; this.totalWeight_ = void 0; this.halfLife = halfLife; // Larger values of alpha expire historical data more slowly. this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0; this.estimate_ = estimate; this.totalWeight_ = weight; } sample(weight, value) { const adjAlpha = Math.pow(this.alpha_, weight); this.estimate_ = value * (1 - adjAlpha) + adjAlpha * this.estimate_; this.totalWeight_ += weight; } getTotalWeight() { return this.totalWeight_; } getEstimate() { if (this.alpha_) { const zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_); if (zeroFactor) { return this.estimate_ / zeroFactor; } } return this.estimate_; } } /* * EWMA Bandwidth Estimator * - heavily inspired from shaka-player * Tracks bandwidth samples and estimates available bandwidth. * Based on the minimum of two exponentially-weighted moving averages with * different half-lives. 
*/ class EwmaBandWidthEstimator { constructor(slow, fast, defaultEstimate, defaultTTFB = 100) { this.defaultEstimate_ = void 0; this.minWeight_ = void 0; this.minDelayMs_ = void 0; this.slow_ = void 0; this.fast_ = void 0; this.defaultTTFB_ = void 0; this.ttfb_ = void 0; this.defaultEstimate_ = defaultEstimate; this.minWeight_ = 0.001; this.minDelayMs_ = 50; this.slow_ = new EWMA(slow); this.fast_ = new EWMA(fast); this.defaultTTFB_ = defaultTTFB; this.ttfb_ = new EWMA(slow); } update(slow, fast) { const { slow_, fast_, ttfb_ } = this; if (slow_.halfLife !== slow) { this.slow_ = new EWMA(slow, slow_.getEstimate(), slow_.getTotalWeight()); } if (fast_.halfLife !== fast) { this.fast_ = new EWMA(fast, fast_.getEstimate(), fast_.getTotalWeight()); } if (ttfb_.halfLife !== slow) { this.ttfb_ = new EWMA(slow, ttfb_.getEstimate(), ttfb_.getTotalWeight()); } } sample(durationMs, numBytes) { durationMs = Math.max(durationMs, this.minDelayMs_); const numBits = 8 * numBytes; // weight is duration in seconds const durationS = durationMs / 1000; // value is bandwidth in bits/s const bandwidthInBps = numBits / durationS; this.fast_.sample(durationS, bandwidthInBps); this.slow_.sample(durationS, bandwidthInBps); } sampleTTFB(ttfb) { // weight is frequency curve applied to TTFB in seconds // (longer times have less weight with expected input under 1 second) const seconds = ttfb / 1000; const weight = Math.sqrt(2) * Math.exp(-Math.pow(seconds, 2) / 2); this.ttfb_.sample(weight, Math.max(ttfb, 5)); } canEstimate() { return this.fast_.getTotalWeight() >= this.minWeight_; } getEstimate() { if (this.canEstimate()) { // console.log('slow estimate:'+ Math.round(this.slow_.getEstimate())); // console.log('fast estimate:'+ Math.round(this.fast_.getEstimate())); // Take the minimum of these two estimates. This should have the effect of // adapting down quickly, but up more slowly. 
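/*
 * Illustrative sketch (not part of the library), showing why the minimum of the
 * two averages behaves this way. The sample values and half-lives below are
 * hypothetical; sample() is fed the fragment duration in seconds as its weight,
 * matching EwmaBandWidthEstimator.sample() above.
 *
 *   const fast = new EWMA(3);  // short half-life: tracks recent throughput
 *   const slow = new EWMA(9);  // long half-life: smooths over history
 *   // three 4-second fragments at ~5 Mbps, then one congested one at ~1 Mbps
 *   [5e6, 5e6, 5e6, 1e6].forEach(bps => { fast.sample(4, bps); slow.sample(4, bps); });
 *   // fast.getEstimate() now sits well below slow.getEstimate(), so the
 *   // Math.min(...) below drops the bandwidth estimate quickly. Once
 *   // throughput recovers, fast rises again but the minimum stays capped by
 *   // slow, so the estimate climbs back up only gradually.
 */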
return Math.min(this.fast_.getEstimate(), this.slow_.getEstimate()); } else { return this.defaultEstimate_; } } getEstimateTTFB() { if (this.ttfb_.getTotalWeight() >= this.minWeight_) { return this.ttfb_.getEstimate(); } else { return this.defaultTTFB_; } } destroy() {} } class AbrController { constructor(hls) { this.hls = void 0; this.lastLevelLoadSec = 0; this.lastLoadedFragLevel = 0; this._nextAutoLevel = -1; this.timer = -1; this.onCheck = this._abandonRulesCheck.bind(this); this.fragCurrent = null; this.partCurrent = null; this.bitrateTestDelay = 0; this.bwEstimator = void 0; this.hls = hls; const config = hls.config; this.bwEstimator = new EwmaBandWidthEstimator(config.abrEwmaSlowVoD, config.abrEwmaFastVoD, config.abrEwmaDefaultEstimate); this.registerListeners(); } registerListeners() { const { hls } = this; hls.on(Events.FRAG_LOADING, this.onFragLoading, this); hls.on(Events.FRAG_LOADED, this.onFragLoaded, this); hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this); hls.on(Events.LEVEL_SWITCHING, this.onLevelSwitching, this); hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this); } unregisterListeners() { const { hls } = this; hls.off(Events.FRAG_LOADING, this.onFragLoading, this); hls.off(Events.FRAG_LOADED, this.onFragLoaded, this); hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this); hls.off(Events.LEVEL_SWITCHING, this.onLevelSwitching, this); hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this); } destroy() { this.unregisterListeners(); this.clearTimer(); // @ts-ignore this.hls = this.onCheck = null; this.fragCurrent = this.partCurrent = null; } onFragLoading(event, data) { var _data$part; const frag = data.frag; if (this.ignoreFragment(frag)) { return; } this.fragCurrent = frag; this.partCurrent = (_data$part = data.part) != null ? _data$part : null; this.clearTimer(); this.timer = self.setInterval(this.onCheck, 100); } onLevelSwitching(event, data) { this.clearTimer(); } getTimeToLoadFrag(timeToFirstByteSec, bandwidth, fragSizeBits, isSwitch) { const fragLoadSec = timeToFirstByteSec + fragSizeBits / bandwidth; const playlistLoadSec = isSwitch ? this.lastLevelLoadSec : 0; return fragLoadSec + playlistLoadSec; } onLevelLoaded(event, data) { const config = this.hls.config; const { total, bwEstimate } = data.stats; // Total is the bytelength and bwEstimate in bits/sec if (isFiniteNumber(total) && isFiniteNumber(bwEstimate)) { this.lastLevelLoadSec = 8 * total / bwEstimate; } if (data.details.live) { this.bwEstimator.update(config.abrEwmaSlowLive, config.abrEwmaFastLive); } else { this.bwEstimator.update(config.abrEwmaSlowVoD, config.abrEwmaFastVoD); } } /* This method monitors the download rate of the current fragment, and will downswitch if that fragment will not load quickly enough to prevent underbuffering */ _abandonRulesCheck() { const { fragCurrent: frag, partCurrent: part, hls } = this; const { autoLevelEnabled, media } = hls; if (!frag || !media) { return; } const now = performance.now(); const stats = part ? part.stats : frag.stats; const duration = part ? 
part.duration : frag.duration; const timeLoading = now - stats.loading.start; // If frag loading is aborted, complete, or from lowest level, stop timer and return if (stats.aborted || stats.loaded && stats.loaded === stats.total || frag.level === 0) { this.clearTimer(); // reset forced auto level value so that next level will be selected this._nextAutoLevel = -1; return; } // This check only runs if we're in ABR mode and actually playing if (!autoLevelEnabled || media.paused || !media.playbackRate || !media.readyState) { return; } const bufferInfo = hls.mainForwardBufferInfo; if (bufferInfo === null) { return; } const ttfbEstimate = this.bwEstimator.getEstimateTTFB(); const playbackRate = Math.abs(media.playbackRate); // To maintain stable adaptive playback, only begin monitoring frag loading after half or more of its playback duration has passed if (timeLoading <= Math.max(ttfbEstimate, 1000 * (duration / (playbackRate * 2)))) { return; } // bufferStarvationDelay is an estimate of the amount time (in seconds) it will take to exhaust the buffer const bufferStarvationDelay = bufferInfo.len / playbackRate; // Only downswitch if less than 2 fragment lengths are buffered if (bufferStarvationDelay >= 2 * duration / playbackRate) { return; } const ttfb = stats.loading.first ? stats.loading.first - stats.loading.start : -1; const loadedFirstByte = stats.loaded && ttfb > -1; const bwEstimate = this.bwEstimator.getEstimate(); const { levels, minAutoLevel } = hls; const level = levels[frag.level]; const expectedLen = stats.total || Math.max(stats.loaded, Math.round(duration * level.maxBitrate / 8)); let timeStreaming = timeLoading - ttfb; if (timeStreaming < 1 && loadedFirstByte) { timeStreaming = Math.min(timeLoading, stats.loaded * 8 / bwEstimate); } const loadRate = loadedFirstByte ? stats.loaded * 1000 / timeStreaming : 0; // fragLoadDelay is an estimate of the time (in seconds) it will take to buffer the remainder of the fragment const fragLoadedDelay = loadRate ? (expectedLen - stats.loaded) / loadRate : expectedLen * 8 / bwEstimate + ttfbEstimate / 1000; // Only downswitch if the time to finish loading the current fragment is greater than the amount of buffer left if (fragLoadedDelay <= bufferStarvationDelay) { return; } const bwe = loadRate ? 
loadRate * 8 : bwEstimate; let fragLevelNextLoadedDelay = Number.POSITIVE_INFINITY; let nextLoadLevel; // Iterate through lower level and try to find the largest one that avoids rebuffering for (nextLoadLevel = frag.level - 1; nextLoadLevel > minAutoLevel; nextLoadLevel--) { // compute time to load next fragment at lower level // 8 = bits per byte (bps/Bps) const levelNextBitrate = levels[nextLoadLevel].maxBitrate; fragLevelNextLoadedDelay = this.getTimeToLoadFrag(ttfbEstimate / 1000, bwe, duration * levelNextBitrate, !levels[nextLoadLevel].details); if (fragLevelNextLoadedDelay < bufferStarvationDelay) { break; } } // Only emergency switch down if it takes less time to load a new fragment at lowest level instead of continuing // to load the current one if (fragLevelNextLoadedDelay >= fragLoadedDelay) { return; } // if estimated load time of new segment is completely unreasonable, ignore and do not emergency switch down if (fragLevelNextLoadedDelay > duration * 10) { return; } hls.nextLoadLevel = nextLoadLevel; if (loadedFirstByte) { // If there has been loading progress, sample bandwidth using loading time offset by minimum TTFB time this.bwEstimator.sample(timeLoading - Math.min(ttfbEstimate, ttfb), stats.loaded); } else { // If there has been no loading progress, sample TTFB this.bwEstimator.sampleTTFB(timeLoading); } this.clearTimer(); logger.warn(`[abr] Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly; Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s TTFB estimate: ${ttfb} Current BW estimate: ${isFiniteNumber(bwEstimate) ? (bwEstimate / 1024).toFixed(3) : 'Unknown'} Kb/s New BW estimate: ${(this.bwEstimator.getEstimate() / 1024).toFixed(3)} Kb/s Aborting and switching to level ${nextLoadLevel}`); if (frag.loader) { this.fragCurrent = this.partCurrent = null; frag.abortRequests(); } hls.trigger(Events.FRAG_LOAD_EMERGENCY_ABORTED, { frag, part, stats }); } onFragLoaded(event, { frag, part }) { const stats = part ? part.stats : frag.stats; if (frag.type === PlaylistLevelType.MAIN) { this.bwEstimator.sampleTTFB(stats.loading.first - stats.loading.start); } if (this.ignoreFragment(frag)) { return; } // stop monitoring bw once frag loaded this.clearTimer(); // store level id after successful fragment load this.lastLoadedFragLevel = frag.level; // reset forced auto level value so that next level will be selected this._nextAutoLevel = -1; // compute level average bitrate if (this.hls.config.abrMaxWithRealBitrate) { const duration = part ? part.duration : frag.duration; const level = this.hls.levels[frag.level]; const loadedBytes = (level.loaded ? level.loaded.bytes : 0) + stats.loaded; const loadedDuration = (level.loaded ? level.loaded.duration : 0) + duration; level.loaded = { bytes: loadedBytes, duration: loadedDuration }; level.realBitrate = Math.round(8 * loadedBytes / loadedDuration); } if (frag.bitrateTest) { const fragBufferedData = { stats, frag, part, id: frag.type }; this.onFragBuffered(Events.FRAG_BUFFERED, fragBufferedData); frag.bitrateTest = false; } } onFragBuffered(event, data) { const { frag, part } = data; const stats = part != null && part.stats.loaded ? 
part.stats : frag.stats; if (stats.aborted) { return; } if (this.ignoreFragment(frag)) { return; } // Use the difference between parsing and request instead of buffering and request to compute fragLoadingProcessing; // rationale is that buffer appending only happens once media is attached. This can happen when config.startFragPrefetch // is used. If we used buffering in that case, our BW estimate sample will be very large. const processingMs = stats.parsing.end - stats.loading.start - Math.min(stats.loading.first - stats.loading.start, this.bwEstimator.getEstimateTTFB()); this.bwEstimator.sample(processingMs, stats.loaded); stats.bwEstimate = this.bwEstimator.getEstimate(); if (frag.bitrateTest) { this.bitrateTestDelay = processingMs / 1000; } else { this.bitrateTestDelay = 0; } } ignoreFragment(frag) { // Only count non-alt-audio frags which were actually buffered in our BW calculations return frag.type !== PlaylistLevelType.MAIN || frag.sn === 'initSegment'; } clearTimer() { self.clearInterval(this.timer); } // return next auto level get nextAutoLevel() { const forcedAutoLevel = this._nextAutoLevel; const bwEstimator = this.bwEstimator; // in case next auto level has been forced, and bw not available or not reliable, return forced value if (forcedAutoLevel !== -1 && !bwEstimator.canEstimate()) { return forcedAutoLevel; } // compute next level using ABR logic let nextABRAutoLevel = this.getNextABRAutoLevel(); // use forced auto level when ABR selected level has errored if (forcedAutoLevel !== -1) { const levels = this.hls.levels; if (levels.length > Math.max(forcedAutoLevel, nextABRAutoLevel) && levels[forcedAutoLevel].loadError <= levels[nextABRAutoLevel].loadError) { return forcedAutoLevel; } } // if forced auto level has been defined, use it to cap ABR computed quality level if (forcedAutoLevel !== -1) { nextABRAutoLevel = Math.min(forcedAutoLevel, nextABRAutoLevel); } return nextABRAutoLevel; } getNextABRAutoLevel() { const { fragCurrent, partCurrent, hls } = this; const { maxAutoLevel, config, minAutoLevel, media } = hls; const currentFragDuration = partCurrent ? partCurrent.duration : fragCurrent ? fragCurrent.duration : 0; // playbackRate is the absolute value of the playback rate; if media.playbackRate is 0, we use 1 to load as // if we're playing back at the normal rate. const playbackRate = media && media.playbackRate !== 0 ? Math.abs(media.playbackRate) : 1.0; const avgbw = this.bwEstimator ? this.bwEstimator.getEstimate() : config.abrEwmaDefaultEstimate; // bufferStarvationDelay is the wall-clock time left until the playback buffer is exhausted. const bufferInfo = hls.mainForwardBufferInfo; const bufferStarvationDelay = (bufferInfo ? bufferInfo.len : 0) / playbackRate; // First, look to see if we can find a level matching with our avg bandwidth AND that could also guarantee no rebuffering at all let bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, config.abrBandWidthFactor, config.abrBandWidthUpFactor); if (bestLevel >= 0) { return bestLevel; } logger.trace(`[abr] ${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, finding optimal quality level`); // not possible to get rid of rebuffering ... let's try to find level that will guarantee less than maxStarvationDelay of rebuffering // if no matching level found, logic will return 0 let maxStarvationDelay = currentFragDuration ? 
Math.min(currentFragDuration, config.maxStarvationDelay) : config.maxStarvationDelay; let bwFactor = config.abrBandWidthFactor; let bwUpFactor = config.abrBandWidthUpFactor; if (!bufferStarvationDelay) { // in case buffer is empty, let's check if previous fragment was loaded to perform a bitrate test const bitrateTestDelay = this.bitrateTestDelay; if (bitrateTestDelay) { // if it is the case, then we need to adjust our max starvation delay using maxLoadingDelay config value // max video loading delay used in automatic start level selection : // in that mode ABR controller will ensure that video loading time (ie the time to fetch the first fragment at lowest quality level + // the time to fetch the fragment at the appropriate quality level is less than ```maxLoadingDelay``` ) // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay; maxStarvationDelay = maxLoadingDelay - bitrateTestDelay; logger.trace(`[abr] bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`); // don't use conservative factor on bitrate test bwFactor = bwUpFactor = 1; } } bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay + maxStarvationDelay, bwFactor, bwUpFactor); return Math.max(bestLevel, 0); } findBestLevel(currentBw, minAutoLevel, maxAutoLevel, maxFetchDuration, bwFactor, bwUpFactor) { var _level$details; const { fragCurrent, partCurrent, lastLoadedFragLevel: currentLevel } = this; const { levels } = this.hls; const level = levels[currentLevel]; const live = !!(level != null && (_level$details = level.details) != null && _level$details.live); const currentCodecSet = level == null ? void 0 : level.codecSet; const currentFragDuration = partCurrent ? partCurrent.duration : fragCurrent ? fragCurrent.duration : 0; const ttfbEstimateSec = this.bwEstimator.getEstimateTTFB() / 1000; let levelSkippedMin = minAutoLevel; let levelSkippedMax = -1; for (let i = maxAutoLevel; i >= minAutoLevel; i--) { const levelInfo = levels[i]; if (!levelInfo || currentCodecSet && levelInfo.codecSet !== currentCodecSet) { if (levelInfo) { levelSkippedMin = Math.min(i, levelSkippedMin); levelSkippedMax = Math.max(i, levelSkippedMax); } continue; } if (levelSkippedMax !== -1) { logger.trace(`[abr] Skipped level(s) ${levelSkippedMin}-${levelSkippedMax} with CODECS:"${levels[levelSkippedMax].attrs.CODECS}"; not compatible with "${level.attrs.CODECS}"`); } const levelDetails = levelInfo.details; const avgDuration = (partCurrent ? levelDetails == null ? void 0 : levelDetails.partTarget : levelDetails == null ? void 0 : levelDetails.averagetargetduration) || currentFragDuration; let adjustedbw; // follow algorithm captured from stagefright : // https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp // Pick the highest bandwidth stream below or equal to estimated bandwidth. // consider only 80% of the available bandwidth, but if we are switching up, // be even more conservative (70%) to avoid overestimating and immediately // switching back. 
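/*
 * Illustrative arithmetic (not part of the library), with hypothetical numbers:
 * suppose the current estimate is 6 Mbps, bwFactor = 0.8 and bwUpFactor = 0.7
 * (the real factors come from config.abrBandWidthFactor / abrBandWidthUpFactor).
 *
 *   - A level above the current one with maxBitrate = 5 Mbps is tested against
 *     0.7 * 6 = 4.2 Mbps, so it is rejected even though the raw estimate (6) is
 *     higher than its bitrate: switching up stays conservative.
 *   - A level at or below the current one with maxBitrate = 4 Mbps is tested
 *     against 0.8 * 6 = 4.8 Mbps and passes the bandwidth check; with
 *     avgDuration = 4 s and a 0.1 s TTFB estimate, fetchDuration is roughly
 *     0.1 + (4e6 * 4) / 4.8e6 ≈ 3.4 s, which must still come in under
 *     maxFetchDuration (unless the stream is live with no pending bitrate test,
 *     or fetchDuration is unknown) for the level to be returned.
 */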
if (i <= currentLevel) { adjustedbw = bwFactor * currentBw; } else { adjustedbw = bwUpFactor * currentBw; } const bitrate = levels[i].maxBitrate; const fetchDuration = this.getTimeToLoadFrag(ttfbEstimateSec, adjustedbw, bitrate * avgDuration, levelDetails === undefined); logger.trace(`[abr] level:${i} adjustedbw-bitrate:${Math.round(adjustedbw - bitrate)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)}`); // if adjusted bw is greater than level bitrate AND if (adjustedbw > bitrate && ( // fragment fetchDuration unknown OR live stream OR fragment fetchDuration less than max allowed fetch duration, then this level matches // we don't account for max Fetch Duration for live streams, this is to avoid switching down when near the edge of live sliding window ... // special case to support startLevel = -1 (bitrateTest) on live streams : in that case we should not exit loop so that findBestLevel will return -1 fetchDuration === 0 || !isFiniteNumber(fetchDuration) || live && !this.bitrateTestDelay || fetchDuration < maxFetchDuration)) { // as we are looping from highest to lowest, this will return the best achievable quality level return i; } } // not enough time budget even with quality level 0 ... rebuffering might happen return -1; } set nextAutoLevel(nextLevel) { this._nextAutoLevel = nextLevel; } } class ChunkCache { constructor() { this.chunks = []; this.dataLength = 0; } push(chunk) { this.chunks.push(chunk); this.dataLength += chunk.length; } flush() { const { chunks, dataLength } = this; let result; if (!chunks.length) { return new Uint8Array(0); } else if (chunks.length === 1) { result = chunks[0]; } else { result = concatUint8Arrays(chunks, dataLength); } this.reset(); return result; } reset() { this.chunks.length = 0; this.dataLength = 0; } } function concatUint8Arrays(chunks, dataLength) { const result = new Uint8Array(dataLength); let offset = 0; for (let i = 0; i < chunks.length; i++) { const chunk = chunks[i]; result.set(chunk, offset); offset += chunk.length; } return result; } const TICK_INTERVAL$1 = 100; // how often to tick in ms class AudioStreamController extends BaseStreamController { constructor(hls, fragmentTracker, keyLoader) { super(hls, fragmentTracker, keyLoader, '[audio-stream-controller]', PlaylistLevelType.AUDIO); this.videoBuffer = null; this.videoTrackCC = -1; this.waitingVideoCC = -1; this.bufferedTrack = null; this.switchingTrack = null; this.trackId = -1; this.waitingData = null; this.mainDetails = null; this.bufferFlushed = false; this.cachedTrackLoadedData = null; this._registerListeners(); } onHandlerDestroying() { this._unregisterListeners(); this.mainDetails = null; this.bufferedTrack = null; this.switchingTrack = null; } _registerListeners() { const { hls } = this; hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this); hls.on(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this); hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this); hls.on(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this); hls.on(Events.ERROR, this.onError, this); hls.on(Events.BUFFER_RESET, this.onBufferReset, this); hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this); hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this); hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this); 
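/*
 * Illustrative note (not part of the library): INIT_PTS_FOUND is the handshake
 * that keeps this audio controller on the same timeline as the main video
 * track. The main StreamController above triggers it from
 * _handleTransmuxComplete with a payload shaped like:
 *
 *   hls.trigger(Events.INIT_PTS_FOUND, {
 *     frag,       // main fragment whose init PTS was just parsed
 *     id: 'main',
 *     initPTS,    // base time expressed in the accompanying timescale
 *     timescale
 *   });
 *
 * onInitPtsFound below stores it as this.initPTS[frag.cc] and, when this
 * controller is parked in State.WAITING_INIT_PTS, ticks immediately so the
 * cached audio fragment can be transmuxed against that timeline.
 */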
hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this); } _unregisterListeners() { const { hls } = this; hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this); hls.off(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this); hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this); hls.off(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this); hls.off(Events.ERROR, this.onError, this); hls.off(Events.BUFFER_RESET, this.onBufferReset, this); hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this); hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this); hls.off(Events.INIT_PTS_FOUND, this.onInitPtsFound, this); hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this); } // INIT_PTS_FOUND is triggered when the video track parsed in the stream-controller has a new PTS value onInitPtsFound(event, { frag, id, initPTS, timescale }) { // Always update the new INIT PTS // Can change due level switch if (id === 'main') { const cc = frag.cc; this.initPTS[frag.cc] = { baseTime: initPTS, timescale }; this.log(`InitPTS for cc: ${cc} found from main: ${initPTS}`); this.videoTrackCC = cc; // If we are waiting, tick immediately to unblock audio fragment transmuxing if (this.state === State.WAITING_INIT_PTS) { this.tick(); } } } startLoad(startPosition) { if (!this.levels) { this.startPosition = startPosition; this.state = State.STOPPED; return; } const lastCurrentTime = this.lastCurrentTime; this.stopLoad(); this.setInterval(TICK_INTERVAL$1); if (lastCurrentTime > 0 && startPosition === -1) { this.log(`Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(3)}`); startPosition = lastCurrentTime; this.state = State.IDLE; } else { this.loadedmetadata = false; this.state = State.WAITING_TRACK; } this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition; this.tick(); } doTick() { switch (this.state) { case State.IDLE: this.doTickIdle(); break; case State.WAITING_TRACK: { var _levels$trackId; const { levels, trackId } = this; const details = levels == null ? void 0 : (_levels$trackId = levels[trackId]) == null ? 
void 0 : _levels$trackId.details; if (details) { if (this.waitForCdnTuneIn(details)) { break; } this.state = State.WAITING_INIT_PTS; } break; } case State.FRAG_LOADING_WAITING_RETRY: { var _this$media; const now = performance.now(); const retryDate = this.retryDate; // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading if (!retryDate || now >= retryDate || (_this$media = this.media) != null && _this$media.seeking) { this.log('RetryDate reached, switch back to IDLE state'); this.resetStartWhenNotLoaded(this.trackId); this.state = State.IDLE; } break; } case State.WAITING_INIT_PTS: { // Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS const waitingData = this.waitingData; if (waitingData) { const { frag, part, cache, complete } = waitingData; if (this.initPTS[frag.cc] !== undefined) { this.waitingData = null; this.waitingVideoCC = -1; this.state = State.FRAG_LOADING; const payload = cache.flush(); const data = { frag, part, payload, networkDetails: null }; this._handleFragmentLoadProgress(data); if (complete) { super._handleFragmentLoadComplete(data); } } else if (this.videoTrackCC !== this.waitingVideoCC) { // Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found this.log(`Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${this.videoTrackCC}`); this.clearWaitingFragment(); } else { // Drop waiting fragment if an earlier fragment is needed const pos = this.getLoadPosition(); const bufferInfo = BufferHelper.bufferInfo(this.mediaBuffer, pos, this.config.maxBufferHole); const waitingFragmentAtPosition = fragmentWithinToleranceTest(bufferInfo.end, this.config.maxFragLookUpTolerance, frag); if (waitingFragmentAtPosition < 0) { this.log(`Waiting fragment cc (${frag.cc}) @ ${frag.start} cancelled because another fragment at ${bufferInfo.end} is needed`); this.clearWaitingFragment(); } } } else { this.state = State.IDLE; } } } this.onTickEnd(); } clearWaitingFragment() { const waitingData = this.waitingData; if (waitingData) { this.fragmentTracker.removeFragment(waitingData.frag); this.waitingData = null; this.waitingVideoCC = -1; this.state = State.IDLE; } } resetLoadingState() { this.clearWaitingFragment(); super.resetLoadingState(); } onTickEnd() { const { media } = this; if (!(media != null && media.readyState)) { // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0) return; } this.lastCurrentTime = media.currentTime; } doTickIdle() { const { hls, levels, media, trackId } = this; const config = hls.config; if (!(levels != null && levels[trackId])) { return; } // if video not attached AND // start fragment already requested OR start frag prefetch not enabled // exit loop // => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop if (!media && (this.startFragRequested || !config.startFragPrefetch)) { return; } const levelInfo = levels[trackId]; const trackDetails = levelInfo.details; if (!trackDetails || trackDetails.live && this.levelLastLoaded !== trackId || this.waitForCdnTuneIn(trackDetails)) { this.state = State.WAITING_TRACK; return; } const bufferable = this.mediaBuffer ? 
this.mediaBuffer : this.media; if (this.bufferFlushed && bufferable) { this.bufferFlushed = false; this.afterBufferFlushed(bufferable, ElementaryStreamTypes.AUDIO, PlaylistLevelType.AUDIO); } const bufferInfo = this.getFwdBufferInfo(bufferable, PlaylistLevelType.AUDIO); if (bufferInfo === null) { return; } const { bufferedTrack, switchingTrack } = this; if (!switchingTrack && this._streamEnded(bufferInfo, trackDetails)) { hls.trigger(Events.BUFFER_EOS, { type: 'audio' }); this.state = State.ENDED; return; } const mainBufferInfo = this.getFwdBufferInfo(this.videoBuffer ? this.videoBuffer : this.media, PlaylistLevelType.MAIN); const bufferLen = bufferInfo.len; const maxBufLen = this.getMaxBufferLength(mainBufferInfo == null ? void 0 : mainBufferInfo.len); // if buffer length is less than maxBufLen try to load a new fragment if (bufferLen >= maxBufLen && !switchingTrack) { return; } const fragments = trackDetails.fragments; const start = fragments[0].start; let targetBufferTime = bufferInfo.end; if (switchingTrack && media) { const pos = this.getLoadPosition(); if (bufferedTrack && switchingTrack.attrs !== bufferedTrack.attrs) { targetBufferTime = pos; } // if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime if (trackDetails.PTSKnown && pos < start) { // if everything is buffered from pos to start or if audio buffer upfront, let's seek to start if (bufferInfo.end > start || bufferInfo.nextStart) { this.log('Alt audio track ahead of main track, seek to start of alt audio track'); media.currentTime = start + 0.05; } } } let frag = this.getNextFragment(targetBufferTime, trackDetails); let atGap = false; // Avoid loop loading by using nextLoadPosition set for backtracking and skipping consecutive GAP tags if (frag && this.isLoopLoading(frag, targetBufferTime)) { atGap = !!frag.gap; frag = this.getNextFragmentLoopLoading(frag, trackDetails, bufferInfo, PlaylistLevelType.MAIN, maxBufLen); } if (!frag) { this.bufferFlushed = true; return; } // Buffer audio up to one target duration ahead of main buffer const atBufferSyncLimit = mainBufferInfo && frag.start > mainBufferInfo.end + trackDetails.targetduration; if (atBufferSyncLimit || // Or wait for main buffer after buffing some audio !(mainBufferInfo != null && mainBufferInfo.len) && bufferInfo.len) { // Check fragment-tracker for main fragments since GAP segments do not show up in bufferInfo const mainFrag = this.getAppendedFrag(frag.start, PlaylistLevelType.MAIN); if (mainFrag === null) { return; } // Bridge gaps in main buffer atGap || (atGap = !!mainFrag.gap || !!atBufferSyncLimit && mainBufferInfo.len === 0); if (atBufferSyncLimit && !atGap || atGap && bufferInfo.nextStart && bufferInfo.nextStart < mainFrag.end) { return; } } this.loadFragment(frag, levelInfo, targetBufferTime); } getMaxBufferLength(mainBufferLength) { const maxConfigBuffer = super.getMaxBufferLength(); if (!mainBufferLength) { return maxConfigBuffer; } return Math.min(Math.max(maxConfigBuffer, mainBufferLength), this.config.maxMaxBufferLength); } onMediaDetaching() { this.videoBuffer = null; super.onMediaDetaching(); } onAudioTracksUpdated(event, { audioTracks }) { this.resetTransmuxer(); this.levels = audioTracks.map(mediaPlaylist => new Level(mediaPlaylist)); } onAudioTrackSwitching(event, data) { // if any URL found on new audio track, it is an alternate audio track const altAudio = !!data.url; this.trackId = data.id; const { fragCurrent } = this; if (fragCurrent) { fragCurrent.abortRequests(); 
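/*
  Note on the track-switch handling that follows: a non-empty data.url marks an alternate audio
  rendition that this controller fetches on its own, while an empty url means the audio is muxed
  into the main stream. The in-flight fragment request was aborted above and its unbuffered data
  is dropped from the fragment tracker next, so the affected range is re-loaded from the new
  rendition. For an alternate rendition the tick interval keeps running and the state returns to
  IDLE to start loading; for muxed audio the transmuxer is torn down and the controller parks in
  STOPPED, leaving the work to the main stream-controller.
*/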
this.removeUnbufferedFrags(fragCurrent.start); } this.resetLoadingState(); // destroy useless transmuxer when switching audio to main if (!altAudio) { this.resetTransmuxer(); } else { // switching to audio track, start timer if not already started this.setInterval(TICK_INTERVAL$1); } // should we switch tracks ? if (altAudio) { this.switchingTrack = data; // main audio track are handled by stream-controller, just do something if switching to alt audio track this.state = State.IDLE; } else { this.switchingTrack = null; this.bufferedTrack = data; this.state = State.STOPPED; } this.tick(); } onManifestLoading() { this.fragmentTracker.removeAllFragments(); this.startPosition = this.lastCurrentTime = 0; this.bufferFlushed = false; this.levels = this.mainDetails = this.waitingData = this.bufferedTrack = this.cachedTrackLoadedData = this.switchingTrack = null; this.startFragRequested = false; this.trackId = this.videoTrackCC = this.waitingVideoCC = -1; } onLevelLoaded(event, data) { this.mainDetails = data.details; if (this.cachedTrackLoadedData !== null) { this.hls.trigger(Events.AUDIO_TRACK_LOADED, this.cachedTrackLoadedData); this.cachedTrackLoadedData = null; } } onAudioTrackLoaded(event, data) { var _track$details; if (this.mainDetails == null) { this.cachedTrackLoadedData = data; return; } const { levels } = this; const { details: newDetails, id: trackId } = data; if (!levels) { this.warn(`Audio tracks were reset while loading level ${trackId}`); return; } this.log(`Track ${trackId} loaded [${newDetails.startSN},${newDetails.endSN}]${newDetails.lastPartSn ? `[part-${newDetails.lastPartSn}-${newDetails.lastPartIndex}]` : ''},duration:${newDetails.totalduration}`); const track = levels[trackId]; let sliding = 0; if (newDetails.live || (_track$details = track.details) != null && _track$details.live) { this.checkLiveUpdate(newDetails); const mainDetails = this.mainDetails; if (newDetails.deltaUpdateFailed || !mainDetails) { return; } if (!track.details && newDetails.hasProgramDateTime && mainDetails.hasProgramDateTime) { // Make sure our audio rendition is aligned with the "main" rendition, using // pdt as our reference times. alignMediaPlaylistByPDT(newDetails, mainDetails); sliding = newDetails.fragments[0].start; } else { sliding = this.alignPlaylists(newDetails, track.details); } } track.details = newDetails; this.levelLastLoaded = trackId; // compute start position if we are aligned with the main playlist if (!this.startFragRequested && (this.mainDetails || !newDetails.live)) { this.setStartPosition(track.details, sliding); } // only switch back to IDLE state if we were waiting for track to start downloading a new fragment if (this.state === State.WAITING_TRACK && !this.waitForCdnTuneIn(newDetails)) { this.state = State.IDLE; } // trigger handler right now this.tick(); } _handleFragmentLoadProgress(data) { var _frag$initSegment; const { frag, part, payload } = data; const { config, trackId, levels } = this; if (!levels) { this.warn(`Audio tracks were reset while fragment load was in progress. 
Fragment ${frag.sn} of level ${frag.level} will not be buffered`); return; } const track = levels[trackId]; if (!track) { this.warn('Audio track is undefined on fragment load progress'); return; } const details = track.details; if (!details) { this.warn('Audio track details undefined on fragment load progress'); this.removeUnbufferedFrags(frag.start); return; } const audioCodec = config.defaultAudioCodec || track.audioCodec || 'mp4a.40.2'; let transmuxer = this.transmuxer; if (!transmuxer) { transmuxer = this.transmuxer = new TransmuxerInterface(this.hls, PlaylistLevelType.AUDIO, this._handleTransmuxComplete.bind(this), this._handleTransmuxerFlush.bind(this)); } // Check if we have video initPTS // If not we need to wait for it const initPTS = this.initPTS[frag.cc]; const initSegmentData = (_frag$initSegment = frag.initSegment) == null ? void 0 : _frag$initSegment.data; if (initPTS !== undefined) { // this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`); // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live) const accurateTimeOffset = false; // details.PTSKnown || !details.live; const partIndex = part ? part.index : -1; const partial = partIndex !== -1; const chunkMeta = new ChunkMetadata(frag.level, frag.sn, frag.stats.chunkCount, payload.byteLength, partIndex, partial); transmuxer.push(payload, initSegmentData, audioCodec, '', frag, part, details.totalduration, accurateTimeOffset, chunkMeta, initPTS); } else { this.log(`Unknown video PTS for cc ${frag.cc}, waiting for video PTS before demuxing audio frag ${frag.sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`); const { cache } = this.waitingData = this.waitingData || { frag, part, cache: new ChunkCache(), complete: false }; cache.push(new Uint8Array(payload)); this.waitingVideoCC = this.videoTrackCC; this.state = State.WAITING_INIT_PTS; } } _handleFragmentLoadComplete(fragLoadedData) { if (this.waitingData) { this.waitingData.complete = true; return; } super._handleFragmentLoadComplete(fragLoadedData); } onBufferReset( /* event: Events.BUFFER_RESET */ ) { // reset reference to sourcebuffers this.mediaBuffer = this.videoBuffer = null; this.loadedmetadata = false; } onBufferCreated(event, data) { const audioTrack = data.tracks.audio; if (audioTrack) { this.mediaBuffer = audioTrack.buffer || null; } if (data.tracks.video) { this.videoBuffer = data.tracks.video.buffer || null; } } onFragBuffered(event, data) { const { frag, part } = data; if (frag.type !== PlaylistLevelType.AUDIO) { if (!this.loadedmetadata && frag.type === PlaylistLevelType.MAIN) { const bufferable = this.videoBuffer || this.media; if (bufferable) { const bufferedTimeRanges = BufferHelper.getBuffered(bufferable); if (bufferedTimeRanges.length) { this.loadedmetadata = true; } } } return; } if (this.fragContextChanged(frag)) { // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion // Avoid setting state back to IDLE or concluding the audio switch; otherwise, the switched-to track will not buffer this.warn(`Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${frag.level} finished buffering, but was aborted. state: ${this.state}, audioSwitch: ${this.switchingTrack ? 
this.switchingTrack.name : 'false'}`); return; } if (frag.sn !== 'initSegment') { this.fragPrevious = frag; const track = this.switchingTrack; if (track) { this.bufferedTrack = track; this.switchingTrack = null; this.hls.trigger(Events.AUDIO_TRACK_SWITCHED, _objectSpread2({}, track)); } } this.fragBufferedComplete(frag, part); } onError(event, data) { var _data$context; if (data.fatal) { this.state = State.ERROR; return; } switch (data.details) { case ErrorDetails.FRAG_GAP: case ErrorDetails.FRAG_PARSING_ERROR: case ErrorDetails.FRAG_DECRYPT_ERROR: case ErrorDetails.FRAG_LOAD_ERROR: case ErrorDetails.FRAG_LOAD_TIMEOUT: case ErrorDetails.KEY_LOAD_ERROR: case ErrorDetails.KEY_LOAD_TIMEOUT: this.onFragmentOrKeyLoadError(PlaylistLevelType.AUDIO, data); break; case ErrorDetails.AUDIO_TRACK_LOAD_ERROR: case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT: case ErrorDetails.LEVEL_PARSING_ERROR: // in case of non fatal error while loading track, if not retrying to load track, switch back to IDLE if (!data.levelRetry && this.state === State.WAITING_TRACK && ((_data$context = data.context) == null ? void 0 : _data$context.type) === PlaylistContextType.AUDIO_TRACK) { this.state = State.IDLE; } break; case ErrorDetails.BUFFER_FULL_ERROR: if (!data.parent || data.parent !== 'audio') { return; } if (this.reduceLengthAndFlushBuffer(data)) { this.bufferedTrack = null; super.flushMainBuffer(0, Number.POSITIVE_INFINITY, 'audio'); } break; case ErrorDetails.INTERNAL_EXCEPTION: this.recoverWorkerError(data); break; } } onBufferFlushed(event, { type }) { if (type === ElementaryStreamTypes.AUDIO) { this.bufferFlushed = true; if (this.state === State.ENDED) { this.state = State.IDLE; } } } _handleTransmuxComplete(transmuxResult) { var _id3$samples; const id = 'audio'; const { hls } = this; const { remuxResult, chunkMeta } = transmuxResult; const context = this.getCurrentContext(chunkMeta); if (!context) { this.resetWhenMissingContext(chunkMeta); return; } const { frag, part, level } = context; const { details } = level; const { audio, text, id3, initSegment } = remuxResult; // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level. // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed. 
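/*
  fragContextChanged(frag) roughly compares the fragment that produced this transmux result with
  the currently loading fragCurrent (level, sequence number, loading part). If a seek or a
  level/track switch replaced it in the meantime, the parsed data is stale: it is discarded below
  and the fragment is removed from the tracker so the same range can be loaded again.
*/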
if (this.fragContextChanged(frag) || !details) { this.fragmentTracker.removeFragment(frag); return; } this.state = State.PARSING; if (this.switchingTrack && audio) { this.completeAudioSwitch(this.switchingTrack); } if (initSegment != null && initSegment.tracks) { const mapFragment = frag.initSegment || frag; this._bufferInitSegment(initSegment.tracks, mapFragment, chunkMeta); hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, { frag: mapFragment, id, tracks: initSegment.tracks }); // Only flush audio from old audio tracks when PTS is known on new audio track } if (audio) { const { startPTS, endPTS, startDTS, endDTS } = audio; if (part) { part.elementaryStreams[ElementaryStreamTypes.AUDIO] = { startPTS, endPTS, startDTS, endDTS }; } frag.setElementaryStreamInfo(ElementaryStreamTypes.AUDIO, startPTS, endPTS, startDTS, endDTS); this.bufferFragmentData(audio, frag, part, chunkMeta); } if (id3 != null && (_id3$samples = id3.samples) != null && _id3$samples.length) { const emittedID3 = _extends({ id, frag, details }, id3); hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3); } if (text) { const emittedText = _extends({ id, frag, details }, text); hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText); } } _bufferInitSegment(tracks, frag, chunkMeta) { if (this.state !== State.PARSING) { return; } // delete any video track found on audio transmuxer if (tracks.video) { delete tracks.video; } // include levelCodec in audio and video tracks const track = tracks.audio; if (!track) { return; } track.levelCodec = track.codec; track.id = 'audio'; this.log(`Init audio buffer, container:${track.container}, codecs[parsed]=[${track.codec}]`); this.hls.trigger(Events.BUFFER_CODECS, tracks); const initSegment = track.initSegment; if (initSegment != null && initSegment.byteLength) { const segment = { type: 'audio', frag, part: null, chunkMeta, parent: frag.type, data: initSegment }; this.hls.trigger(Events.BUFFER_APPENDING, segment); } // trigger handler right now this.tick(); } loadFragment(frag, track, targetBufferTime) { // only load if fragment is not loaded or if in audio switch const fragState = this.fragmentTracker.getState(frag); this.fragCurrent = frag; // we force a frag loading in audio switch as fragment tracker might not have evicted previous frags in case of quick audio switch if (this.switchingTrack || fragState === FragmentState.NOT_LOADED || fragState === FragmentState.PARTIAL) { var _track$details2; if (frag.sn === 'initSegment') { this._loadInitSegment(frag, track); } else if ((_track$details2 = track.details) != null && _track$details2.live && !this.initPTS[frag.cc]) { this.log(`Waiting for video PTS in continuity counter ${frag.cc} of live stream before loading audio fragment ${frag.sn} of level ${this.trackId}`); this.state = State.WAITING_INIT_PTS; } else { this.startFragRequested = true; super.loadFragment(frag, track, targetBufferTime); } } else { this.clearTrackerIfNeeded(frag); } } completeAudioSwitch(switchingTrack) { const { hls, media, bufferedTrack } = this; const bufferedAttributes = bufferedTrack == null ? 
void 0 : bufferedTrack.attrs; const switchAttributes = switchingTrack.attrs; if (media && bufferedAttributes && (bufferedAttributes.CHANNELS !== switchAttributes.CHANNELS || bufferedAttributes.NAME !== switchAttributes.NAME || bufferedAttributes.LANGUAGE !== switchAttributes.LANGUAGE)) { this.log('Switching audio track : flushing all audio'); super.flushMainBuffer(0, Number.POSITIVE_INFINITY, 'audio'); } this.bufferedTrack = switchingTrack; this.switchingTrack = null; hls.trigger(Events.AUDIO_TRACK_SWITCHED, _objectSpread2({}, switchingTrack)); } } class AudioTrackController extends BasePlaylistController { constructor(hls) { super(hls, '[audio-track-controller]'); this.tracks = []; this.groupId = null; this.tracksInGroup = []; this.trackId = -1; this.currentTrack = null; this.selectDefaultTrack = true; this.registerListeners(); } registerListeners() { const { hls } = this; hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this); hls.on(Events.LEVEL_SWITCHING, this.onLevelSwitching, this); hls.on(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this); hls.on(Events.ERROR, this.onError, this); } unregisterListeners() { const { hls } = this; hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this); hls.off(Events.LEVEL_SWITCHING, this.onLevelSwitching, this); hls.off(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this); hls.off(Events.ERROR, this.onError, this); } destroy() { this.unregisterListeners(); this.tracks.length = 0; this.tracksInGroup.length = 0; this.currentTrack = null; super.destroy(); } onManifestLoading() { this.tracks = []; this.groupId = null; this.tracksInGroup = []; this.trackId = -1; this.currentTrack = null; this.selectDefaultTrack = true; } onManifestParsed(event, data) { this.tracks = data.audioTracks || []; } onAudioTrackLoaded(event, data) { const { id, groupId, details } = data; const trackInActiveGroup = this.tracksInGroup[id]; if (!trackInActiveGroup || trackInActiveGroup.groupId !== groupId) { this.warn(`Track with id:${id} and group:${groupId} not found in active group ${trackInActiveGroup.groupId}`); return; } const curDetails = trackInActiveGroup.details; trackInActiveGroup.details = data.details; this.log(`audio-track ${id} "${trackInActiveGroup.name}" lang:${trackInActiveGroup.lang} group:${groupId} loaded [${details.startSN}-${details.endSN}]`); if (id === this.trackId) { this.playlistLoaded(id, data, curDetails); } } onLevelLoading(event, data) { this.switchLevel(data.level); } onLevelSwitching(event, data) { this.switchLevel(data.level); } switchLevel(levelIndex) { const levelInfo = this.hls.levels[levelIndex]; if (!(levelInfo != null && levelInfo.audioGroupIds)) { return; } const audioGroupId = levelInfo.audioGroupIds[levelInfo.urlId]; if (this.groupId !== audioGroupId) { this.groupId = audioGroupId || null; const audioTracks = this.tracks.filter(track => !audioGroupId || track.groupId === audioGroupId); // Disable selectDefaultTrack if there are no default tracks if (this.selectDefaultTrack && !audioTracks.some(track => track.default)) { this.selectDefaultTrack = false; } this.tracksInGroup = audioTracks; const audioTracksUpdated = { audioTracks }; this.log(`Updating audio tracks, ${audioTracks.length} track(s) found in group:${audioGroupId}`); this.hls.trigger(Events.AUDIO_TRACKS_UPDATED, 
audioTracksUpdated); this.selectInitialTrack(); } else if (this.shouldReloadPlaylist(this.currentTrack)) { // Retry playlist loading if no playlist is or has been loaded yet this.setAudioTrack(this.trackId); } } onError(event, data) { if (data.fatal || !data.context) { return; } if (data.context.type === PlaylistContextType.AUDIO_TRACK && data.context.id === this.trackId && data.context.groupId === this.groupId) { this.requestScheduled = -1; this.checkRetry(data); } } get audioTracks() { return this.tracksInGroup; } get audioTrack() { return this.trackId; } set audioTrack(newId) { // If audio track is selected from API then don't choose from the manifest default track this.selectDefaultTrack = false; this.setAudioTrack(newId); } setAudioTrack(newId) { const tracks = this.tracksInGroup; // check if level idx is valid if (newId < 0 || newId >= tracks.length) { this.warn('Invalid id passed to audio-track controller'); return; } // stopping live reloading timer if any this.clearTimer(); const lastTrack = this.currentTrack; tracks[this.trackId]; const track = tracks[newId]; const { groupId, name } = track; this.log(`Switching to audio-track ${newId} "${name}" lang:${track.lang} group:${groupId}`); this.trackId = newId; this.currentTrack = track; this.selectDefaultTrack = false; this.hls.trigger(Events.AUDIO_TRACK_SWITCHING, _objectSpread2({}, track)); // Do not reload track unless live if (track.details && !track.details.live) { return; } const hlsUrlParameters = this.switchParams(track.url, lastTrack == null ? void 0 : lastTrack.details); this.loadPlaylist(hlsUrlParameters); } selectInitialTrack() { const audioTracks = this.tracksInGroup; const trackId = this.findTrackId(this.currentTrack) | this.findTrackId(null); if (trackId !== -1) { this.setAudioTrack(trackId); } else { const error = new Error(`No track found for running audio group-ID: ${this.groupId} track count: ${audioTracks.length}`); this.warn(error.message); this.hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.AUDIO_TRACK_LOAD_ERROR, fatal: true, error }); } } findTrackId(currentTrack) { const audioTracks = this.tracksInGroup; for (let i = 0; i < audioTracks.length; i++) { const track = audioTracks[i]; if (!this.selectDefaultTrack || track.default) { if (!currentTrack || currentTrack.attrs['STABLE-RENDITION-ID'] !== undefined && currentTrack.attrs['STABLE-RENDITION-ID'] === track.attrs['STABLE-RENDITION-ID']) { return track.id; } if (currentTrack.name === track.name && currentTrack.lang === track.lang) { return track.id; } } } return -1; } loadPlaylist(hlsUrlParameters) { super.loadPlaylist(); const audioTrack = this.tracksInGroup[this.trackId]; if (this.shouldLoadPlaylist(audioTrack)) { const id = audioTrack.id; const groupId = audioTrack.groupId; let url = audioTrack.url; if (hlsUrlParameters) { try { url = hlsUrlParameters.addDirectives(url); } catch (error) { this.warn(`Could not construct new URL with HLS Delivery Directives: ${error}`); } } // track not retrieved yet, or live playlist we need to (re)load it this.log(`loading audio-track playlist ${id} "${audioTrack.name}" lang:${audioTrack.lang} group:${groupId}`); this.clearTimer(); this.hls.trigger(Events.AUDIO_TRACK_LOADING, { url, id, groupId, deliveryDirectives: hlsUrlParameters || null }); } } } function subtitleOptionsIdentical(trackList1, trackList2) { if (trackList1.length !== trackList2.length) { return false; } for (let i = 0; i < trackList1.length; i++) { if (!subtitleAttributesIdentical(trackList1[i].attrs, trackList2[i].attrs)) { 
return false; } } return true; } function subtitleAttributesIdentical(attrs1, attrs2) { // Media options with the same rendition ID must be bit identical const stableRenditionId = attrs1['STABLE-RENDITION-ID']; if (stableRenditionId) { return stableRenditionId === attrs2['STABLE-RENDITION-ID']; } // When rendition ID is not present, compare attributes return !['LANGUAGE', 'NAME', 'CHARACTERISTICS', 'AUTOSELECT', 'DEFAULT', 'FORCED'].some(subtitleAttribute => attrs1[subtitleAttribute] !== attrs2[subtitleAttribute]); } const TICK_INTERVAL = 500; // how often to tick in ms class SubtitleStreamController extends BaseStreamController { constructor(hls, fragmentTracker, keyLoader) { super(hls, fragmentTracker, keyLoader, '[subtitle-stream-controller]', PlaylistLevelType.SUBTITLE); this.levels = []; this.currentTrackId = -1; this.tracksBuffered = []; this.mainDetails = null; this._registerListeners(); } onHandlerDestroying() { this._unregisterListeners(); this.mainDetails = null; } _registerListeners() { const { hls } = this; hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this); hls.on(Events.ERROR, this.onError, this); hls.on(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this); hls.on(Events.SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this); hls.on(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this); hls.on(Events.SUBTITLE_FRAG_PROCESSED, this.onSubtitleFragProcessed, this); hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this); hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this); } _unregisterListeners() { const { hls } = this; hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this); hls.off(Events.ERROR, this.onError, this); hls.off(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this); hls.off(Events.SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this); hls.off(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this); hls.off(Events.SUBTITLE_FRAG_PROCESSED, this.onSubtitleFragProcessed, this); hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this); hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this); } startLoad(startPosition) { this.stopLoad(); this.state = State.IDLE; this.setInterval(TICK_INTERVAL); this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition; this.tick(); } onManifestLoading() { this.mainDetails = null; this.fragmentTracker.removeAllFragments(); } onMediaDetaching() { this.tracksBuffered = []; super.onMediaDetaching(); } onLevelLoaded(event, data) { this.mainDetails = data.details; } onSubtitleFragProcessed(event, data) { const { frag, success } = data; this.fragPrevious = frag; this.state = State.IDLE; if (!success) { return; } const buffered = this.tracksBuffered[this.currentTrackId]; if (!buffered) { return; } // Create/update a buffered array matching the interface used by BufferHelper.bufferedInfo // so we can re-use the logic used to detect how much has been buffered let timeRange; const fragStart = frag.start; for (let i = 0; i < buffered.length; i++) { if (fragStart >= buffered[i].start && fragStart <= buffered[i].end) { timeRange = buffered[i]; break; } } const fragEnd = frag.start + frag.duration; if 
(timeRange) { timeRange.end = fragEnd; } else { timeRange = { start: fragStart, end: fragEnd }; buffered.push(timeRange); } this.fragmentTracker.fragBuffered(frag); } onBufferFlushing(event, data) { const { startOffset, endOffset } = data; if (startOffset === 0 && endOffset !== Number.POSITIVE_INFINITY) { const endOffsetSubtitles = endOffset - 1; if (endOffsetSubtitles <= 0) { return; } data.endOffsetSubtitles = Math.max(0, endOffsetSubtitles); this.tracksBuffered.forEach(buffered => { for (let i = 0; i < buffered.length;) { if (buffered[i].end <= endOffsetSubtitles) { buffered.shift(); continue; } else if (buffered[i].start < endOffsetSubtitles) { buffered[i].start = endOffsetSubtitles; } else { break; } i++; } }); this.fragmentTracker.removeFragmentsInRange(startOffset, endOffsetSubtitles, PlaylistLevelType.SUBTITLE); } } onFragBuffered(event, data) { if (!this.loadedmetadata && data.frag.type === PlaylistLevelType.MAIN) { var _this$media; if ((_this$media = this.media) != null && _this$media.buffered.length) { this.loadedmetadata = true; } } } // If something goes wrong, proceed to next frag, if we were processing one. onError(event, data) { const frag = data.frag; if ((frag == null ? void 0 : frag.type) === PlaylistLevelType.SUBTITLE) { if (this.fragCurrent) { this.fragCurrent.abortRequests(); } if (this.state !== State.STOPPED) { this.state = State.IDLE; } } } // Got all new subtitle levels. onSubtitleTracksUpdated(event, { subtitleTracks }) { if (subtitleOptionsIdentical(this.levels, subtitleTracks)) { this.levels = subtitleTracks.map(mediaPlaylist => new Level(mediaPlaylist)); return; } this.tracksBuffered = []; this.levels = subtitleTracks.map(mediaPlaylist => { const level = new Level(mediaPlaylist); this.tracksBuffered[level.id] = []; return level; }); this.fragmentTracker.removeFragmentsInRange(0, Number.POSITIVE_INFINITY, PlaylistLevelType.SUBTITLE); this.fragPrevious = null; this.mediaBuffer = null; } onSubtitleTrackSwitch(event, data) { this.currentTrackId = data.id; if (!this.levels.length || this.currentTrackId === -1) { this.clearInterval(); return; } // Check if track has the necessary details to load fragments const currentTrack = this.levels[this.currentTrackId]; if (currentTrack != null && currentTrack.details) { this.mediaBuffer = this.mediaBufferTimeRanges; } else { this.mediaBuffer = null; } if (currentTrack) { this.setInterval(TICK_INTERVAL); } } // Got a new set of subtitle fragments. 
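/*
  The handler below keeps live subtitle playlists lined up with the main playlist before they are
  used: PROGRAM-DATE-TIME alignment is preferred, otherwise the whole playlist is slid by the start
  of main's first fragment (addSliding) so lookups by media.currentTime land inside the track.
  Illustrative numbers (not from the source): if main currently starts at t = 120s while the
  subtitle playlist nominally starts at 0, every subtitle fragment is shifted by +120s. If, after
  alignment, no fragment matches the playback position, the details are dropped so the next reload
  can resync with main.
*/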
onSubtitleTrackLoaded(event, data) { var _track$details; const { details: newDetails, id: trackId } = data; const { currentTrackId, levels } = this; if (!levels.length) { return; } const track = levels[currentTrackId]; if (trackId >= levels.length || trackId !== currentTrackId || !track) { return; } this.mediaBuffer = this.mediaBufferTimeRanges; let sliding = 0; if (newDetails.live || (_track$details = track.details) != null && _track$details.live) { const mainDetails = this.mainDetails; if (newDetails.deltaUpdateFailed || !mainDetails) { return; } const mainSlidingStartFragment = mainDetails.fragments[0]; if (!track.details) { if (newDetails.hasProgramDateTime && mainDetails.hasProgramDateTime) { alignMediaPlaylistByPDT(newDetails, mainDetails); sliding = newDetails.fragments[0].start; } else if (mainSlidingStartFragment) { // line up live playlist with main so that fragments in range are loaded sliding = mainSlidingStartFragment.start; addSliding(newDetails, sliding); } } else { sliding = this.alignPlaylists(newDetails, track.details); if (sliding === 0 && mainSlidingStartFragment) { // realign with main when there is no overlap with last refresh sliding = mainSlidingStartFragment.start; addSliding(newDetails, sliding); } } } track.details = newDetails; this.levelLastLoaded = trackId; if (!this.startFragRequested && (this.mainDetails || !newDetails.live)) { this.setStartPosition(track.details, sliding); } // trigger handler right now this.tick(); // If playlist is misaligned because of bad PDT or drift, delete details to resync with main on reload if (newDetails.live && !this.fragCurrent && this.media && this.state === State.IDLE) { const foundFrag = findFragmentByPTS(null, newDetails.fragments, this.media.currentTime, 0); if (!foundFrag) { this.warn('Subtitle playlist not aligned with playback'); track.details = undefined; } } } _handleFragmentLoadComplete(fragLoadedData) { const { frag, payload } = fragLoadedData; const decryptData = frag.decryptdata; const hls = this.hls; if (this.fragContextChanged(frag)) { return; } // check to see if the payload needs to be decrypted if (payload && payload.byteLength > 0 && decryptData && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') { const startTime = performance.now(); // decrypt the subtitles this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => { hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_DECRYPT_ERROR, fatal: false, error: err, reason: err.message, frag }); throw err; }).then(decryptedData => { const endTime = performance.now(); hls.trigger(Events.FRAG_DECRYPTED, { frag, payload: decryptedData, stats: { tstart: startTime, tdecrypt: endTime } }); }).catch(err => { this.warn(`${err.name}: ${err.message}`); this.state = State.IDLE; }); } } doTick() { if (!this.media) { this.state = State.IDLE; return; } if (this.state === State.IDLE) { const { currentTrackId, levels } = this; const track = levels[currentTrackId]; if (!levels.length || !track || !track.details) { return; } const { config } = this; const currentTime = this.getLoadPosition(); const bufferedInfo = BufferHelper.bufferedInfo(this.tracksBuffered[this.currentTrackId] || [], currentTime, config.maxBufferHole); const { end: targetBufferTime, len: bufferLen } = bufferedInfo; const mainBufferInfo = this.getFwdBufferInfo(this.media, PlaylistLevelType.MAIN); const trackDetails = track.details; const maxBufLen = this.getMaxBufferLength(mainBufferInfo == null ? 
void 0 : mainBufferInfo.len) + trackDetails.levelTargetDuration; if (bufferLen > maxBufLen) { return; } const fragments = trackDetails.fragments; const fragLen = fragments.length; const end = trackDetails.edge; let foundFrag = null; const fragPrevious = this.fragPrevious; if (targetBufferTime < end) { const tolerance = config.maxFragLookUpTolerance; const lookupTolerance = targetBufferTime > end - tolerance ? 0 : tolerance; foundFrag = findFragmentByPTS(fragPrevious, fragments, Math.max(fragments[0].start, targetBufferTime), lookupTolerance); if (!foundFrag && fragPrevious && fragPrevious.start < fragments[0].start) { foundFrag = fragments[0]; } } else { foundFrag = fragments[fragLen - 1]; } if (!foundFrag) { return; } foundFrag = this.mapToInitFragWhenRequired(foundFrag); if (foundFrag.sn !== 'initSegment') { // Load earlier fragment in same discontinuity to make up for misaligned playlists and cues that extend beyond end of segment const curSNIdx = foundFrag.sn - trackDetails.startSN; const prevFrag = fragments[curSNIdx - 1]; if (prevFrag && prevFrag.cc === foundFrag.cc && this.fragmentTracker.getState(prevFrag) === FragmentState.NOT_LOADED) { foundFrag = prevFrag; } } if (this.fragmentTracker.getState(foundFrag) === FragmentState.NOT_LOADED) { // only load if fragment is not loaded this.loadFragment(foundFrag, track, targetBufferTime); } } } getMaxBufferLength(mainBufferLength) { const maxConfigBuffer = super.getMaxBufferLength(); if (!mainBufferLength) { return maxConfigBuffer; } return Math.max(maxConfigBuffer, mainBufferLength); } loadFragment(frag, level, targetBufferTime) { this.fragCurrent = frag; if (frag.sn === 'initSegment') { this._loadInitSegment(frag, level); } else { this.startFragRequested = true; super.loadFragment(frag, level, targetBufferTime); } } get mediaBufferTimeRanges() { return new BufferableInstance(this.tracksBuffered[this.currentTrackId] || []); } } class BufferableInstance { constructor(timeranges) { this.buffered = void 0; const getRange = (name, index, length) => { index = index >>> 0; if (index > length - 1) { throw new DOMException(`Failed to execute '${name}' on 'TimeRanges': The index provided (${index}) is greater than the maximum bound (${length})`); } return timeranges[index][name]; }; this.buffered = { get length() { return timeranges.length; }, end(index) { return getRange('end', index, timeranges.length); }, start(index) { return getRange('start', index, timeranges.length); } }; } } class SubtitleTrackController extends BasePlaylistController { constructor(hls) { super(hls, '[subtitle-track-controller]'); this.media = null; this.tracks = []; this.groupId = null; this.tracksInGroup = []; this.trackId = -1; this.selectDefaultTrack = true; this.queuedDefaultTrack = -1; this.trackChangeListener = () => this.onTextTracksChanged(); this.asyncPollTrackChange = () => this.pollTrackChange(0); this.useTextTrackPolling = false; this.subtitlePollingInterval = -1; this._subtitleDisplay = true; this.registerListeners(); } destroy() { this.unregisterListeners(); this.tracks.length = 0; this.tracksInGroup.length = 0; this.trackChangeListener = this.asyncPollTrackChange = null; super.destroy(); } get subtitleDisplay() { return this._subtitleDisplay; } set subtitleDisplay(value) { this._subtitleDisplay = value; if (this.trackId > -1) { this.toggleTrackModes(this.trackId); } } registerListeners() { const { hls } = this; hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this); 
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this); hls.on(Events.LEVEL_SWITCHING, this.onLevelSwitching, this); hls.on(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this); hls.on(Events.ERROR, this.onError, this); } unregisterListeners() { const { hls } = this; hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this); hls.off(Events.LEVEL_SWITCHING, this.onLevelSwitching, this); hls.off(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this); hls.off(Events.ERROR, this.onError, this); } // Listen for subtitle track change, then extract the current track ID. onMediaAttached(event, data) { this.media = data.media; if (!this.media) { return; } if (this.queuedDefaultTrack > -1) { this.subtitleTrack = this.queuedDefaultTrack; this.queuedDefaultTrack = -1; } this.useTextTrackPolling = !(this.media.textTracks && 'onchange' in this.media.textTracks); if (this.useTextTrackPolling) { this.pollTrackChange(500); } else { this.media.textTracks.addEventListener('change', this.asyncPollTrackChange); } } pollTrackChange(timeout) { self.clearInterval(this.subtitlePollingInterval); this.subtitlePollingInterval = self.setInterval(this.trackChangeListener, timeout); } onMediaDetaching() { if (!this.media) { return; } self.clearInterval(this.subtitlePollingInterval); if (!this.useTextTrackPolling) { this.media.textTracks.removeEventListener('change', this.asyncPollTrackChange); } if (this.trackId > -1) { this.queuedDefaultTrack = this.trackId; } const textTracks = filterSubtitleTracks(this.media.textTracks); // Clear loaded cues on media detachment from tracks textTracks.forEach(track => { clearCurrentCues(track); }); // Disable all subtitle tracks before detachment so when reattached only tracks in that content are enabled. this.subtitleTrack = -1; this.media = null; } onManifestLoading() { this.tracks = []; this.groupId = null; this.tracksInGroup = []; this.trackId = -1; this.selectDefaultTrack = true; } // Fired whenever a new manifest is loaded. onManifestParsed(event, data) { this.tracks = data.subtitleTracks; } onSubtitleTrackLoaded(event, data) { const { id, details } = data; const { trackId } = this; const currentTrack = this.tracksInGroup[trackId]; if (!currentTrack) { this.warn(`Invalid subtitle track id ${id}`); return; } const curDetails = currentTrack.details; currentTrack.details = data.details; this.log(`subtitle track ${id} loaded [${details.startSN}-${details.endSN}]`); if (id === this.trackId) { this.playlistLoaded(id, data, curDetails); } } onLevelLoading(event, data) { this.switchLevel(data.level); } onLevelSwitching(event, data) { this.switchLevel(data.level); } switchLevel(levelIndex) { const levelInfo = this.hls.levels[levelIndex]; if (!(levelInfo != null && levelInfo.textGroupIds)) { return; } const textGroupId = levelInfo.textGroupIds[levelInfo.urlId]; const lastTrack = this.tracksInGroup ? this.tracksInGroup[this.trackId] : undefined; if (this.groupId !== textGroupId) { const subtitleTracks = this.tracks.filter(track => !textGroupId || track.groupId === textGroupId); this.tracksInGroup = subtitleTracks; const initialTrackId = this.findTrackId(lastTrack == null ? 
void 0 : lastTrack.name) || this.findTrackId(); this.groupId = textGroupId || null; const subtitleTracksUpdated = { subtitleTracks }; this.log(`Updating subtitle tracks, ${subtitleTracks.length} track(s) found in "${textGroupId}" group-id`); this.hls.trigger(Events.SUBTITLE_TRACKS_UPDATED, subtitleTracksUpdated); if (initialTrackId !== -1) { this.setSubtitleTrack(initialTrackId, lastTrack); } } else if (this.shouldReloadPlaylist(lastTrack)) { // Retry playlist loading if no playlist is or has been loaded yet this.setSubtitleTrack(this.trackId, lastTrack); } } findTrackId(name) { const textTracks = this.tracksInGroup; for (let i = 0; i < textTracks.length; i++) { const track = textTracks[i]; if (!this.selectDefaultTrack || track.default) { if (!name || name === track.name) { return track.id; } } } return -1; } onError(event, data) { if (data.fatal || !data.context) { return; } if (data.context.type === PlaylistContextType.SUBTITLE_TRACK && data.context.id === this.trackId && data.context.groupId === this.groupId) { this.checkRetry(data); } } /** get alternate subtitle tracks list from playlist **/ get subtitleTracks() { return this.tracksInGroup; } /** get/set index of the selected subtitle track (based on index in subtitle track lists) **/ get subtitleTrack() { return this.trackId; } set subtitleTrack(newId) { this.selectDefaultTrack = false; const lastTrack = this.tracksInGroup ? this.tracksInGroup[this.trackId] : undefined; this.setSubtitleTrack(newId, lastTrack); } loadPlaylist(hlsUrlParameters) { super.loadPlaylist(); const currentTrack = this.tracksInGroup[this.trackId]; if (this.shouldLoadPlaylist(currentTrack)) { const id = currentTrack.id; const groupId = currentTrack.groupId; let url = currentTrack.url; if (hlsUrlParameters) { try { url = hlsUrlParameters.addDirectives(url); } catch (error) { this.warn(`Could not construct new URL with HLS Delivery Directives: ${error}`); } } this.log(`Loading subtitle playlist for id ${id}`); this.hls.trigger(Events.SUBTITLE_TRACK_LOADING, { url, id, groupId, deliveryDirectives: hlsUrlParameters || null }); } } /** * Disables the old subtitleTrack and sets current mode on the next subtitleTrack. * This operates on the DOM textTracks. * A value of -1 will disable all subtitle tracks. */ toggleTrackModes(newId) { const { media, trackId } = this; if (!media) { return; } const textTracks = filterSubtitleTracks(media.textTracks); const groupTracks = textTracks.filter(track => track.groupId === this.groupId); if (newId === -1) { [].slice.call(textTracks).forEach(track => { track.mode = 'disabled'; }); } else { const oldTrack = groupTracks[trackId]; if (oldTrack) { oldTrack.mode = 'disabled'; } } const nextTrack = groupTracks[newId]; if (nextTrack) { nextTrack.mode = this.subtitleDisplay ? 'showing' : 'hidden'; } } /** * This method is responsible for validating the subtitle index and periodically reloading if live. * Dispatches the SUBTITLE_TRACK_SWITCH event, which instructs the subtitle-stream-controller to load the selected track. 
*/ setSubtitleTrack(newId, lastTrack) { var _tracks$newId; const tracks = this.tracksInGroup; // setting this.subtitleTrack will trigger internal logic // if media has not been attached yet, it will fail // we keep a reference to the default track id // and we'll set subtitleTrack when onMediaAttached is triggered if (!this.media) { this.queuedDefaultTrack = newId; return; } if (this.trackId !== newId) { this.toggleTrackModes(newId); } // exit if track id as already set or invalid if (this.trackId === newId && (newId === -1 || (_tracks$newId = tracks[newId]) != null && _tracks$newId.details) || newId < -1 || newId >= tracks.length) { return; } // stopping live reloading timer if any this.clearTimer(); const track = tracks[newId]; this.log(`Switching to subtitle-track ${newId}` + (track ? ` "${track.name}" lang:${track.lang} group:${track.groupId}` : '')); this.trackId = newId; if (track) { const { id, groupId = '', name, type, url } = track; this.hls.trigger(Events.SUBTITLE_TRACK_SWITCH, { id, groupId, name, type, url }); const hlsUrlParameters = this.switchParams(track.url, lastTrack == null ? void 0 : lastTrack.details); this.loadPlaylist(hlsUrlParameters); } else { // switch to -1 this.hls.trigger(Events.SUBTITLE_TRACK_SWITCH, { id: newId }); } } onTextTracksChanged() { if (!this.useTextTrackPolling) { self.clearInterval(this.subtitlePollingInterval); } // Media is undefined when switching streams via loadSource() if (!this.media || !this.hls.config.renderTextTracksNatively) { return; } let trackId = -1; const tracks = filterSubtitleTracks(this.media.textTracks); for (let id = 0; id < tracks.length; id++) { if (tracks[id].mode === 'hidden') { // Do not break in case there is a following track with showing. trackId = id; } else if (tracks[id].mode === 'showing') { trackId = id; break; } } // Setting current subtitleTrack will invoke code. if (this.subtitleTrack !== trackId) { this.subtitleTrack = trackId; } } } function filterSubtitleTracks(textTrackList) { const tracks = []; for (let i = 0; i < textTrackList.length; i++) { const track = textTrackList[i]; // Edge adds a track without a label; we don't want to use it if ((track.kind === 'subtitles' || track.kind === 'captions') && track.label) { tracks.push(textTrackList[i]); } } return tracks; } class BufferOperationQueue { constructor(sourceBufferReference) { this.buffers = void 0; this.queues = { video: [], audio: [], audiovideo: [] }; this.buffers = sourceBufferReference; } append(operation, type) { const queue = this.queues[type]; queue.push(operation); if (queue.length === 1 && this.buffers[type]) { this.executeNext(type); } } insertAbort(operation, type) { const queue = this.queues[type]; queue.unshift(operation); this.executeNext(type); } appendBlocker(type) { let execute; const promise = new Promise(resolve => { execute = resolve; }); const operation = { execute, onStart: () => {}, onComplete: () => {}, onError: () => {} }; this.append(operation, type); return promise; } executeNext(type) { const { buffers, queues } = this; const sb = buffers[type]; const queue = queues[type]; if (queue.length) { const operation = queue[0]; try { // Operations are expected to result in an 'updateend' event being fired. If not, the queue will lock. 
Operations // which do not end with this event must call _onSBUpdateEnd manually operation.execute(); } catch (e) { logger.warn('[buffer-operation-queue]: Unhandled exception executing the current operation'); operation.onError(e); // Only shift the current operation off, otherwise the updateend handler will do this for us if (!(sb != null && sb.updating)) { queue.shift(); this.executeNext(type); } } } } shiftAndExecuteNext(type) { this.queues[type].shift(); this.executeNext(type); } current(type) { return this.queues[type][0]; } } const MediaSource = getMediaSource(); const VIDEO_CODEC_PROFILE_REPACE = /([ha]vc.)(?:\.[^.,]+)+/; class BufferController { // The level details used to determine duration, target-duration and live // cache the self generated object url to detect hijack of video tag // A queue of buffer operations which require the SourceBuffer to not be updating upon execution // References to event listeners for each SourceBuffer, so that they can be referenced for event removal // The number of BUFFER_CODEC events received before any sourceBuffers are created // The total number of BUFFER_CODEC events received // A reference to the attached media element // A reference to the active media source // Last MP3 audio chunk appended // counters constructor(hls) { this.details = null; this._objectUrl = null; this.operationQueue = void 0; this.listeners = void 0; this.hls = void 0; this.bufferCodecEventsExpected = 0; this._bufferCodecEventsTotal = 0; this.media = null; this.mediaSource = null; this.lastMpegAudioChunk = null; this.appendError = 0; this.tracks = {}; this.pendingTracks = {}; this.sourceBuffer = void 0; // Keep as arrow functions so that we can directly reference these functions directly as event listeners this._onMediaSourceOpen = () => { const { media, mediaSource } = this; logger.log('[buffer-controller]: Media source opened'); if (media) { media.removeEventListener('emptied', this._onMediaEmptied); this.updateMediaElementDuration(); this.hls.trigger(Events.MEDIA_ATTACHED, { media }); } if (mediaSource) { // once received, don't listen anymore to sourceopen event mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen); } this.checkPendingTracks(); }; this._onMediaSourceClose = () => { logger.log('[buffer-controller]: Media source closed'); }; this._onMediaSourceEnded = () => { logger.log('[buffer-controller]: Media source ended'); }; this._onMediaEmptied = () => { const { media, _objectUrl } = this; if (media && media.src !== _objectUrl) { logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${media.src})`); } }; this.hls = hls; this._initSourceBuffer(); this.registerListeners(); } hasSourceTypes() { return this.getSourceBufferTypes().length > 0 || Object.keys(this.pendingTracks).length > 0; } destroy() { this.unregisterListeners(); this.details = null; this.lastMpegAudioChunk = null; } registerListeners() { const { hls } = this; hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this); hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.on(Events.BUFFER_RESET, this.onBufferReset, this); hls.on(Events.BUFFER_APPENDING, this.onBufferAppending, this); hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this); hls.on(Events.BUFFER_EOS, this.onBufferEos, this); hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this); hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this); 
hls.on(Events.FRAG_PARSED, this.onFragParsed, this); hls.on(Events.FRAG_CHANGED, this.onFragChanged, this); } unregisterListeners() { const { hls } = this; hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this); hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.off(Events.BUFFER_RESET, this.onBufferReset, this); hls.off(Events.BUFFER_APPENDING, this.onBufferAppending, this); hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this); hls.off(Events.BUFFER_EOS, this.onBufferEos, this); hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this); hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this); hls.off(Events.FRAG_PARSED, this.onFragParsed, this); hls.off(Events.FRAG_CHANGED, this.onFragChanged, this); } _initSourceBuffer() { this.sourceBuffer = {}; this.operationQueue = new BufferOperationQueue(this.sourceBuffer); this.listeners = { audio: [], video: [], audiovideo: [] }; this.lastMpegAudioChunk = null; } onManifestLoading() { this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0; this.details = null; } onManifestParsed(event, data) { // in case of alt audio 2 BUFFER_CODECS events will be triggered, one per stream controller // sourcebuffers will be created all at once when the expected nb of tracks will be reached // in case alt audio is not used, only one BUFFER_CODEC event will be fired from main stream controller // it will contain the expected nb of source buffers, no need to compute it let codecEvents = 2; if (data.audio && !data.video || !data.altAudio || !true) { codecEvents = 1; } this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = codecEvents; logger.log(`${this.bufferCodecEventsExpected} bufferCodec event(s) expected`); } onMediaAttaching(event, data) { const media = this.media = data.media; if (media && MediaSource) { const ms = this.mediaSource = new MediaSource(); // MediaSource listeners are arrow functions with a lexical scope, and do not need to be bound ms.addEventListener('sourceopen', this._onMediaSourceOpen); ms.addEventListener('sourceended', this._onMediaSourceEnded); ms.addEventListener('sourceclose', this._onMediaSourceClose); // link video and media Source media.src = self.URL.createObjectURL(ms); // cache the locally generated object url this._objectUrl = media.src; media.addEventListener('emptied', this._onMediaEmptied); } } onMediaDetaching() { const { media, mediaSource, _objectUrl } = this; if (mediaSource) { logger.log('[buffer-controller]: media source detaching'); if (mediaSource.readyState === 'open') { try { // endOfStream could trigger exception if any sourcebuffer is in updating state // we don't really care about checking sourcebuffer state here, // as we are anyway detaching the MediaSource // let's just avoid this exception to propagate mediaSource.endOfStream(); } catch (err) { logger.warn(`[buffer-controller]: onMediaDetaching: ${err.message} while calling endOfStream`); } } // Clean up the SourceBuffers by invoking onBufferReset this.onBufferReset(); mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen); mediaSource.removeEventListener('sourceended', this._onMediaSourceEnded); mediaSource.removeEventListener('sourceclose', this._onMediaSourceClose); // Detach properly the MediaSource from the HTMLMediaElement as // suggested in https://github.com/w3c/media-source/issues/53. 
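/*
  The teardown below follows the usual MSE detach recipe: revoke the object URL minted in
  onMediaAttaching, and only clear the element's src (removeAttribute('src') followed by load())
  when it still points at our own object URL, so a third party that re-pointed the element is left
  alone. Standalone illustrative sketch (hypothetical helper, not part of this bundle):

    function detachMediaSource(videoEl, objectUrl) {
      URL.revokeObjectURL(objectUrl);    // the blob: URL created via createObjectURL(mediaSource)
      if (videoEl.src === objectUrl) {   // skip cleanup if something else re-pointed the element
        videoEl.removeAttribute('src');
        videoEl.load();                  // resets the element's media pipeline
      }
    }
*/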
if (media) { media.removeEventListener('emptied', this._onMediaEmptied); if (_objectUrl) { self.URL.revokeObjectURL(_objectUrl); } // clean up video tag src only if it's our own url. some external libraries might // hijack the video tag and change its 'src' without destroying the Hls instance first if (media.src === _objectUrl) { media.removeAttribute('src'); media.load(); } else { logger.warn('[buffer-controller]: media.src was changed by a third party - skip cleanup'); } } this.mediaSource = null; this.media = null; this._objectUrl = null; this.bufferCodecEventsExpected = this._bufferCodecEventsTotal; this.pendingTracks = {}; this.tracks = {}; } this.hls.trigger(Events.MEDIA_DETACHED, undefined); } onBufferReset() { this.getSourceBufferTypes().forEach(type => { const sb = this.sourceBuffer[type]; try { if (sb) { this.removeBufferListeners(type); if (this.mediaSource) { this.mediaSource.removeSourceBuffer(sb); } // Synchronously remove the SB from the map before the next call in order to prevent an async function from // accessing it this.sourceBuffer[type] = undefined; } } catch (err) { logger.warn(`[buffer-controller]: Failed to reset the ${type} buffer`, err); } }); this._initSourceBuffer(); } onBufferCodecs(event, data) { const sourceBufferCount = this.getSourceBufferTypes().length; Object.keys(data).forEach(trackName => { if (sourceBufferCount) { // check if SourceBuffer codec needs to change const track = this.tracks[trackName]; if (track && typeof track.buffer.changeType === 'function') { const { id, codec, levelCodec, container, metadata } = data[trackName]; const currentCodec = (track.levelCodec || track.codec).replace(VIDEO_CODEC_PROFILE_REPACE, '$1'); const nextCodec = (levelCodec || codec).replace(VIDEO_CODEC_PROFILE_REPACE, '$1'); if (currentCodec !== nextCodec) { const mimeType = `${container};codecs=${levelCodec || codec}`; this.appendChangeType(trackName, mimeType); logger.log(`[buffer-controller]: switching codec ${currentCodec} to ${nextCodec}`); this.tracks[trackName] = { buffer: track.buffer, codec, container, levelCodec, metadata, id }; } } } else { // if source buffer(s) not created yet, appended buffer tracks in this.pendingTracks this.pendingTracks[trackName] = data[trackName]; } }); // if sourcebuffers already created, do nothing ... if (sourceBufferCount) { return; } this.bufferCodecEventsExpected = Math.max(this.bufferCodecEventsExpected - 1, 0); if (this.mediaSource && this.mediaSource.readyState === 'open') { this.checkPendingTracks(); } } appendChangeType(type, mimeType) { const { operationQueue } = this; const operation = { execute: () => { const sb = this.sourceBuffer[type]; if (sb) { logger.log(`[buffer-controller]: changing ${type} sourceBuffer type to ${mimeType}`); sb.changeType(mimeType); } operationQueue.shiftAndExecuteNext(type); }, onStart: () => {}, onComplete: () => {}, onError: e => { logger.warn(`[buffer-controller]: Failed to change ${type} SourceBuffer type`, e); } }; operationQueue.append(operation, type); } onBufferAppending(event, eventData) { const { hls, operationQueue, tracks } = this; const { data, type, frag, part, chunkMeta } = eventData; const chunkStats = chunkMeta.buffering[type]; const bufferAppendingStart = self.performance.now(); chunkStats.start = bufferAppendingStart; const fragBuffering = frag.stats.buffering; const partBuffering = part ? 
part.stats.buffering : null; if (fragBuffering.start === 0) { fragBuffering.start = bufferAppendingStart; } if (partBuffering && partBuffering.start === 0) { partBuffering.start = bufferAppendingStart; } // TODO: Only update timestampOffset when audio/mpeg fragment or part is not contiguous with previously appended // Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended) // in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset` // is greater than 100ms (this is enough to handle seek for VOD or level change for LIVE videos). // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486 const audioTrack = tracks.audio; let checkTimestampOffset = false; if (type === 'audio' && (audioTrack == null ? void 0 : audioTrack.container) === 'audio/mpeg') { checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn; this.lastMpegAudioChunk = chunkMeta; } const fragStart = frag.start; const operation = { execute: () => { chunkStats.executeStart = self.performance.now(); if (checkTimestampOffset) { const sb = this.sourceBuffer[type]; if (sb) { const delta = fragStart - sb.timestampOffset; if (Math.abs(delta) >= 0.1) { logger.log(`[buffer-controller]: Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn})`); sb.timestampOffset = fragStart; } } } this.appendExecutor(data, type); }, onStart: () => { // logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`); }, onComplete: () => { // logger.debug(`[buffer-controller]: ${type} SourceBuffer updateend`); const end = self.performance.now(); chunkStats.executeEnd = chunkStats.end = end; if (fragBuffering.first === 0) { fragBuffering.first = end; } if (partBuffering && partBuffering.first === 0) { partBuffering.first = end; } const { sourceBuffer } = this; const timeRanges = {}; for (const type in sourceBuffer) { timeRanges[type] = BufferHelper.getBuffered(sourceBuffer[type]); } this.appendError = 0; this.hls.trigger(Events.BUFFER_APPENDED, { type, frag, part, chunkMeta, parent: frag.type, timeRanges }); }, onError: err => { // in case any error occured while appending, put back segment in segments table logger.error(`[buffer-controller]: Error encountered while trying to append to the ${type} SourceBuffer`, err); const event = { type: ErrorTypes.MEDIA_ERROR, parent: frag.type, details: ErrorDetails.BUFFER_APPEND_ERROR, frag, part, chunkMeta, error: err, err, fatal: false }; if (err.code === DOMException.QUOTA_EXCEEDED_ERR) { // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror // let's stop appending any segments, and report BUFFER_FULL_ERROR error event.details = ErrorDetails.BUFFER_FULL_ERROR; } else { this.appendError++; event.details = ErrorDetails.BUFFER_APPEND_ERROR; /* with UHD content, we could get loop of quota exceeded error until browser is able to evict some data from sourcebuffer. Retrying can help recover. 
*/ if (this.appendError > hls.config.appendErrorMaxRetry) { logger.error(`[buffer-controller]: Failed ${hls.config.appendErrorMaxRetry} times to append segment in sourceBuffer`); event.fatal = true; } } hls.trigger(Events.ERROR, event); } }; operationQueue.append(operation, type); } onBufferFlushing(event, data) { const { operationQueue } = this; const flushOperation = type => ({ execute: this.removeExecutor.bind(this, type, data.startOffset, data.endOffset), onStart: () => { // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`); }, onComplete: () => { // logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`); this.hls.trigger(Events.BUFFER_FLUSHED, { type }); }, onError: e => { logger.warn(`[buffer-controller]: Failed to remove from ${type} SourceBuffer`, e); } }); if (data.type) { operationQueue.append(flushOperation(data.type), data.type); } else { this.getSourceBufferTypes().forEach(type => { operationQueue.append(flushOperation(type), type); }); } } onFragParsed(event, data) { const { frag, part } = data; const buffersAppendedTo = []; const elementaryStreams = part ? part.elementaryStreams : frag.elementaryStreams; if (elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO]) { buffersAppendedTo.push('audiovideo'); } else { if (elementaryStreams[ElementaryStreamTypes.AUDIO]) { buffersAppendedTo.push('audio'); } if (elementaryStreams[ElementaryStreamTypes.VIDEO]) { buffersAppendedTo.push('video'); } } const onUnblocked = () => { const now = self.performance.now(); frag.stats.buffering.end = now; if (part) { part.stats.buffering.end = now; } const stats = part ? part.stats : frag.stats; this.hls.trigger(Events.FRAG_BUFFERED, { frag, part, stats, id: frag.type }); }; if (buffersAppendedTo.length === 0) { logger.warn(`Fragments must have at least one ElementaryStreamType set. type: ${frag.type} level: ${frag.level} sn: ${frag.sn}`); } this.blockBuffers(onUnblocked, buffersAppendedTo); } onFragChanged(event, data) { this.flushBackBuffer(); } // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos() // an undefined data.type will mark all buffers as EOS. onBufferEos(event, data) { const ended = this.getSourceBufferTypes().reduce((acc, type) => { const sb = this.sourceBuffer[type]; if (sb && (!data.type || data.type === type)) { sb.ending = true; if (!sb.ended) { sb.ended = true; logger.log(`[buffer-controller]: ${type} sourceBuffer now EOS`); } } return acc && !!(!sb || sb.ended); }, true); if (ended) { logger.log(`[buffer-controller]: Queueing mediaSource.endOfStream()`); this.blockBuffers(() => { this.getSourceBufferTypes().forEach(type => { const sb = this.sourceBuffer[type]; if (sb) { sb.ending = false; } }); const { mediaSource } = this; if (!mediaSource || mediaSource.readyState !== 'open') { if (mediaSource) { logger.info(`[buffer-controller]: Could not call mediaSource.endOfStream(). 
mediaSource.readyState: ${mediaSource.readyState}`); } return; } logger.log(`[buffer-controller]: Calling mediaSource.endOfStream()`); // Allow this to throw and be caught by the enqueueing function mediaSource.endOfStream(); }); } } onLevelUpdated(event, { details }) { if (!details.fragments.length) { return; } this.details = details; if (this.getSourceBufferTypes().length) { this.blockBuffers(this.updateMediaElementDuration.bind(this)); } else { this.updateMediaElementDuration(); } } flushBackBuffer() { const { hls, details, media, sourceBuffer } = this; if (!media || details === null) { return; } const sourceBufferTypes = this.getSourceBufferTypes(); if (!sourceBufferTypes.length) { return; } // Support for deprecated liveBackBufferLength const backBufferLength = details.live && hls.config.liveBackBufferLength !== null ? hls.config.liveBackBufferLength : hls.config.backBufferLength; if (!isFiniteNumber(backBufferLength) || backBufferLength < 0) { return; } const currentTime = media.currentTime; const targetDuration = details.levelTargetDuration; const maxBackBufferLength = Math.max(backBufferLength, targetDuration); const targetBackBufferPosition = Math.floor(currentTime / targetDuration) * targetDuration - maxBackBufferLength; sourceBufferTypes.forEach(type => { const sb = sourceBuffer[type]; if (sb) { const buffered = BufferHelper.getBuffered(sb); // when target buffer start exceeds actual buffer start if (buffered.length > 0 && targetBackBufferPosition > buffered.start(0)) { hls.trigger(Events.BACK_BUFFER_REACHED, { bufferEnd: targetBackBufferPosition }); // Support for deprecated event: if (details.live) { hls.trigger(Events.LIVE_BACK_BUFFER_REACHED, { bufferEnd: targetBackBufferPosition }); } else if (sb.ended && buffered.end(buffered.length - 1) - currentTime < targetDuration * 2) { logger.info(`[buffer-controller]: Cannot flush ${type} back buffer while SourceBuffer is in ended state`); return; } hls.trigger(Events.BUFFER_FLUSHING, { startOffset: 0, endOffset: targetBackBufferPosition, type }); } } }); } /** * Update Media Source duration to current level duration or override to Infinity if configuration parameter * 'liveDurationInfinity` is set to `true` * More details: https://github.com/video-dev/hls.js/issues/355 */ updateMediaElementDuration() { if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') { return; } const { details, hls, media, mediaSource } = this; const levelDuration = details.fragments[0].start + details.totalduration; const mediaDuration = media.duration; const msDuration = isFiniteNumber(mediaSource.duration) ? mediaSource.duration : 0; if (details.live && hls.config.liveDurationInfinity) { // Override duration to Infinity logger.log('[buffer-controller]: Media Source duration is set to Infinity'); mediaSource.duration = Infinity; this.updateSeekableRange(details); } else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) { // levelDuration was the last value we set. 
// not using mediaSource.duration as the browser may tweak this value // only update Media Source duration if its value increase, this is to avoid // flushing already buffered portion when switching between quality level logger.log(`[buffer-controller]: Updating Media Source duration to ${levelDuration.toFixed(3)}`); mediaSource.duration = levelDuration; } } updateSeekableRange(levelDetails) { const mediaSource = this.mediaSource; const fragments = levelDetails.fragments; const len = fragments.length; if (len && levelDetails.live && mediaSource != null && mediaSource.setLiveSeekableRange) { const start = Math.max(0, fragments[0].start); const end = Math.max(start, start + levelDetails.totalduration); mediaSource.setLiveSeekableRange(start, end); } } checkPendingTracks() { const { bufferCodecEventsExpected, operationQueue, pendingTracks } = this; // Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once. // This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after // data has been appended to existing ones. // 2 tracks is the max (one for audio, one for video). If we've reach this max go ahead and create the buffers. const pendingTracksCount = Object.keys(pendingTracks).length; if (pendingTracksCount && !bufferCodecEventsExpected || pendingTracksCount === 2) { // ok, let's create them now ! this.createSourceBuffers(pendingTracks); this.pendingTracks = {}; // append any pending segments now ! const buffers = this.getSourceBufferTypes(); if (buffers.length) { this.hls.trigger(Events.BUFFER_CREATED, { tracks: this.tracks }); buffers.forEach(type => { operationQueue.executeNext(type); }); } else { const error = new Error('could not create source buffer for media codec(s)'); this.hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_INCOMPATIBLE_CODECS_ERROR, fatal: true, error, reason: error.message }); } } } createSourceBuffers(tracks) { const { sourceBuffer, mediaSource } = this; if (!mediaSource) { throw Error('createSourceBuffers called when mediaSource was null'); } for (const trackName in tracks) { if (!sourceBuffer[trackName]) { const track = tracks[trackName]; if (!track) { throw Error(`source buffer exists for track ${trackName}, however track does not`); } // use levelCodec as first priority const codec = track.levelCodec || track.codec; const mimeType = `${track.container};codecs=${codec}`; logger.log(`[buffer-controller]: creating sourceBuffer(${mimeType})`); try { const sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType); const sbName = trackName; this.addBufferListener(sbName, 'updatestart', this._onSBUpdateStart); this.addBufferListener(sbName, 'updateend', this._onSBUpdateEnd); this.addBufferListener(sbName, 'error', this._onSBUpdateError); this.tracks[trackName] = { buffer: sb, codec: codec, container: track.container, levelCodec: track.levelCodec, metadata: track.metadata, id: track.id }; } catch (err) { logger.error(`[buffer-controller]: error while trying to add sourceBuffer: ${err.message}`); this.hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_ADD_CODEC_ERROR, fatal: false, error: err, mimeType: mimeType }); } } } } _onSBUpdateStart(type) { const { operationQueue } = this; const operation = operationQueue.current(type); operation.onStart(); } _onSBUpdateEnd(type) { const { operationQueue } = this; const operation = operationQueue.current(type); 
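// 'updateend' from the SourceBuffer lands here once the executing append/remove
// has finished: onComplete records buffering stats (and, for appends, triggers
// BUFFER_APPENDED) before shiftAndExecuteNext starts the next queued operation.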
operation.onComplete(); operationQueue.shiftAndExecuteNext(type); } _onSBUpdateError(type, event) { const error = new Error(`${type} SourceBuffer error`); logger.error(`[buffer-controller]: ${error}`, event); // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error // SourceBuffer errors are not necessarily fatal; if so, the HTMLMediaElement will fire an error event this.hls.trigger(Events.ERROR, { type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_APPENDING_ERROR, error, fatal: false }); // updateend is always fired after error, so we'll allow that to shift the current operation off of the queue const operation = this.operationQueue.current(type); if (operation) { operation.onError(event); } } // This method must result in an updateend event; if remove is not called, _onSBUpdateEnd must be called manually removeExecutor(type, startOffset, endOffset) { const { media, mediaSource, operationQueue, sourceBuffer } = this; const sb = sourceBuffer[type]; if (!media || !mediaSource || !sb) { logger.warn(`[buffer-controller]: Attempting to remove from the ${type} SourceBuffer, but it does not exist`); operationQueue.shiftAndExecuteNext(type); return; } const mediaDuration = isFiniteNumber(media.duration) ? media.duration : Infinity; const msDuration = isFiniteNumber(mediaSource.duration) ? mediaSource.duration : Infinity; const removeStart = Math.max(0, startOffset); const removeEnd = Math.min(endOffset, mediaDuration, msDuration); if (removeEnd > removeStart && !sb.ending) { sb.ended = false; logger.log(`[buffer-controller]: Removing [${removeStart},${removeEnd}] from the ${type} SourceBuffer`); sb.remove(removeStart, removeEnd); } else { // Cycle the queue operationQueue.shiftAndExecuteNext(type); } } // This method must result in an updateend event; if append is not called, _onSBUpdateEnd must be called manually appendExecutor(data, type) { const { operationQueue, sourceBuffer } = this; const sb = sourceBuffer[type]; if (!sb) { logger.warn(`[buffer-controller]: Attempting to append to the ${type} SourceBuffer, but it does not exist`); operationQueue.shiftAndExecuteNext(type); return; } sb.ended = false; sb.appendBuffer(data); } // Enqueues an operation to each SourceBuffer queue which, upon execution, resolves a promise. When all promises // resolve, the onUnblocked function is executed. Functions calling this method do not need to unblock the queue // upon completion, since we already do it here blockBuffers(onUnblocked, buffers = this.getSourceBufferTypes()) { if (!buffers.length) { logger.log('[buffer-controller]: Blocking operation requested, but no SourceBuffers exist'); Promise.resolve().then(onUnblocked); return; } const { operationQueue } = this; // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`); const blockingOperations = buffers.map(type => operationQueue.appendBlocker(type)); Promise.all(blockingOperations).then(() => { // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`); onUnblocked(); buffers.forEach(type => { const sb = this.sourceBuffer[type]; // Only cycle the queue if the SB is not updating. 
There's a bug in Chrome which sets the SB updating flag to // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration) // While this is a workaround, it's probably useful to have around if (!(sb != null && sb.updating)) { operationQueue.shiftAndExecuteNext(type); } }); }); } getSourceBufferTypes() { return Object.keys(this.sourceBuffer); } addBufferListener(type, event, fn) { const buffer = this.sourceBuffer[type]; if (!buffer) { return; } const listener = fn.bind(this, type); this.listeners[type].push({ event, listener }); buffer.addEventListener(event, listener); } removeBufferListeners(type) { const buffer = this.sourceBuffer[type]; if (!buffer) { return; } this.listeners[type].forEach(l => { buffer.removeEventListener(l.event, l.listener); }); } } /** * * This code was ported from the dash.js project at: * https://github.com/Dash-Industry-Forum/dash.js/blob/development/externals/cea608-parser.js * https://github.com/Dash-Industry-Forum/dash.js/commit/8269b26a761e0853bb21d78780ed945144ecdd4d#diff-71bc295a2d6b6b7093a1d3290d53a4b2 * * The original copyright appears below: * * The copyright in this software is being made available under the BSD License, * included below. This software may be subject to other third party and contributor * rights, including patent rights, and no such rights are granted under this license. * * Copyright (c) 2015-2016, DASH Industry Forum. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation and/or * other materials provided with the distribution. * 2. Neither the name of Dash Industry Forum nor the names of its * contributors may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /** * Exceptions from regular ASCII. 
CodePoints are mapped to UTF-16 codes */ const specialCea608CharsCodes = { 0x2a: 0xe1, // lowercase a, acute accent 0x5c: 0xe9, // lowercase e, acute accent 0x5e: 0xed, // lowercase i, acute accent 0x5f: 0xf3, // lowercase o, acute accent 0x60: 0xfa, // lowercase u, acute accent 0x7b: 0xe7, // lowercase c with cedilla 0x7c: 0xf7, // division symbol 0x7d: 0xd1, // uppercase N tilde 0x7e: 0xf1, // lowercase n tilde 0x7f: 0x2588, // Full block // THIS BLOCK INCLUDES THE 16 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS // THAT COME FROM HI BYTE=0x11 AND LOW BETWEEN 0x30 AND 0x3F // THIS MEANS THAT \x50 MUST BE ADDED TO THE VALUES 0x80: 0xae, // Registered symbol (R) 0x81: 0xb0, // degree sign 0x82: 0xbd, // 1/2 symbol 0x83: 0xbf, // Inverted (open) question mark 0x84: 0x2122, // Trademark symbol (TM) 0x85: 0xa2, // Cents symbol 0x86: 0xa3, // Pounds sterling 0x87: 0x266a, // Music 8'th note 0x88: 0xe0, // lowercase a, grave accent 0x89: 0x20, // transparent space (regular) 0x8a: 0xe8, // lowercase e, grave accent 0x8b: 0xe2, // lowercase a, circumflex accent 0x8c: 0xea, // lowercase e, circumflex accent 0x8d: 0xee, // lowercase i, circumflex accent 0x8e: 0xf4, // lowercase o, circumflex accent 0x8f: 0xfb, // lowercase u, circumflex accent // THIS BLOCK INCLUDES THE 32 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS // THAT COME FROM HI BYTE=0x12 AND LOW BETWEEN 0x20 AND 0x3F 0x90: 0xc1, // capital letter A with acute 0x91: 0xc9, // capital letter E with acute 0x92: 0xd3, // capital letter O with acute 0x93: 0xda, // capital letter U with acute 0x94: 0xdc, // capital letter U with diaresis 0x95: 0xfc, // lowercase letter U with diaeresis 0x96: 0x2018, // opening single quote 0x97: 0xa1, // inverted exclamation mark 0x98: 0x2a, // asterisk 0x99: 0x2019, // closing single quote 0x9a: 0x2501, // box drawings heavy horizontal 0x9b: 0xa9, // copyright sign 0x9c: 0x2120, // Service mark 0x9d: 0x2022, // (round) bullet 0x9e: 0x201c, // Left double quotation mark 0x9f: 0x201d, // Right double quotation mark 0xa0: 0xc0, // uppercase A, grave accent 0xa1: 0xc2, // uppercase A, circumflex 0xa2: 0xc7, // uppercase C with cedilla 0xa3: 0xc8, // uppercase E, grave accent 0xa4: 0xca, // uppercase E, circumflex 0xa5: 0xcb, // capital letter E with diaresis 0xa6: 0xeb, // lowercase letter e with diaresis 0xa7: 0xce, // uppercase I, circumflex 0xa8: 0xcf, // uppercase I, with diaresis 0xa9: 0xef, // lowercase i, with diaresis 0xaa: 0xd4, // uppercase O, circumflex 0xab: 0xd9, // uppercase U, grave accent 0xac: 0xf9, // lowercase u, grave accent 0xad: 0xdb, // uppercase U, circumflex 0xae: 0xab, // left-pointing double angle quotation mark 0xaf: 0xbb, // right-pointing double angle quotation mark // THIS BLOCK INCLUDES THE 32 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS // THAT COME FROM HI BYTE=0x13 AND LOW BETWEEN 0x20 AND 0x3F 0xb0: 0xc3, // Uppercase A, tilde 0xb1: 0xe3, // Lowercase a, tilde 0xb2: 0xcd, // Uppercase I, acute accent 0xb3: 0xcc, // Uppercase I, grave accent 0xb4: 0xec, // Lowercase i, grave accent 0xb5: 0xd2, // Uppercase O, grave accent 0xb6: 0xf2, // Lowercase o, grave accent 0xb7: 0xd5, // Uppercase O, tilde 0xb8: 0xf5, // Lowercase o, tilde 0xb9: 0x7b, // Open curly brace 0xba: 0x7d, // Closing curly brace 0xbb: 0x5c, // Backslash 0xbc: 0x5e, // Caret 0xbd: 0x5f, // Underscore 0xbe: 0x7c, // Pipe (vertical line) 0xbf: 0x223c, // Tilde operator 0xc0: 0xc4, // Uppercase A, umlaut 0xc1: 0xe4, // Lowercase A, umlaut 0xc2: 0xd6, // Uppercase O, umlaut 0xc3: 0xf6, // Lowercase o, umlaut 0xc4: 0xdf, // Esszett (sharp 
S) 0xc5: 0xa5, // Yen symbol 0xc6: 0xa4, // Generic currency sign 0xc7: 0x2503, // Box drawings heavy vertical 0xc8: 0xc5, // Uppercase A, ring 0xc9: 0xe5, // Lowercase A, ring 0xca: 0xd8, // Uppercase O, stroke 0xcb: 0xf8, // Lowercase o, strok 0xcc: 0x250f, // Box drawings heavy down and right 0xcd: 0x2513, // Box drawings heavy down and left 0xce: 0x2517, // Box drawings heavy up and right 0xcf: 0x251b // Box drawings heavy up and left }; /** * Utils */ const getCharForByte = function getCharForByte(byte) { let charCode = byte; if (specialCea608CharsCodes.hasOwnProperty(byte)) { charCode = specialCea608CharsCodes[byte]; } return String.fromCharCode(charCode); }; const NR_ROWS = 15; const NR_COLS = 100; // Tables to look up row from PAC data const rowsLowCh1 = { 0x11: 1, 0x12: 3, 0x15: 5, 0x16: 7, 0x17: 9, 0x10: 11, 0x13: 12, 0x14: 14 }; const rowsHighCh1 = { 0x11: 2, 0x12: 4, 0x15: 6, 0x16: 8, 0x17: 10, 0x13: 13, 0x14: 15 }; const rowsLowCh2 = { 0x19: 1, 0x1a: 3, 0x1d: 5, 0x1e: 7, 0x1f: 9, 0x18: 11, 0x1b: 12, 0x1c: 14 }; const rowsHighCh2 = { 0x19: 2, 0x1a: 4, 0x1d: 6, 0x1e: 8, 0x1f: 10, 0x1b: 13, 0x1c: 15 }; const backgroundColors = ['white', 'green', 'blue', 'cyan', 'red', 'yellow', 'magenta', 'black', 'transparent']; class CaptionsLogger { constructor() { this.time = null; this.verboseLevel = 0; } log(severity, msg) { if (this.verboseLevel >= severity) { const m = typeof msg === 'function' ? msg() : msg; logger.log(`${this.time} [${severity}] ${m}`); } } } const numArrayToHexArray = function numArrayToHexArray(numArray) { const hexArray = []; for (let j = 0; j < numArray.length; j++) { hexArray.push(numArray[j].toString(16)); } return hexArray; }; class PenState { constructor(foreground, underline, italics, background, flash) { this.foreground = void 0; this.underline = void 0; this.italics = void 0; this.background = void 0; this.flash = void 0; this.foreground = foreground || 'white'; this.underline = underline || false; this.italics = italics || false; this.background = background || 'black'; this.flash = flash || false; } reset() { this.foreground = 'white'; this.underline = false; this.italics = false; this.background = 'black'; this.flash = false; } setStyles(styles) { const attribs = ['foreground', 'underline', 'italics', 'background', 'flash']; for (let i = 0; i < attribs.length; i++) { const style = attribs[i]; if (styles.hasOwnProperty(style)) { this[style] = styles[style]; } } } isDefault() { return this.foreground === 'white' && !this.underline && !this.italics && this.background === 'black' && !this.flash; } equals(other) { return this.foreground === other.foreground && this.underline === other.underline && this.italics === other.italics && this.background === other.background && this.flash === other.flash; } copy(newPenState) { this.foreground = newPenState.foreground; this.underline = newPenState.underline; this.italics = newPenState.italics; this.background = newPenState.background; this.flash = newPenState.flash; } toString() { return 'color=' + this.foreground + ', underline=' + this.underline + ', italics=' + this.italics + ', background=' + this.background + ', flash=' + this.flash; } } /** * Unicode character with styling and background. 
* @constructor */ class StyledUnicodeChar { constructor(uchar, foreground, underline, italics, background, flash) { this.uchar = void 0; this.penState = void 0; this.uchar = uchar || ' '; // unicode character this.penState = new PenState(foreground, underline, italics, background, flash); } reset() { this.uchar = ' '; this.penState.reset(); } setChar(uchar, newPenState) { this.uchar = uchar; this.penState.copy(newPenState); } setPenState(newPenState) { this.penState.copy(newPenState); } equals(other) { return this.uchar === other.uchar && this.penState.equals(other.penState); } copy(newChar) { this.uchar = newChar.uchar; this.penState.copy(newChar.penState); } isEmpty() { return this.uchar === ' ' && this.penState.isDefault(); } } /** * CEA-608 row consisting of NR_COLS instances of StyledUnicodeChar. * @constructor */ class Row { constructor(logger) { this.chars = void 0; this.pos = void 0; this.currPenState = void 0; this.cueStartTime = void 0; this.logger = void 0; this.chars = []; for (let i = 0; i < NR_COLS; i++) { this.chars.push(new StyledUnicodeChar()); } this.logger = logger; this.pos = 0; this.currPenState = new PenState(); } equals(other) { let equal = true; for (let i = 0; i < NR_COLS; i++) { if (!this.chars[i].equals(other.chars[i])) { equal = false; break; } } return equal; } copy(other) { for (let i = 0; i < NR_COLS; i++) { this.chars[i].copy(other.chars[i]); } } isEmpty() { let empty = true; for (let i = 0; i < NR_COLS; i++) { if (!this.chars[i].isEmpty()) { empty = false; break; } } return empty; } /** * Set the cursor to a valid column. */ setCursor(absPos) { if (this.pos !== absPos) { this.pos = absPos; } if (this.pos < 0) { this.logger.log(3, 'Negative cursor position ' + this.pos); this.pos = 0; } else if (this.pos > NR_COLS) { this.logger.log(3, 'Too large cursor position ' + this.pos); this.pos = NR_COLS; } } /** * Move the cursor relative to current position. */ moveCursor(relPos) { const newPos = this.pos + relPos; if (relPos > 1) { for (let i = this.pos + 1; i < newPos + 1; i++) { this.chars[i].setPenState(this.currPenState); } } this.setCursor(newPos); } /** * Backspace, move one step back and clear character. */ backSpace() { this.moveCursor(-1); this.chars[this.pos].setChar(' ', this.currPenState); } insertChar(byte) { if (byte >= 0x90) { // Extended char this.backSpace(); } const char = getCharForByte(byte); if (this.pos >= NR_COLS) { this.logger.log(0, () => 'Cannot insert ' + byte.toString(16) + ' (' + char + ') at position ' + this.pos + '. 
Skipping it!'); return; } this.chars[this.pos].setChar(char, this.currPenState); this.moveCursor(1); } clearFromPos(startPos) { let i; for (i = startPos; i < NR_COLS; i++) { this.chars[i].reset(); } } clear() { this.clearFromPos(0); this.pos = 0; this.currPenState.reset(); } clearToEndOfRow() { this.clearFromPos(this.pos); } getTextString() { const chars = []; let empty = true; for (let i = 0; i < NR_COLS; i++) { const char = this.chars[i].uchar; if (char !== ' ') { empty = false; } chars.push(char); } if (empty) { return ''; } else { return chars.join(''); } } setPenStyles(styles) { this.currPenState.setStyles(styles); const currChar = this.chars[this.pos]; currChar.setPenState(this.currPenState); } } /** * Keep a CEA-608 screen of 32x15 styled characters * @constructor */ class CaptionScreen { constructor(logger) { this.rows = void 0; this.currRow = void 0; this.nrRollUpRows = void 0; this.lastOutputScreen = void 0; this.logger = void 0; this.rows = []; for (let i = 0; i < NR_ROWS; i++) { this.rows.push(new Row(logger)); } // Note that we use zero-based numbering (0-14) this.logger = logger; this.currRow = NR_ROWS - 1; this.nrRollUpRows = null; this.lastOutputScreen = null; this.reset(); } reset() { for (let i = 0; i < NR_ROWS; i++) { this.rows[i].clear(); } this.currRow = NR_ROWS - 1; } equals(other) { let equal = true; for (let i = 0; i < NR_ROWS; i++) { if (!this.rows[i].equals(other.rows[i])) { equal = false; break; } } return equal; } copy(other) { for (let i = 0; i < NR_ROWS; i++) { this.rows[i].copy(other.rows[i]); } } isEmpty() { let empty = true; for (let i = 0; i < NR_ROWS; i++) { if (!this.rows[i].isEmpty()) { empty = false; break; } } return empty; } backSpace() { const row = this.rows[this.currRow]; row.backSpace(); } clearToEndOfRow() { const row = this.rows[this.currRow]; row.clearToEndOfRow(); } /** * Insert a character (without styling) in the current row. */ insertChar(char) { const row = this.rows[this.currRow]; row.insertChar(char); } setPen(styles) { const row = this.rows[this.currRow]; row.setPenStyles(styles); } moveCursor(relPos) { const row = this.rows[this.currRow]; row.moveCursor(relPos); } setCursor(absPos) { this.logger.log(2, 'setCursor: ' + absPos); const row = this.rows[this.currRow]; row.setCursor(absPos); } setPAC(pacData) { this.logger.log(2, () => 'pacData = ' + JSON.stringify(pacData)); let newRow = pacData.row - 1; if (this.nrRollUpRows && newRow < this.nrRollUpRows - 1) { newRow = this.nrRollUpRows - 1; } // Make sure this only affects Roll-up Captions by checking this.nrRollUpRows if (this.nrRollUpRows && this.currRow !== newRow) { // clear all rows first for (let i = 0; i < NR_ROWS; i++) { this.rows[i].clear(); } // Copy this.nrRollUpRows rows from lastOutputScreen and place it in the newRow location // topRowIndex - the start of rows to copy (inclusive index) const topRowIndex = this.currRow + 1 - this.nrRollUpRows; // We only copy if the last position was already shown. // We use the cueStartTime value to check this. 
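// prevLineTime < time means the row at topRowIndex was already shown at an
// earlier timestamp, so it is safe to carry it over into the repositioned
// roll-up window below.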
const lastOutputScreen = this.lastOutputScreen; if (lastOutputScreen) { const prevLineTime = lastOutputScreen.rows[topRowIndex].cueStartTime; const time = this.logger.time; if (prevLineTime && time !== null && prevLineTime < time) { for (let i = 0; i < this.nrRollUpRows; i++) { this.rows[newRow - this.nrRollUpRows + i + 1].copy(lastOutputScreen.rows[topRowIndex + i]); } } } } this.currRow = newRow; const row = this.rows[this.currRow]; if (pacData.indent !== null) { const indent = pacData.indent; const prevPos = Math.max(indent - 1, 0); row.setCursor(pacData.indent); pacData.color = row.chars[prevPos].penState.foreground; } const styles = { foreground: pacData.color, underline: pacData.underline, italics: pacData.italics, background: 'black', flash: false }; this.setPen(styles); } /** * Set background/extra foreground, but first do back_space, and then insert space (backwards compatibility). */ setBkgData(bkgData) { this.logger.log(2, () => 'bkgData = ' + JSON.stringify(bkgData)); this.backSpace(); this.setPen(bkgData); this.insertChar(0x20); // Space } setRollUpRows(nrRows) { this.nrRollUpRows = nrRows; } rollUp() { if (this.nrRollUpRows === null) { this.logger.log(3, 'roll_up but nrRollUpRows not set yet'); return; // Not properly setup } this.logger.log(1, () => this.getDisplayText()); const topRowIndex = this.currRow + 1 - this.nrRollUpRows; const topRow = this.rows.splice(topRowIndex, 1)[0]; topRow.clear(); this.rows.splice(this.currRow, 0, topRow); this.logger.log(2, 'Rolling up'); // this.logger.log(VerboseLevel.TEXT, this.get_display_text()) } /** * Get all non-empty rows with as unicode text. */ getDisplayText(asOneRow) { asOneRow = asOneRow || false; const displayText = []; let text = ''; let rowNr = -1; for (let i = 0; i < NR_ROWS; i++) { const rowText = this.rows[i].getTextString(); if (rowText) { rowNr = i + 1; if (asOneRow) { displayText.push('Row ' + rowNr + ": '" + rowText + "'"); } else { displayText.push(rowText.trim()); } } } if (displayText.length > 0) { if (asOneRow) { text = '[' + displayText.join(' | ') + ']'; } else { text = displayText.join('\n'); } } return text; } getTextAndFormat() { return this.rows; } } // var modes = ['MODE_ROLL-UP', 'MODE_POP-ON', 'MODE_PAINT-ON', 'MODE_TEXT']; class Cea608Channel { constructor(channelNumber, outputFilter, logger) { this.chNr = void 0; this.outputFilter = void 0; this.mode = void 0; this.verbose = void 0; this.displayedMemory = void 0; this.nonDisplayedMemory = void 0; this.lastOutputScreen = void 0; this.currRollUpRow = void 0; this.writeScreen = void 0; this.cueStartTime = void 0; this.logger = void 0; this.chNr = channelNumber; this.outputFilter = outputFilter; this.mode = null; this.verbose = 0; this.displayedMemory = new CaptionScreen(logger); this.nonDisplayedMemory = new CaptionScreen(logger); this.lastOutputScreen = new CaptionScreen(logger); this.currRollUpRow = this.displayedMemory.rows[NR_ROWS - 1]; this.writeScreen = this.displayedMemory; this.mode = null; this.cueStartTime = null; // Keeps track of where a cue started. 
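// Two caption memories are kept per channel: displayedMemory is what the viewer
// currently sees, while nonDisplayedMemory is composed off-screen for pop-on
// captions and swapped in by ccEOC (End Of Caption).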
this.logger = logger; } reset() { this.mode = null; this.displayedMemory.reset(); this.nonDisplayedMemory.reset(); this.lastOutputScreen.reset(); this.outputFilter.reset(); this.currRollUpRow = this.displayedMemory.rows[NR_ROWS - 1]; this.writeScreen = this.displayedMemory; this.mode = null; this.cueStartTime = null; } getHandler() { return this.outputFilter; } setHandler(newHandler) { this.outputFilter = newHandler; } setPAC(pacData) { this.writeScreen.setPAC(pacData); } setBkgData(bkgData) { this.writeScreen.setBkgData(bkgData); } setMode(newMode) { if (newMode === this.mode) { return; } this.mode = newMode; this.logger.log(2, () => 'MODE=' + newMode); if (this.mode === 'MODE_POP-ON') { this.writeScreen = this.nonDisplayedMemory; } else { this.writeScreen = this.displayedMemory; this.writeScreen.reset(); } if (this.mode !== 'MODE_ROLL-UP') { this.displayedMemory.nrRollUpRows = null; this.nonDisplayedMemory.nrRollUpRows = null; } this.mode = newMode; } insertChars(chars) { for (let i = 0; i < chars.length; i++) { this.writeScreen.insertChar(chars[i]); } const screen = this.writeScreen === this.displayedMemory ? 'DISP' : 'NON_DISP'; this.logger.log(2, () => screen + ': ' + this.writeScreen.getDisplayText(true)); if (this.mode === 'MODE_PAINT-ON' || this.mode === 'MODE_ROLL-UP') { this.logger.log(1, () => 'DISPLAYED: ' + this.displayedMemory.getDisplayText(true)); this.outputDataUpdate(); } } ccRCL() { // Resume Caption Loading (switch mode to Pop On) this.logger.log(2, 'RCL - Resume Caption Loading'); this.setMode('MODE_POP-ON'); } ccBS() { // BackSpace this.logger.log(2, 'BS - BackSpace'); if (this.mode === 'MODE_TEXT') { return; } this.writeScreen.backSpace(); if (this.writeScreen === this.displayedMemory) { this.outputDataUpdate(); } } ccAOF() { // Reserved (formerly Alarm Off) } ccAON() { // Reserved (formerly Alarm On) } ccDER() { // Delete to End of Row this.logger.log(2, 'DER- Delete to End of Row'); this.writeScreen.clearToEndOfRow(); this.outputDataUpdate(); } ccRU(nrRows) { // Roll-Up Captions-2,3,or 4 Rows this.logger.log(2, 'RU(' + nrRows + ') - Roll Up'); this.writeScreen = this.displayedMemory; this.setMode('MODE_ROLL-UP'); this.writeScreen.setRollUpRows(nrRows); } ccFON() { // Flash On this.logger.log(2, 'FON - Flash On'); this.writeScreen.setPen({ flash: true }); } ccRDC() { // Resume Direct Captioning (switch mode to PaintOn) this.logger.log(2, 'RDC - Resume Direct Captioning'); this.setMode('MODE_PAINT-ON'); } ccTR() { // Text Restart in text mode (not supported, however) this.logger.log(2, 'TR'); this.setMode('MODE_TEXT'); } ccRTD() { // Resume Text Display in Text mode (not supported, however) this.logger.log(2, 'RTD'); this.setMode('MODE_TEXT'); } ccEDM() { // Erase Displayed Memory this.logger.log(2, 'EDM - Erase Displayed Memory'); this.displayedMemory.reset(); this.outputDataUpdate(true); } ccCR() { // Carriage Return this.logger.log(2, 'CR - Carriage Return'); this.writeScreen.rollUp(); this.outputDataUpdate(true); } ccENM() { // Erase Non-Displayed Memory this.logger.log(2, 'ENM - Erase Non-displayed Memory'); this.nonDisplayedMemory.reset(); } ccEOC() { // End of Caption (Flip Memories) this.logger.log(2, 'EOC - End Of Caption'); if (this.mode === 'MODE_POP-ON') { const tmp = this.displayedMemory; this.displayedMemory = this.nonDisplayedMemory; this.nonDisplayedMemory = tmp; this.writeScreen = this.nonDisplayedMemory; this.logger.log(1, () => 'DISP: ' + this.displayedMemory.getDisplayText()); } this.outputDataUpdate(true); } ccTO(nrCols) { // Tab Offset 1,2, or 3 
columns this.logger.log(2, 'TO(' + nrCols + ') - Tab Offset'); this.writeScreen.moveCursor(nrCols); } ccMIDROW(secondByte) { // Parse MIDROW command const styles = { flash: false }; styles.underline = secondByte % 2 === 1; styles.italics = secondByte >= 0x2e; if (!styles.italics) { const colorIndex = Math.floor(secondByte / 2) - 0x10; const colors = ['white', 'green', 'blue', 'cyan', 'red', 'yellow', 'magenta']; styles.foreground = colors[colorIndex]; } else { styles.foreground = 'white'; } this.logger.log(2, 'MIDROW: ' + JSON.stringify(styles)); this.writeScreen.setPen(styles); } outputDataUpdate(dispatch = false) { const time = this.logger.time; if (time === null) { return; } if (this.outputFilter) { if (this.cueStartTime === null && !this.displayedMemory.isEmpty()) { // Start of a new cue this.cueStartTime = time; } else { if (!this.displayedMemory.equals(this.lastOutputScreen)) { this.outputFilter.newCue(this.cueStartTime, time, this.lastOutputScreen); if (dispatch && this.outputFilter.dispatchCue) { this.outputFilter.dispatchCue(); } this.cueStartTime = this.displayedMemory.isEmpty() ? null : time; } } this.lastOutputScreen.copy(this.displayedMemory); } } cueSplitAtTime(t) { if (this.outputFilter) { if (!this.displayedMemory.isEmpty()) { if (this.outputFilter.newCue) { this.outputFilter.newCue(this.cueStartTime, t, this.displayedMemory); } this.cueStartTime = t; } } } } // Will be 1 or 2 when parsing captions class Cea608Parser { constructor(field, out1, out2) { this.channels = void 0; this.currentChannel = 0; this.cmdHistory = void 0; this.logger = void 0; const logger = new CaptionsLogger(); this.channels = [null, new Cea608Channel(field, out1, logger), new Cea608Channel(field + 1, out2, logger)]; this.cmdHistory = createCmdHistory(); this.logger = logger; } getHandler(channel) { return this.channels[channel].getHandler(); } setHandler(channel, newHandler) { this.channels[channel].setHandler(newHandler); } /** * Add data for time t in forms of list of bytes (unsigned ints). The bytes are treated as pairs. */ addData(time, byteList) { let cmdFound; let a; let b; let charsFound = false; this.logger.time = time; for (let i = 0; i < byteList.length; i += 2) { a = byteList[i] & 0x7f; b = byteList[i + 1] & 0x7f; if (a === 0 && b === 0) { continue; } else { this.logger.log(3, '[' + numArrayToHexArray([byteList[i], byteList[i + 1]]) + '] -> (' + numArrayToHexArray([a, b]) + ')'); } cmdFound = this.parseCmd(a, b); if (!cmdFound) { cmdFound = this.parseMidrow(a, b); } if (!cmdFound) { cmdFound = this.parsePAC(a, b); } if (!cmdFound) { cmdFound = this.parseBackgroundAttributes(a, b); } if (!cmdFound) { charsFound = this.parseChars(a, b); if (charsFound) { const currChNr = this.currentChannel; if (currChNr && currChNr > 0) { const channel = this.channels[currChNr]; channel.insertChars(charsFound); } else { this.logger.log(2, 'No channel found yet. TEXT-MODE?'); } } } if (!cmdFound && !charsFound) { this.logger.log(2, "Couldn't parse cleaned data " + numArrayToHexArray([a, b]) + ' orig: ' + numArrayToHexArray([byteList[i], byteList[i + 1]])); } } } /** * Parse Command. 
* @returns True if a command was found */ parseCmd(a, b) { const { cmdHistory } = this; const cond1 = (a === 0x14 || a === 0x1c || a === 0x15 || a === 0x1d) && b >= 0x20 && b <= 0x2f; const cond2 = (a === 0x17 || a === 0x1f) && b >= 0x21 && b <= 0x23; if (!(cond1 || cond2)) { return false; } if (hasCmdRepeated(a, b, cmdHistory)) { setLastCmd(null, null, cmdHistory); this.logger.log(3, 'Repeated command (' + numArrayToHexArray([a, b]) + ') is dropped'); return true; } const chNr = a === 0x14 || a === 0x15 || a === 0x17 ? 1 : 2; const channel = this.channels[chNr]; if (a === 0x14 || a === 0x15 || a === 0x1c || a === 0x1d) { if (b === 0x20) { channel.ccRCL(); } else if (b === 0x21) { channel.ccBS(); } else if (b === 0x22) { channel.ccAOF(); } else if (b === 0x23) { channel.ccAON(); } else if (b === 0x24) { channel.ccDER(); } else if (b === 0x25) { channel.ccRU(2); } else if (b === 0x26) { channel.ccRU(3); } else if (b === 0x27) { channel.ccRU(4); } else if (b === 0x28) { channel.ccFON(); } else if (b === 0x29) { channel.ccRDC(); } else if (b === 0x2a) { channel.ccTR(); } else if (b === 0x2b) { channel.ccRTD(); } else if (b === 0x2c) { channel.ccEDM(); } else if (b === 0x2d) { channel.ccCR(); } else if (b === 0x2e) { channel.ccENM(); } else if (b === 0x2f) { channel.ccEOC(); } } else { // a == 0x17 || a == 0x1F channel.ccTO(b - 0x20); } setLastCmd(a, b, cmdHistory); this.currentChannel = chNr; return true; } /** * Parse midrow styling command */ parseMidrow(a, b) { let chNr = 0; if ((a === 0x11 || a === 0x19) && b >= 0x20 && b <= 0x2f) { if (a === 0x11) { chNr = 1; } else { chNr = 2; } if (chNr !== this.currentChannel) { this.logger.log(0, 'Mismatch channel in midrow parsing'); return false; } const channel = this.channels[chNr]; if (!channel) { return false; } channel.ccMIDROW(b); this.logger.log(3, 'MIDROW (' + numArrayToHexArray([a, b]) + ')'); return true; } return false; } /** * Parse Preable Access Codes (Table 53). * @returns {Boolean} Tells if PAC found */ parsePAC(a, b) { let row; const cmdHistory = this.cmdHistory; const case1 = (a >= 0x11 && a <= 0x17 || a >= 0x19 && a <= 0x1f) && b >= 0x40 && b <= 0x7f; const case2 = (a === 0x10 || a === 0x18) && b >= 0x40 && b <= 0x5f; if (!(case1 || case2)) { return false; } if (hasCmdRepeated(a, b, cmdHistory)) { setLastCmd(null, null, cmdHistory); return true; // Repeated commands are dropped (once) } const chNr = a <= 0x17 ? 1 : 2; if (b >= 0x40 && b <= 0x5f) { row = chNr === 1 ? rowsLowCh1[a] : rowsLowCh2[a]; } else { // 0x60 <= b <= 0x7F row = chNr === 1 ? rowsHighCh1[a] : rowsHighCh2[a]; } const channel = this.channels[chNr]; if (!channel) { return false; } channel.setPAC(this.interpretPAC(row, b)); setLastCmd(a, b, cmdHistory); this.currentChannel = chNr; return true; } /** * Interpret the second byte of the pac, and return the information. * @returns pacData with style parameters */ interpretPAC(row, byte) { let pacIndex; const pacData = { color: null, italics: false, indent: null, underline: false, row: row }; if (byte > 0x5f) { pacIndex = byte - 0x60; } else { pacIndex = byte - 0x40; } pacData.underline = (pacIndex & 1) === 1; if (pacIndex <= 0xd) { pacData.color = ['white', 'green', 'blue', 'cyan', 'red', 'yellow', 'magenta', 'white'][Math.floor(pacIndex / 2)]; } else if (pacIndex <= 0xf) { pacData.italics = true; pacData.color = 'white'; } else { pacData.indent = Math.floor((pacIndex - 0x10) / 2) * 4; } return pacData; // Note that row has zero offset. The spec uses 1. } /** * Parse characters. 
* @returns An array with 1 to 2 codes corresponding to chars, if found. null otherwise. */ parseChars(a, b) { let channelNr; let charCodes = null; let charCode1 = null; if (a >= 0x19) { channelNr = 2; charCode1 = a - 8; } else { channelNr = 1; charCode1 = a; } if (charCode1 >= 0x11 && charCode1 <= 0x13) { // Special character let oneCode; if (charCode1 === 0x11) { oneCode = b + 0x50; } else if (charCode1 === 0x12) { oneCode = b + 0x70; } else { oneCode = b + 0x90; } this.logger.log(2, "Special char '" + getCharForByte(oneCode) + "' in channel " + channelNr); charCodes = [oneCode]; } else if (a >= 0x20 && a <= 0x7f) { charCodes = b === 0 ? [a] : [a, b]; } if (charCodes) { const hexCodes = numArrayToHexArray(charCodes); this.logger.log(3, 'Char codes = ' + hexCodes.join(',')); setLastCmd(a, b, this.cmdHistory); } return charCodes; } /** * Parse extended background attributes as well as new foreground color black. * @returns True if background attributes are found */ parseBackgroundAttributes(a, b) { const case1 = (a === 0x10 || a === 0x18) && b >= 0x20 && b <= 0x2f; const case2 = (a === 0x17 || a === 0x1f) && b >= 0x2d && b <= 0x2f; if (!(case1 || case2)) { return false; } let index; const bkgData = {}; if (a === 0x10 || a === 0x18) { index = Math.floor((b - 0x20) / 2); bkgData.background = backgroundColors[index]; if (b % 2 === 1) { bkgData.background = bkgData.background + '_semi'; } } else if (b === 0x2d) { bkgData.background = 'transparent'; } else { bkgData.foreground = 'black'; if (b === 0x2f) { bkgData.underline = true; } } const chNr = a <= 0x17 ? 1 : 2; const channel = this.channels[chNr]; channel.setBkgData(bkgData); setLastCmd(a, b, this.cmdHistory); return true; } /** * Reset state of parser and its channels. */ reset() { for (let i = 0; i < Object.keys(this.channels).length; i++) { const channel = this.channels[i]; if (channel) { channel.reset(); } } this.cmdHistory = createCmdHistory(); } /** * Trigger the generation of a cue, and the start of a new one if displayScreens are not empty. */ cueSplitAtTime(t) { for (let i = 0; i < this.channels.length; i++) { const channel = this.channels[i]; if (channel) { channel.cueSplitAtTime(t); } } } } function setLastCmd(a, b, cmdHistory) { cmdHistory.a = a; cmdHistory.b = b; } function hasCmdRepeated(a, b, cmdHistory) { return cmdHistory.a === a && cmdHistory.b === b; } function createCmdHistory() { return { a: null, b: null }; } class OutputFilter { constructor(timelineController, trackName) { this.timelineController = void 0; this.cueRanges = []; this.trackName = void 0; this.startTime = null; this.endTime = null; this.screen = null; this.timelineController = timelineController; this.trackName = trackName; } dispatchCue() { if (this.startTime === null) { return; } this.timelineController.addCues(this.trackName, this.startTime, this.endTime, this.screen, this.cueRanges); this.startTime = null; } newCue(startTime, endTime, screen) { if (this.startTime === null || this.startTime > startTime) { this.startTime = startTime; } this.endTime = endTime; this.screen = screen; this.timelineController.createCaptionsTrack(this.trackName); } reset() { this.cueRanges = []; this.startTime = null; } } /** * Copyright 2013 vtt.js Contributors * * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an 'AS IS' BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ var VTTCue = (function () { if (typeof self !== 'undefined' && self.VTTCue) { return self.VTTCue; } const AllowedDirections = ['', 'lr', 'rl']; const AllowedAlignments = ['start', 'middle', 'end', 'left', 'right']; function isAllowedValue(allowed, value) { if (typeof value !== 'string') { return false; } // necessary for assuring the generic conforms to the Array interface if (!Array.isArray(allowed)) { return false; } // reset the type so that the next narrowing works well const lcValue = value.toLowerCase(); // use the allow list to narrow the type to a specific subset of strings if (~allowed.indexOf(lcValue)) { return lcValue; } return false; } function findDirectionSetting(value) { return isAllowedValue(AllowedDirections, value); } function findAlignSetting(value) { return isAllowedValue(AllowedAlignments, value); } function extend(obj, ...rest) { let i = 1; for (; i < arguments.length; i++) { const cobj = arguments[i]; for (const p in cobj) { obj[p] = cobj[p]; } } return obj; } function VTTCue(startTime, endTime, text) { const cue = this; const baseObj = { enumerable: true }; /** * Shim implementation specific properties. These properties are not in * the spec. */ // Lets us know when the VTTCue's data has changed in such a way that we need // to recompute its display state. This lets us compute its display state // lazily. cue.hasBeenReset = false; /** * VTTCue and TextTrackCue properties * http://dev.w3.org/html5/webvtt/#vttcue-interface */ let _id = ''; let _pauseOnExit = false; let _startTime = startTime; let _endTime = endTime; let _text = text; let _region = null; let _vertical = ''; let _snapToLines = true; let _line = 'auto'; let _lineAlign = 'start'; let _position = 50; let _positionAlign = 'middle'; let _size = 50; let _align = 'middle'; Object.defineProperty(cue, 'id', extend({}, baseObj, { get: function () { return _id; }, set: function (value) { _id = '' + value; } })); Object.defineProperty(cue, 'pauseOnExit', extend({}, baseObj, { get: function () { return _pauseOnExit; }, set: function (value) { _pauseOnExit = !!value; } })); Object.defineProperty(cue, 'startTime', extend({}, baseObj, { get: function () { return _startTime; }, set: function (value) { if (typeof value !== 'number') { throw new TypeError('Start time must be set to a number.'); } _startTime = value; this.hasBeenReset = true; } })); Object.defineProperty(cue, 'endTime', extend({}, baseObj, { get: function () { return _endTime; }, set: function (value) { if (typeof value !== 'number') { throw new TypeError('End time must be set to a number.'); } _endTime = value; this.hasBeenReset = true; } })); Object.defineProperty(cue, 'text', extend({}, baseObj, { get: function () { return _text; }, set: function (value) { _text = '' + value; this.hasBeenReset = true; } })); // todo: implement VTTRegion polyfill? 
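// Most of the setters below validate their input (throwing on illegal values)
// and flip hasBeenReset so the cue's display state can be recomputed lazily.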
Object.defineProperty(cue, 'region', extend({}, baseObj, { get: function () { return _region; }, set: function (value) { _region = value; this.hasBeenReset = true; } })); Object.defineProperty(cue, 'vertical', extend({}, baseObj, { get: function () { return _vertical; }, set: function (value) { const setting = findDirectionSetting(value); // Have to check for false because the setting an be an empty string. if (setting === false) { throw new SyntaxError('An invalid or illegal string was specified.'); } _vertical = setting; this.hasBeenReset = true; } })); Object.defineProperty(cue, 'snapToLines', extend({}, baseObj, { get: function () { return _snapToLines; }, set: function (value) { _snapToLines = !!value; this.hasBeenReset = true; } })); Object.defineProperty(cue, 'line', extend({}, baseObj, { get: function () { return _line; }, set: function (value) { if (typeof value !== 'number' && value !== 'auto') { throw new SyntaxError('An invalid number or illegal string was specified.'); } _line = value; this.hasBeenReset = true; } })); Object.defineProperty(cue, 'lineAlign', extend({}, baseObj, { get: function () { return _lineAlign; }, set: function (value) { const setting = findAlignSetting(value); if (!setting) { throw new SyntaxError('An invalid or illegal string was specified.'); } _lineAlign = setting; this.hasBeenReset = true; } })); Object.defineProperty(cue, 'position', extend({}, baseObj, { get: function () { return _position; }, set: function (value) { if (value < 0 || value > 100) { throw new Error('Position must be between 0 and 100.'); } _position = value; this.hasBeenReset = true; } })); Object.defineProperty(cue, 'positionAlign', extend({}, baseObj, { get: function () { return _positionAlign; }, set: function (value) { const setting = findAlignSetting(value); if (!setting) { throw new SyntaxError('An invalid or illegal string was specified.'); } _positionAlign = setting; this.hasBeenReset = true; } })); Object.defineProperty(cue, 'size', extend({}, baseObj, { get: function () { return _size; }, set: function (value) { if (value < 0 || value > 100) { throw new Error('Size must be between 0 and 100.'); } _size = value; this.hasBeenReset = true; } })); Object.defineProperty(cue, 'align', extend({}, baseObj, { get: function () { return _align; }, set: function (value) { const setting = findAlignSetting(value); if (!setting) { throw new SyntaxError('An invalid or illegal string was specified.'); } _align = setting; this.hasBeenReset = true; } })); /** * Other <track> spec defined properties */ // http://www.whatwg.org/specs/web-apps/current-work/multipage/the-video-element.html#text-track-cue-display-state cue.displayState = undefined; } /** * VTTCue methods */ VTTCue.prototype.getCueAsHTML = function () { // Assume WebVTT.convertCueToDOMTree is on the global. const WebVTT = self.WebVTT; return WebVTT.convertCueToDOMTree(self, this.text); }; // this is a polyfill hack return VTTCue; })(); /* * Source: https://github.com/mozilla/vtt.js/blob/master/dist/vtt.js */ class StringDecoder { // eslint-disable-next-line @typescript-eslint/no-unused-vars decode(data, options) { if (!data) { return ''; } if (typeof data !== 'string') { throw new Error('Error - expected string data.'); } return decodeURIComponent(encodeURIComponent(data)); } } // Try to parse input as a time stamp. 
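// Illustrative values: parseTimeStamp('00:01:02.500') and parseTimeStamp('01:02.500')
// both resolve to 62.5 seconds; a leading field greater than 59 is interpreted as
// hours, per the hours-optional timestamp grammar handled below.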
function parseTimeStamp(input) { function computeSeconds(h, m, s, f) { return (h | 0) * 3600 + (m | 0) * 60 + (s | 0) + parseFloat(f || 0); } const m = input.match(/^(?:(\d+):)?(\d{2}):(\d{2})(\.\d+)?/); if (!m) { return null; } if (parseFloat(m[2]) > 59) { // Timestamp takes the form of [hours]:[minutes].[milliseconds] // First position is hours as it's over 59. return computeSeconds(m[2], m[3], 0, m[4]); } // Timestamp takes the form of [hours (optional)]:[minutes]:[seconds].[milliseconds] return computeSeconds(m[1], m[2], m[3], m[4]); } // A settings object holds key/value pairs and will ignore anything but the first // assignment to a specific key. class Settings { constructor() { this.values = Object.create(null); } // Only accept the first assignment to any key. set(k, v) { if (!this.get(k) && v !== '') { this.values[k] = v; } } // Return the value for a key, or a default value. // If 'defaultKey' is passed then 'dflt' is assumed to be an object with // a number of possible default values as properties where 'defaultKey' is // the key of the property that will be chosen; otherwise it's assumed to be // a single value. get(k, dflt, defaultKey) { if (defaultKey) { return this.has(k) ? this.values[k] : dflt[defaultKey]; } return this.has(k) ? this.values[k] : dflt; } // Check whether we have a value for a key. has(k) { return k in this.values; } // Accept a setting if its one of the given alternatives. alt(k, v, a) { for (let n = 0; n < a.length; ++n) { if (v === a[n]) { this.set(k, v); break; } } } // Accept a setting if its a valid (signed) integer. integer(k, v) { if (/^-?\d+$/.test(v)) { // integer this.set(k, parseInt(v, 10)); } } // Accept a setting if its a valid percentage. percent(k, v) { if (/^([\d]{1,3})(\.[\d]*)?%$/.test(v)) { const percent = parseFloat(v); if (percent >= 0 && percent <= 100) { this.set(k, percent); return true; } } return false; } } // Helper function to parse input into groups separated by 'groupDelim', and // interpret each group as a key/value pair separated by 'keyValueDelim'. function parseOptions(input, callback, keyValueDelim, groupDelim) { const groups = groupDelim ? input.split(groupDelim) : [input]; for (const i in groups) { if (typeof groups[i] !== 'string') { continue; } const kv = groups[i].split(keyValueDelim); if (kv.length !== 2) { continue; } const k = kv[0]; const v = kv[1]; callback(k, v); } } const defaults = new VTTCue(0, 0, ''); // 'middle' was changed to 'center' in the spec: https://github.com/w3c/webvtt/pull/244 // Safari doesn't yet support this change, but FF and Chrome do. const center = defaults.align === 'middle' ? 'middle' : 'center'; function parseCue(input, cue, regionList) { // Remember the original input if we need to throw an error. const oInput = input; // 4.1 WebVTT timestamp function consumeTimeStamp() { const ts = parseTimeStamp(input); if (ts === null) { throw new Error('Malformed timestamp: ' + oInput); } // Remove time stamp from input. input = input.replace(/^[^\sa-zA-Z-]+/, ''); return ts; } // 4.4.2 WebVTT cue settings function consumeCueSettings(input, cue) { const settings = new Settings(); parseOptions(input, function (k, v) { let vals; switch (k) { case 'region': // Find the last region we parsed with the same region id. 
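/*
 * Illustrative mapping performed by the cue-settings switch that follows, for a cue
 * line such as "00:01.000 --> 00:04.000 line:50% align:start position:20%" (the
 * settings part is split on whitespace by parseOptions, then each group on ':'):
 *
 *   'line:50%'      percent check passes, so cue.line = 50 and cue.snapToLines = false
 *   'align:start'   accepted by the allow list, so cue.align = 'start'
 *   'position:20%'  percent check passes, so cue.position = 20
 *
 * Settings that fail validation are silently dropped and the defaults further down
 * apply (region null, vertical '', size 100, align 'middle' or 'center' depending on
 * the VTTCue implementation detected above).
 */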
for (let i = regionList.length - 1; i >= 0; i--) { if (regionList[i].id === v) { settings.set(k, regionList[i].region); break; } } break; case 'vertical': settings.alt(k, v, ['rl', 'lr']); break; case 'line': vals = v.split(','); settings.integer(k, vals[0]); if (settings.percent(k, vals[0])) { settings.set('snapToLines', false); } settings.alt(k, vals[0], ['auto']); if (vals.length === 2) { settings.alt('lineAlign', vals[1], ['start', center, 'end']); } break; case 'position': vals = v.split(','); settings.percent(k, vals[0]); if (vals.length === 2) { settings.alt('positionAlign', vals[1], ['start', center, 'end', 'line-left', 'line-right', 'auto']); } break; case 'size': settings.percent(k, v); break; case 'align': settings.alt(k, v, ['start', center, 'end', 'left', 'right']); break; } }, /:/, /\s/); // Apply default values for any missing fields. cue.region = settings.get('region', null); cue.vertical = settings.get('vertical', ''); let line = settings.get('line', 'auto'); if (line === 'auto' && defaults.line === -1) { // set numeric line number for Safari line = -1; } cue.line = line; cue.lineAlign = settings.get('lineAlign', 'start'); cue.snapToLines = settings.get('snapToLines', true); cue.size = settings.get('size', 100); cue.align = settings.get('align', center); let position = settings.get('position', 'auto'); if (position === 'auto' && defaults.position === 50) { // set numeric position for Safari position = cue.align === 'start' || cue.align === 'left' ? 0 : cue.align === 'end' || cue.align === 'right' ? 100 : 50; } cue.position = position; } function skipWhitespace() { input = input.replace(/^\s+/, ''); } // 4.1 WebVTT cue timings. skipWhitespace(); cue.startTime = consumeTimeStamp(); // (1) collect cue start time skipWhitespace(); if (input.slice(0, 3) !== '-->') { // (3) next characters must match '-->' throw new Error("Malformed time stamp (time stamps must be separated by '-->'): " + oInput); } input = input.slice(3); skipWhitespace(); cue.endTime = consumeTimeStamp(); // (5) collect cue end time // 4.1 WebVTT cue settings list. skipWhitespace(); consumeCueSettings(input, cue); } function fixLineBreaks(input) { return input.replace(/<br(?: \/)?>/gi, '\n'); } class VTTParser { constructor() { this.state = 'INITIAL'; this.buffer = ''; this.decoder = new StringDecoder(); this.regionList = []; this.cue = null; this.oncue = void 0; this.onparsingerror = void 0; this.onflush = void 0; } parse(data) { const _this = this; // If there is no data then we won't decode it, but will just try to parse // whatever is in buffer already. This may occur in circumstances, for // example when flush() is called. if (data) { // Try to decode the data that we received. _this.buffer += _this.decoder.decode(data, { stream: true }); } function collectNextLine() { let buffer = _this.buffer; let pos = 0; buffer = fixLineBreaks(buffer); while (pos < buffer.length && buffer[pos] !== '\r' && buffer[pos] !== '\n') { ++pos; } const line = buffer.slice(0, pos); // Advance the buffer early in case we fail below. if (buffer[pos] === '\r') { ++pos; } if (buffer[pos] === '\n') { ++pos; } _this.buffer = buffer.slice(pos); return line; } // 3.2 WebVTT metadata header syntax function parseHeader(input) { parseOptions(input, function (k, v) { // switch (k) { // case 'region': // 3.3 WebVTT region metadata header syntax // console.log('parse region', v); // parseRegion(v); // break; // } }, /:/); } // 5.1 WebVTT file parsing. 
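/*
 * The try block below is a line-driven state machine (informal summary of the states
 * handled in the switch):
 *
 *   INITIAL  -> HEADER   once the 'WEBVTT' signature line is seen
 *   HEADER   -> ID       at the first blank line (optional metadata lines before it)
 *   ID       -> CUE      on the next non-blank line (optional cue id, then the '-->' timing line)
 *   CUE      -> CUETEXT  once timings and settings parse; a blank line or a new '-->' line
 *                        reports the cue and returns to ID
 *   NOTE, BADCUE and BADWEBVTT absorb comment blocks and malformed input
 */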
try { let line = ''; if (_this.state === 'INITIAL') { // We can't start parsing until we have the first line. if (!/\r\n|\n/.test(_this.buffer)) { return this; } line = collectNextLine(); // strip of UTF-8 BOM if any // https://en.wikipedia.org/wiki/Byte_order_mark#UTF-8 const m = line.match(/^()?WEBVTT([ \t].*)?$/); if (!(m != null && m[0])) { throw new Error('Malformed WebVTT signature.'); } _this.state = 'HEADER'; } let alreadyCollectedLine = false; while (_this.buffer) { // We can't parse a line until we have the full line. if (!/\r\n|\n/.test(_this.buffer)) { return this; } if (!alreadyCollectedLine) { line = collectNextLine(); } else { alreadyCollectedLine = false; } switch (_this.state) { case 'HEADER': // 13-18 - Allow a header (metadata) under the WEBVTT line. if (/:/.test(line)) { parseHeader(line); } else if (!line) { // An empty line terminates the header and starts the body (cues). _this.state = 'ID'; } continue; case 'NOTE': // Ignore NOTE blocks. if (!line) { _this.state = 'ID'; } continue; case 'ID': // Check for the start of NOTE blocks. if (/^NOTE($|[ \t])/.test(line)) { _this.state = 'NOTE'; break; } // 19-29 - Allow any number of line terminators, then initialize new cue values. if (!line) { continue; } _this.cue = new VTTCue(0, 0, ''); _this.state = 'CUE'; // 30-39 - Check if self line contains an optional identifier or timing data. if (line.indexOf('-->') === -1) { _this.cue.id = line; continue; } // Process line as start of a cue. /* falls through */ case 'CUE': // 40 - Collect cue timings and settings. if (!_this.cue) { _this.state = 'BADCUE'; continue; } try { parseCue(line, _this.cue, _this.regionList); } catch (e) { // In case of an error ignore rest of the cue. _this.cue = null; _this.state = 'BADCUE'; continue; } _this.state = 'CUETEXT'; continue; case 'CUETEXT': { const hasSubstring = line.indexOf('-->') !== -1; // 34 - If we have an empty line then report the cue. // 35 - If we have the special substring '-->' then report the cue, // but do not collect the line as we need to process the current // one as a new cue. if (!line || hasSubstring && (alreadyCollectedLine = true)) { // We are done parsing self cue. if (_this.oncue && _this.cue) { _this.oncue(_this.cue); } _this.cue = null; _this.state = 'ID'; continue; } if (_this.cue === null) { continue; } if (_this.cue.text) { _this.cue.text += '\n'; } _this.cue.text += line; } continue; case 'BADCUE': // 54-62 - Collect and discard the remaining cue. if (!line) { _this.state = 'ID'; } } } } catch (e) { // If we are currently parsing a cue, report what we have. if (_this.state === 'CUETEXT' && _this.cue && _this.oncue) { _this.oncue(_this.cue); } _this.cue = null; // Enter BADWEBVTT state if header was not parsed correctly otherwise // another exception occurred so enter BADCUE state. _this.state = _this.state === 'INITIAL' ? 'BADWEBVTT' : 'BADCUE'; } return this; } flush() { const _this = this; try { // Finish decoding the stream. // _this.buffer += _this.decoder.decode(); // Synthesize the end of the current cue or region. if (_this.cue || _this.state === 'HEADER') { _this.buffer += '\n\n'; _this.parse(); } // If we've flushed, parsed, and we're still on the INITIAL state then // that means we don't have enough of the stream to parse the first // line. 
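/*
 * Minimal usage sketch for this parser (illustrative; parseWebVTT() further down
 * drives it the same way):
 *
 *   const p = new VTTParser();
 *   p.oncue = cue => console.log(cue.startTime, cue.endTime, cue.text);
 *   p.onparsingerror = e => console.warn('bad vtt', e);
 *   p.onflush = () => console.log('done');
 *   p.parse('WEBVTT\n\n');
 *   p.parse('00:00.000 --> 00:01.000\nhello\n');
 *   p.flush(); // appends a trailing blank line so the last cue is reported
 */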
if (_this.state === 'INITIAL' || _this.state === 'BADWEBVTT') { throw new Error('Malformed WebVTT signature.'); } } catch (e) { if (_this.onparsingerror) { _this.onparsingerror(e); } } if (_this.onflush) { _this.onflush(); } return this; } } const LINEBREAKS = /\r\n|\n\r|\n|\r/g; // String.prototype.startsWith is not supported in IE11 const startsWith = function startsWith(inputString, searchString, position = 0) { return inputString.slice(position, position + searchString.length) === searchString; }; const cueString2millis = function cueString2millis(timeString) { let ts = parseInt(timeString.slice(-3)); const secs = parseInt(timeString.slice(-6, -4)); const mins = parseInt(timeString.slice(-9, -7)); const hours = timeString.length > 9 ? parseInt(timeString.substring(0, timeString.indexOf(':'))) : 0; if (!isFiniteNumber(ts) || !isFiniteNumber(secs) || !isFiniteNumber(mins) || !isFiniteNumber(hours)) { throw Error(`Malformed X-TIMESTAMP-MAP: Local:${timeString}`); } ts += 1000 * secs; ts += 60 * 1000 * mins; ts += 60 * 60 * 1000 * hours; return ts; }; // From https://github.com/darkskyapp/string-hash const hash = function hash(text) { let _hash = 5381; let i = text.length; while (i) { _hash = _hash * 33 ^ text.charCodeAt(--i); } return (_hash >>> 0).toString(); }; // Create a unique hash id for a cue based on start/end times and text. // This helps timeline-controller to avoid showing repeated captions. function generateCueId(startTime, endTime, text) { return hash(startTime.toString()) + hash(endTime.toString()) + hash(text); } const calculateOffset = function calculateOffset(vttCCs, cc, presentationTime) { let currCC = vttCCs[cc]; let prevCC = vttCCs[currCC.prevCC]; // This is the first discontinuity or cues have been processed since the last discontinuity // Offset = current discontinuity time if (!prevCC || !prevCC.new && currCC.new) { vttCCs.ccOffset = vttCCs.presentationOffset = currCC.start; currCC.new = false; return; } // There have been discontinuities since cues were last parsed. // Offset = time elapsed while ((_prevCC = prevCC) != null && _prevCC.new) { var _prevCC; vttCCs.ccOffset += currCC.start - prevCC.start; currCC.new = false; currCC = prevCC; prevCC = vttCCs[currCC.prevCC]; } vttCCs.presentationOffset = presentationTime; }; function parseWebVTT(vttByteArray, initPTS, vttCCs, cc, timeOffset, callBack, errorCallBack) { const parser = new VTTParser(); // Convert byteArray into string, replacing any somewhat exotic linefeeds with "\n", then split on that character. // Uint8Array.prototype.reduce is not implemented in IE11 const vttLines = utf8ArrayToStr(new Uint8Array(vttByteArray)).trim().replace(LINEBREAKS, '\n').split('\n'); const cues = []; const init90kHz = initPTS ? toMpegTsClockFromTimescale(initPTS.baseTime, initPTS.timescale) : 0; let cueTime = '00:00.000'; let timestampMapMPEGTS = 0; let timestampMapLOCAL = 0; let parsingError; let inHeader = true; parser.oncue = function (cue) { // Adjust cue timing; clamp cues to start no earlier than - and drop cues that don't end after - 0 on timeline. 
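/*
 * Worked example for cueString2millis() above, which converts the LOCAL part of an
 * X-TIMESTAMP-MAP header into milliseconds (illustrative values):
 *
 *   cueString2millis('01:02:03.456')
 *     // 456 + 3 * 1000 + 2 * 60000 + 1 * 3600000 = 3723456 ms
 *   cueString2millis('00:00.000')   // 0 (the hours part is optional)
 *
 * parseWebVTT() divides the result by 1000 to obtain timestampMapLOCAL in seconds.
 */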
const currCC = vttCCs[cc]; let cueOffset = vttCCs.ccOffset; // Calculate subtitle PTS offset const webVttMpegTsMapOffset = (timestampMapMPEGTS - init90kHz) / 90000; // Update offsets for new discontinuities if (currCC != null && currCC.new) { if (timestampMapLOCAL !== undefined) { // When local time is provided, offset = discontinuity start time - local time cueOffset = vttCCs.ccOffset = currCC.start; } else { calculateOffset(vttCCs, cc, webVttMpegTsMapOffset); } } if (webVttMpegTsMapOffset) { if (!initPTS) { parsingError = new Error('Missing initPTS for VTT MPEGTS'); return; } // If we have MPEGTS, offset = presentation time + discontinuity offset cueOffset = webVttMpegTsMapOffset - vttCCs.presentationOffset; } const duration = cue.endTime - cue.startTime; const startTime = normalizePts((cue.startTime + cueOffset - timestampMapLOCAL) * 90000, timeOffset * 90000) / 90000; cue.startTime = Math.max(startTime, 0); cue.endTime = Math.max(startTime + duration, 0); //trim trailing webvtt block whitespaces const text = cue.text.trim(); // Fix encoding of special characters cue.text = decodeURIComponent(encodeURIComponent(text)); // If the cue was not assigned an id from the VTT file (line above the content), create one. if (!cue.id) { cue.id = generateCueId(cue.startTime, cue.endTime, text); } if (cue.endTime > 0) { cues.push(cue); } }; parser.onparsingerror = function (error) { parsingError = error; }; parser.onflush = function () { if (parsingError) { errorCallBack(parsingError); return; } callBack(cues); }; // Go through contents line by line. vttLines.forEach(line => { if (inHeader) { // Look for X-TIMESTAMP-MAP in header. if (startsWith(line, 'X-TIMESTAMP-MAP=')) { // Once found, no more are allowed anyway, so stop searching. inHeader = false; // Extract LOCAL and MPEGTS. line.slice(16).split(',').forEach(timestamp => { if (startsWith(timestamp, 'LOCAL:')) { cueTime = timestamp.slice(6); } else if (startsWith(timestamp, 'MPEGTS:')) { timestampMapMPEGTS = parseInt(timestamp.slice(7)); } }); try { // Convert cue time to seconds timestampMapLOCAL = cueString2millis(cueTime) / 1000; } catch (error) { parsingError = error; } // Return without parsing X-TIMESTAMP-MAP line. return; } else if (line === '') { inHeader = false; } } // Parse line by default. 
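/*
 * Illustrative header handling: for a WebVTT fragment that starts with
 *   X-TIMESTAMP-MAP=LOCAL:00:00:00.000,MPEGTS:900000
 * the loop above leaves cueTime = '00:00:00.000' (so timestampMapLOCAL = 0) and
 * timestampMapMPEGTS = 900000, i.e. an MPEG-TS offset of 900000 / 90000 = 10 s that
 * oncue() combines with the stream's initPTS to shift every cue onto the playback
 * timeline.
 */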
parser.parse(line + '\n'); }); parser.flush(); } const IMSC1_CODEC = 'stpp.ttml.im1t'; // Time format: h:m:s:frames(.subframes) const HMSF_REGEX = /^(\d{2,}):(\d{2}):(\d{2}):(\d{2})\.?(\d+)?$/; // Time format: hours, minutes, seconds, milliseconds, frames, ticks const TIME_UNIT_REGEX = /^(\d*(?:\.\d*)?)(h|m|s|ms|f|t)$/; const textAlignToLineAlign = { left: 'start', center: 'center', right: 'end', start: 'start', end: 'end' }; function parseIMSC1(payload, initPTS, callBack, errorCallBack) { const results = findBox(new Uint8Array(payload), ['mdat']); if (results.length === 0) { errorCallBack(new Error('Could not parse IMSC1 mdat')); return; } const ttmlList = results.map(mdat => utf8ArrayToStr(mdat)); const syncTime = toTimescaleFromScale(initPTS.baseTime, 1, initPTS.timescale); try { ttmlList.forEach(ttml => callBack(parseTTML(ttml, syncTime))); } catch (error) { errorCallBack(error); } } function parseTTML(ttml, syncTime) { const parser = new DOMParser(); const xmlDoc = parser.parseFromString(ttml, 'text/xml'); const tt = xmlDoc.getElementsByTagName('tt')[0]; if (!tt) { throw new Error('Invalid ttml'); } const defaultRateInfo = { frameRate: 30, subFrameRate: 1, frameRateMultiplier: 0, tickRate: 0 }; const rateInfo = Object.keys(defaultRateInfo).reduce((result, key) => { result[key] = tt.getAttribute(`ttp:${key}`) || defaultRateInfo[key]; return result; }, {}); const trim = tt.getAttribute('xml:space') !== 'preserve'; const styleElements = collectionToDictionary(getElementCollection(tt, 'styling', 'style')); const regionElements = collectionToDictionary(getElementCollection(tt, 'layout', 'region')); const cueElements = getElementCollection(tt, 'body', '[begin]'); return [].map.call(cueElements, cueElement => { const cueText = getTextContent(cueElement, trim); if (!cueText || !cueElement.hasAttribute('begin')) { return null; } const startTime = parseTtmlTime(cueElement.getAttribute('begin'), rateInfo); const duration = parseTtmlTime(cueElement.getAttribute('dur'), rateInfo); let endTime = parseTtmlTime(cueElement.getAttribute('end'), rateInfo); if (startTime === null) { throw timestampParsingError(cueElement); } if (endTime === null) { if (duration === null) { throw timestampParsingError(cueElement); } endTime = startTime + duration; } const cue = new VTTCue(startTime - syncTime, endTime - syncTime, cueText); cue.id = generateCueId(cue.startTime, cue.endTime, cue.text); const region = regionElements[cueElement.getAttribute('region')]; const style = styleElements[cueElement.getAttribute('style')]; // Apply styles to cue const styles = getTtmlStyles(region, style, styleElements); const { textAlign } = styles; if (textAlign) { // cue.positionAlign not settable in FF~2016 const lineAlign = textAlignToLineAlign[textAlign]; if (lineAlign) { cue.lineAlign = lineAlign; } cue.align = textAlign; } _extends(cue, styles); return cue; }).filter(cue => cue !== null); } function getElementCollection(fromElement, parentName, childName) { const parent = fromElement.getElementsByTagName(parentName)[0]; if (parent) { return [].slice.call(parent.querySelectorAll(childName)); } return []; } function collectionToDictionary(elementsWithId) { return elementsWithId.reduce((dict, element) => { const id = element.getAttribute('xml:id'); if (id) { dict[id] = element; } return dict; }, {}); } function getTextContent(element, trim) { return [].slice.call(element.childNodes).reduce((str, node, i) => { var _node$childNodes; if (node.nodeName === 'br' && i) { return str + '\n'; } if ((_node$childNodes = node.childNodes) != 
null && _node$childNodes.length) { return getTextContent(node, trim); } else if (trim) { return str + node.textContent.trim().replace(/\s+/g, ' '); } return str + node.textContent; }, ''); } function getTtmlStyles(region, style, styleElements) { const ttsNs = 'http://www.w3.org/ns/ttml#styling'; let regionStyle = null; const styleAttributes = ['displayAlign', 'textAlign', 'color', 'backgroundColor', 'fontSize', 'fontFamily' // 'fontWeight', // 'lineHeight', // 'wrapOption', // 'fontStyle', // 'direction', // 'writingMode' ]; const regionStyleName = region != null && region.hasAttribute('style') ? region.getAttribute('style') : null; if (regionStyleName && styleElements.hasOwnProperty(regionStyleName)) { regionStyle = styleElements[regionStyleName]; } return styleAttributes.reduce((styles, name) => { const value = getAttributeNS(style, ttsNs, name) || getAttributeNS(region, ttsNs, name) || getAttributeNS(regionStyle, ttsNs, name); if (value) { styles[name] = value; } return styles; }, {}); } function getAttributeNS(element, ns, name) { if (!element) { return null; } return element.hasAttributeNS(ns, name) ? element.getAttributeNS(ns, name) : null; } function timestampParsingError(node) { return new Error(`Could not parse ttml timestamp ${node}`); } function parseTtmlTime(timeAttributeValue, rateInfo) { if (!timeAttributeValue) { return null; } let seconds = parseTimeStamp(timeAttributeValue); if (seconds === null) { if (HMSF_REGEX.test(timeAttributeValue)) { seconds = parseHoursMinutesSecondsFrames(timeAttributeValue, rateInfo); } else if (TIME_UNIT_REGEX.test(timeAttributeValue)) { seconds = parseTimeUnits(timeAttributeValue, rateInfo); } } return seconds; } function parseHoursMinutesSecondsFrames(timeAttributeValue, rateInfo) { const m = HMSF_REGEX.exec(timeAttributeValue); const frames = (m[4] | 0) + (m[5] | 0) / rateInfo.subFrameRate; return (m[1] | 0) * 3600 + (m[2] | 0) * 60 + (m[3] | 0) + frames / rateInfo.frameRate; } function parseTimeUnits(timeAttributeValue, rateInfo) { const m = TIME_UNIT_REGEX.exec(timeAttributeValue); const value = Number(m[1]); const unit = m[2]; switch (unit) { case 'h': return value * 3600; case 'm': return value * 60; case 'ms': return value * 1000; case 'f': return value / rateInfo.frameRate; case 't': return value / rateInfo.tickRate; } return value; } class TimelineController { constructor(hls) { this.hls = void 0; this.media = null; this.config = void 0; this.enabled = true; this.Cues = void 0; this.textTracks = []; this.tracks = []; this.initPTS = []; this.unparsedVttFrags = []; this.captionsTracks = {}; this.nonNativeCaptionsTracks = {}; this.cea608Parser1 = void 0; this.cea608Parser2 = void 0; this.lastSn = -1; this.lastPartIndex = -1; this.prevCC = -1; this.vttCCs = newVTTCCs(); this.captionsProperties = void 0; this.hls = hls; this.config = hls.config; this.Cues = hls.config.cueHandler; this.captionsProperties = { textTrack1: { label: this.config.captionsTextTrack1Label, languageCode: this.config.captionsTextTrack1LanguageCode }, textTrack2: { label: this.config.captionsTextTrack2Label, languageCode: this.config.captionsTextTrack2LanguageCode }, textTrack3: { label: this.config.captionsTextTrack3Label, languageCode: this.config.captionsTextTrack3LanguageCode }, textTrack4: { label: this.config.captionsTextTrack4Label, languageCode: this.config.captionsTextTrack4LanguageCode } }; if (this.config.enableCEA708Captions) { const channel1 = new OutputFilter(this, 'textTrack1'); const channel2 = new OutputFilter(this, 'textTrack2'); const channel3 = new 
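/*
 * Sketch of the TTML time expressions accepted by parseTtmlTime() above (values are
 * illustrative and assume the default rateInfo of 30 fps):
 *
 *   '00:00:05.250'  -> 5.25    (clock time, handled by parseTimeStamp)
 *   '02:00:30:15'   -> 7230.5  (hh:mm:ss:ff, 15 frames at 30 fps = 0.5 s)
 *   '1.5h'          -> 5400    (offset time, units h, m, s, ms, f and t via parseTimeUnits)
 *   '30f'           -> 1       (30 frames at 30 fps)
 */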
OutputFilter(this, 'textTrack3'); const channel4 = new OutputFilter(this, 'textTrack4'); this.cea608Parser1 = new Cea608Parser(1, channel1, channel2); this.cea608Parser2 = new Cea608Parser(3, channel3, channel4); } hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this); hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this); hls.on(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this); hls.on(Events.FRAG_LOADING, this.onFragLoading, this); hls.on(Events.FRAG_LOADED, this.onFragLoaded, this); hls.on(Events.FRAG_PARSING_USERDATA, this.onFragParsingUserdata, this); hls.on(Events.FRAG_DECRYPTED, this.onFragDecrypted, this); hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this); hls.on(Events.SUBTITLE_TRACKS_CLEARED, this.onSubtitleTracksCleared, this); hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this); } destroy() { const { hls } = this; hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this); hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this); hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this); hls.off(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this); hls.off(Events.FRAG_LOADING, this.onFragLoading, this); hls.off(Events.FRAG_LOADED, this.onFragLoaded, this); hls.off(Events.FRAG_PARSING_USERDATA, this.onFragParsingUserdata, this); hls.off(Events.FRAG_DECRYPTED, this.onFragDecrypted, this); hls.off(Events.INIT_PTS_FOUND, this.onInitPtsFound, this); hls.off(Events.SUBTITLE_TRACKS_CLEARED, this.onSubtitleTracksCleared, this); hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this); // @ts-ignore this.hls = this.config = this.cea608Parser1 = this.cea608Parser2 = null; } addCues(trackName, startTime, endTime, screen, cueRanges) { // skip cues which overlap more than 50% with previously parsed time ranges let merged = false; for (let i = cueRanges.length; i--;) { const cueRange = cueRanges[i]; const overlap = intersection(cueRange[0], cueRange[1], startTime, endTime); if (overlap >= 0) { cueRange[0] = Math.min(cueRange[0], startTime); cueRange[1] = Math.max(cueRange[1], endTime); merged = true; if (overlap / (endTime - startTime) > 0.5) { return; } } } if (!merged) { cueRanges.push([startTime, endTime]); } if (this.config.renderTextTracksNatively) { const track = this.captionsTracks[trackName]; this.Cues.newCue(track, startTime, endTime, screen); } else { const cues = this.Cues.newCue(null, startTime, endTime, screen); this.hls.trigger(Events.CUES_PARSED, { type: 'captions', cues, track: trackName }); } } // Triggered when an initial PTS is found; used for synchronisation of WebVTT. onInitPtsFound(event, { frag, id, initPTS, timescale }) { const { unparsedVttFrags } = this; if (id === 'main') { this.initPTS[frag.cc] = { baseTime: initPTS, timescale }; } // Due to asynchronous processing, initial PTS may arrive later than the first VTT fragments are loaded. // Parse any unparsed fragments upon receiving the initial PTS. 
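/*
 * Illustrative numbers for the 50% overlap rule in addCues() above: with an existing
 * range [0, 4], a new caption spanning [3, 5] overlaps by 1 s out of its 2 s duration
 * (exactly 50%), so it is still added and the stored range grows to [0, 5]; a later
 * caption spanning [1, 3] overlaps by 2 s out of 2 s (more than 50%), so it is
 * treated as a repeat and dropped.
 */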
if (unparsedVttFrags.length) { this.unparsedVttFrags = []; unparsedVttFrags.forEach(frag => { this.onFragLoaded(Events.FRAG_LOADED, frag); }); } } getExistingTrack(trackName) { const { media } = this; if (media) { for (let i = 0; i < media.textTracks.length; i++) { const textTrack = media.textTracks[i]; if (textTrack[trackName]) { return textTrack; } } } return null; } createCaptionsTrack(trackName) { if (this.config.renderTextTracksNatively) { this.createNativeTrack(trackName); } else { this.createNonNativeTrack(trackName); } } createNativeTrack(trackName) { if (this.captionsTracks[trackName]) { return; } const { captionsProperties, captionsTracks, media } = this; const { label, languageCode } = captionsProperties[trackName]; // Enable reuse of existing text track. const existingTrack = this.getExistingTrack(trackName); if (!existingTrack) { const textTrack = this.createTextTrack('captions', label, languageCode); if (textTrack) { // Set a special property on the track so we know it's managed by Hls.js textTrack[trackName] = true; captionsTracks[trackName] = textTrack; } } else { captionsTracks[trackName] = existingTrack; clearCurrentCues(captionsTracks[trackName]); sendAddTrackEvent(captionsTracks[trackName], media); } } createNonNativeTrack(trackName) { if (this.nonNativeCaptionsTracks[trackName]) { return; } // Create a list of a single track for the provider to consume const trackProperties = this.captionsProperties[trackName]; if (!trackProperties) { return; } const label = trackProperties.label; const track = { _id: trackName, label, kind: 'captions', default: trackProperties.media ? !!trackProperties.media.default : false, closedCaptions: trackProperties.media }; this.nonNativeCaptionsTracks[trackName] = track; this.hls.trigger(Events.NON_NATIVE_TEXT_TRACKS_FOUND, { tracks: [track] }); } createTextTrack(kind, label, lang) { const media = this.media; if (!media) { return; } return media.addTextTrack(kind, label, lang); } onMediaAttaching(event, data) { this.media = data.media; this._cleanTracks(); } onMediaDetaching() { const { captionsTracks } = this; Object.keys(captionsTracks).forEach(trackName => { clearCurrentCues(captionsTracks[trackName]); delete captionsTracks[trackName]; }); this.nonNativeCaptionsTracks = {}; } onManifestLoading() { this.lastSn = -1; // Detect discontinuity in fragment parsing this.lastPartIndex = -1; this.prevCC = -1; this.vttCCs = newVTTCCs(); // Detect discontinuity in subtitle manifests this._cleanTracks(); this.tracks = []; this.captionsTracks = {}; this.nonNativeCaptionsTracks = {}; this.textTracks = []; this.unparsedVttFrags = []; this.initPTS = []; if (this.cea608Parser1 && this.cea608Parser2) { this.cea608Parser1.reset(); this.cea608Parser2.reset(); } } _cleanTracks() { // clear outdated subtitles const { media } = this; if (!media) { return; } const textTracks = media.textTracks; if (textTracks) { for (let i = 0; i < textTracks.length; i++) { clearCurrentCues(textTracks[i]); } } } onSubtitleTracksUpdated(event, data) { const tracks = data.subtitleTracks || []; const hasIMSC1 = tracks.some(track => track.textCodec === IMSC1_CODEC); if (this.config.enableWebVTT || hasIMSC1 && this.config.enableIMSC1) { const listIsIdentical = subtitleOptionsIdentical(this.tracks, tracks); if (listIsIdentical) { this.tracks = tracks; return; } this.textTracks = []; this.tracks = tracks; if (this.config.renderTextTracksNatively) { const inUseTracks = this.media ? 
this.media.textTracks : null; this.tracks.forEach((track, index) => { let textTrack; if (inUseTracks && index < inUseTracks.length) { let inUseTrack = null; for (let i = 0; i < inUseTracks.length; i++) { if (canReuseVttTextTrack(inUseTracks[i], track)) { inUseTrack = inUseTracks[i]; break; } } // Reuse tracks with the same label, but do not reuse 608/708 tracks if (inUseTrack) { textTrack = inUseTrack; } } if (textTrack) { clearCurrentCues(textTrack); } else { const textTrackKind = this._captionsOrSubtitlesFromCharacteristics(track); textTrack = this.createTextTrack(textTrackKind, track.name, track.lang); if (textTrack) { textTrack.mode = 'disabled'; } } if (textTrack) { textTrack.groupId = track.groupId; this.textTracks.push(textTrack); } }); } else if (this.tracks.length) { // Create a list of tracks for the provider to consume const tracksList = this.tracks.map(track => { return { label: track.name, kind: track.type.toLowerCase(), default: track.default, subtitleTrack: track }; }); this.hls.trigger(Events.NON_NATIVE_TEXT_TRACKS_FOUND, { tracks: tracksList }); } } } _captionsOrSubtitlesFromCharacteristics(track) { if (track.attrs.CHARACTERISTICS) { const transcribesSpokenDialog = /transcribes-spoken-dialog/gi.test(track.attrs.CHARACTERISTICS); const describesMusicAndSound = /describes-music-and-sound/gi.test(track.attrs.CHARACTERISTICS); if (transcribesSpokenDialog && describesMusicAndSound) { return 'captions'; } } return 'subtitles'; } onManifestLoaded(event, data) { if (this.config.enableCEA708Captions && data.captions) { data.captions.forEach(captionsTrack => { const instreamIdMatch = /(?:CC|SERVICE)([1-4])/.exec(captionsTrack.instreamId); if (!instreamIdMatch) { return; } const trackName = `textTrack${instreamIdMatch[1]}`; const trackProperties = this.captionsProperties[trackName]; if (!trackProperties) { return; } trackProperties.label = captionsTrack.name; if (captionsTrack.lang) { // optional attribute trackProperties.languageCode = captionsTrack.lang; } trackProperties.media = captionsTrack; }); } } closedCaptionsForLevel(frag) { const level = this.hls.levels[frag.level]; return level == null ? void 0 : level.attrs['CLOSED-CAPTIONS']; } onFragLoading(event, data) { const { cea608Parser1, cea608Parser2, lastSn, lastPartIndex } = this; if (!this.enabled || !(cea608Parser1 && cea608Parser2)) { return; } // if this frag isn't contiguous, clear the parser so cues with bad start/end times aren't added to the textTrack if (data.frag.type === PlaylistLevelType.MAIN) { var _data$part$index, _data$part; const sn = data.frag.sn; const partIndex = (_data$part$index = data == null ? void 0 : (_data$part = data.part) == null ? void 0 : _data$part.index) != null ? _data$part$index : -1; if (!(sn === lastSn + 1 || sn === lastSn && partIndex === lastPartIndex + 1)) { cea608Parser1.reset(); cea608Parser2.reset(); } this.lastSn = sn; this.lastPartIndex = partIndex; } } onFragLoaded(event, data) { const { frag, payload } = data; if (frag.type === PlaylistLevelType.SUBTITLE) { // If fragment is subtitle type, parse as WebVTT. if (payload.byteLength) { const decryptData = frag.decryptdata; // fragment after decryption has a stats object const decrypted = ('stats' in data); // If the subtitles are not encrypted, parse VTTs now. Otherwise, we need to wait. 
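/*
 * Example of the contiguity check in onFragLoading() above (illustrative): after
 * loading sn 10 (lastSn = 10, lastPartIndex = -1), sn 11 or part 0 of sn 10 keeps the
 * CEA-608 parser state, while jumping to sn 13 (for example after a seek) resets
 * cea608Parser1 and cea608Parser2 so stale captions cannot pick up timestamps from
 * the wrong fragment.
 */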
if (decryptData == null || !decryptData.encrypted || decrypted) { const trackPlaylistMedia = this.tracks[frag.level]; const vttCCs = this.vttCCs; if (!vttCCs[frag.cc]) { vttCCs[frag.cc] = { start: frag.start, prevCC: this.prevCC, new: true }; this.prevCC = frag.cc; } if (trackPlaylistMedia && trackPlaylistMedia.textCodec === IMSC1_CODEC) { this._parseIMSC1(frag, payload); } else { this._parseVTTs(data); } } } else { // In case there is no payload, finish unsuccessfully. this.hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, { success: false, frag, error: new Error('Empty subtitle payload') }); } } } _parseIMSC1(frag, payload) { const hls = this.hls; parseIMSC1(payload, this.initPTS[frag.cc], cues => { this._appendCues(cues, frag.level); hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, { success: true, frag: frag }); }, error => { logger.log(`Failed to parse IMSC1: ${error}`); hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, { success: false, frag: frag, error }); }); } _parseVTTs(data) { var _frag$initSegment; const { frag, payload } = data; // We need an initial synchronisation PTS. Store fragments as long as none has arrived const { initPTS, unparsedVttFrags } = this; const maxAvCC = initPTS.length - 1; if (!initPTS[frag.cc] && maxAvCC === -1) { unparsedVttFrags.push(data); return; } const hls = this.hls; // Parse the WebVTT file contents. const payloadWebVTT = (_frag$initSegment = frag.initSegment) != null && _frag$initSegment.data ? appendUint8Array(frag.initSegment.data, new Uint8Array(payload)) : payload; parseWebVTT(payloadWebVTT, this.initPTS[frag.cc], this.vttCCs, frag.cc, frag.start, cues => { this._appendCues(cues, frag.level); hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, { success: true, frag: frag }); }, error => { const missingInitPTS = error.message === 'Missing initPTS for VTT MPEGTS'; if (missingInitPTS) { unparsedVttFrags.push(data); } else { this._fallbackToIMSC1(frag, payload); } // Something went wrong while parsing. Trigger event with success false. logger.log(`Failed to parse VTT cue: ${error}`); if (missingInitPTS && maxAvCC > frag.cc) { return; } hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, { success: false, frag: frag, error }); }); } _fallbackToIMSC1(frag, payload) { // If textCodec is unknown, try parsing as IMSC1. Set textCodec based on the result const trackPlaylistMedia = this.tracks[frag.level]; if (!trackPlaylistMedia.textCodec) { parseIMSC1(payload, this.initPTS[frag.cc], () => { trackPlaylistMedia.textCodec = IMSC1_CODEC; this._parseIMSC1(frag, payload); }, () => { trackPlaylistMedia.textCodec = 'wvtt'; }); } } _appendCues(cues, fragLevel) { const hls = this.hls; if (this.config.renderTextTracksNatively) { const textTrack = this.textTracks[fragLevel]; // WebVTTParser.parse is an async method and if the currently selected text track mode is set to "disabled" // before parsing is done then don't try to access currentTrack.cues.getCueById as cues will be null // and trying to access getCueById method of cues will throw an exception // Because we check if the mode is disabled, we can force check `cues` below. They can't be null. if (!textTrack || textTrack.mode === 'disabled') { return; } cues.forEach(cue => addCueToTrack(textTrack, cue)); } else { const currentTrack = this.tracks[fragLevel]; if (!currentTrack) { return; } const track = currentTrack.default ? 
'default' : 'subtitles' + fragLevel; hls.trigger(Events.CUES_PARSED, { type: 'subtitles', cues, track }); } } onFragDecrypted(event, data) { const { frag } = data; if (frag.type === PlaylistLevelType.SUBTITLE) { this.onFragLoaded(Events.FRAG_LOADED, data); } } onSubtitleTracksCleared() { this.tracks = []; this.captionsTracks = {}; } onFragParsingUserdata(event, data) { const { cea608Parser1, cea608Parser2 } = this; if (!this.enabled || !(cea608Parser1 && cea608Parser2)) { return; } const { frag, samples } = data; if (frag.type === PlaylistLevelType.MAIN && this.closedCaptionsForLevel(frag) === 'NONE') { return; } // If the event contains captions (found in the bytes property), push all bytes into the parser immediately // It will create the proper timestamps based on the PTS value for (let i = 0; i < samples.length; i++) { const ccBytes = samples[i].bytes; if (ccBytes) { const ccdatas = this.extractCea608Data(ccBytes); cea608Parser1.addData(samples[i].pts, ccdatas[0]); cea608Parser2.addData(samples[i].pts, ccdatas[1]); } } } onBufferFlushing(event, { startOffset, endOffset, endOffsetSubtitles, type }) { const { media } = this; if (!media || media.currentTime < endOffset) { return; } // Clear 608 caption cues from the captions TextTracks when the video back buffer is flushed // Forward cues are never removed because we can loose streamed 608 content from recent fragments if (!type || type === 'video') { const { captionsTracks } = this; Object.keys(captionsTracks).forEach(trackName => removeCuesInRange(captionsTracks[trackName], startOffset, endOffset)); } if (this.config.renderTextTracksNatively) { // Clear VTT/IMSC1 subtitle cues from the subtitle TextTracks when the back buffer is flushed if (startOffset === 0 && endOffsetSubtitles !== undefined) { const { textTracks } = this; Object.keys(textTracks).forEach(trackName => removeCuesInRange(textTracks[trackName], startOffset, endOffsetSubtitles)); } } } extractCea608Data(byteArray) { const actualCCBytes = [[], []]; const count = byteArray[0] & 0x1f; let position = 2; for (let j = 0; j < count; j++) { const tmpByte = byteArray[position++]; const ccbyte1 = 0x7f & byteArray[position++]; const ccbyte2 = 0x7f & byteArray[position++]; if (ccbyte1 === 0 && ccbyte2 === 0) { continue; } const ccValid = (0x04 & tmpByte) !== 0; // Support all four channels if (ccValid) { const ccType = 0x03 & tmpByte; if (0x00 /* CEA608 field1*/ === ccType || 0x01 /* CEA608 field2*/ === ccType) { // Exclude CEA708 CC data. 
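/*
 * Byte-level sketch (illustrative): for a cc_data triplet 0xFC 0x94 0x2C the header
 * byte 0xFC has cc_valid set (0xFC & 0x04) and cc_type 0 (0xFC & 0x03), so the two
 * payload bytes are masked to 7 bits (0x94 & 0x7f = 0x14, 0x2C stays 0x2C) and pushed
 * into actualCCBytes[0] for cea608Parser1; cc_type 1 feeds the second field, while
 * cc_type 2 and 3 (CEA-708 packet data) are skipped here.
 */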
actualCCBytes[ccType].push(ccbyte1); actualCCBytes[ccType].push(ccbyte2); } } } return actualCCBytes; } } function canReuseVttTextTrack(inUseTrack, manifestTrack) { return !!inUseTrack && inUseTrack.label === manifestTrack.name && !(inUseTrack.textTrack1 || inUseTrack.textTrack2); } function intersection(x1, x2, y1, y2) { return Math.min(x2, y2) - Math.max(x1, y1); } function newVTTCCs() { return { ccOffset: 0, presentationOffset: 0, 0: { start: 0, prevCC: -1, new: true } }; } /* * cap stream level to media size dimension controller */ class CapLevelController { constructor(hls) { this.hls = void 0; this.autoLevelCapping = void 0; this.firstLevel = void 0; this.media = void 0; this.restrictedLevels = void 0; this.timer = void 0; this.clientRect = void 0; this.streamController = void 0; this.hls = hls; this.autoLevelCapping = Number.POSITIVE_INFINITY; this.firstLevel = -1; this.media = null; this.restrictedLevels = []; this.timer = undefined; this.clientRect = null; this.registerListeners(); } setStreamController(streamController) { this.streamController = streamController; } destroy() { this.unregisterListener(); if (this.hls.config.capLevelToPlayerSize) { this.stopCapping(); } this.media = null; this.clientRect = null; // @ts-ignore this.hls = this.streamController = null; } registerListeners() { const { hls } = this; hls.on(Events.FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this); hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this); hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this); hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this); } unregisterListener() { const { hls } = this; hls.off(Events.FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this); hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this); hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this); hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this); } onFpsDropLevelCapping(event, data) { // Don't add a restricted level more than once const level = this.hls.levels[data.droppedLevel]; if (this.isLevelAllowed(level)) { this.restrictedLevels.push({ bitrate: level.bitrate, height: level.height, width: level.width }); } } onMediaAttaching(event, data) { this.media = data.media instanceof HTMLVideoElement ? 
data.media : null; this.clientRect = null; } onManifestParsed(event, data) { const hls = this.hls; this.restrictedLevels = []; this.firstLevel = data.firstLevel; if (hls.config.capLevelToPlayerSize && data.video) { // Start capping immediately if the manifest has signaled video codecs this.startCapping(); } } // Only activate capping when playing a video stream; otherwise, multi-bitrate audio-only streams will be restricted // to the first level onBufferCodecs(event, data) { const hls = this.hls; if (hls.config.capLevelToPlayerSize && data.video) { // If the manifest did not signal a video codec capping has been deferred until we're certain video is present this.startCapping(); } } onMediaDetaching() { this.stopCapping(); } detectPlayerSize() { if (this.media && this.mediaHeight > 0 && this.mediaWidth > 0) { const levels = this.hls.levels; if (levels.length) { const hls = this.hls; hls.autoLevelCapping = this.getMaxLevel(levels.length - 1); if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) { // if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch // usually happen when the user go to the fullscreen mode. this.streamController.nextLevelSwitch(); } this.autoLevelCapping = hls.autoLevelCapping; } } } /* * returns level should be the one with the dimensions equal or greater than the media (player) dimensions (so the video will be downscaled) */ getMaxLevel(capLevelIndex) { const levels = this.hls.levels; if (!levels.length) { return -1; } const validLevels = levels.filter((level, index) => this.isLevelAllowed(level) && index <= capLevelIndex); this.clientRect = null; return CapLevelController.getMaxLevelByMediaSize(validLevels, this.mediaWidth, this.mediaHeight); } startCapping() { if (this.timer) { // Don't reset capping if started twice; this can happen if the manifest signals a video codec return; } this.autoLevelCapping = Number.POSITIVE_INFINITY; this.hls.firstLevel = this.getMaxLevel(this.firstLevel); self.clearInterval(this.timer); this.timer = self.setInterval(this.detectPlayerSize.bind(this), 1000); this.detectPlayerSize(); } stopCapping() { this.restrictedLevels = []; this.firstLevel = -1; this.autoLevelCapping = Number.POSITIVE_INFINITY; if (this.timer) { self.clearInterval(this.timer); this.timer = undefined; } } getDimensions() { if (this.clientRect) { return this.clientRect; } const media = this.media; const boundsRect = { width: 0, height: 0 }; if (media) { const clientRect = media.getBoundingClientRect(); boundsRect.width = clientRect.width; boundsRect.height = clientRect.height; if (!boundsRect.width && !boundsRect.height) { // When the media element has no width or height (equivalent to not being in the DOM), // then use its width and height attributes (media.width, media.height) boundsRect.width = clientRect.right - clientRect.left || media.width || 0; boundsRect.height = clientRect.bottom - clientRect.top || media.height || 0; } } this.clientRect = boundsRect; return boundsRect; } get mediaWidth() { return this.getDimensions().width * this.contentScaleFactor; } get mediaHeight() { return this.getDimensions().height * this.contentScaleFactor; } get contentScaleFactor() { let pixelRatio = 1; if (!this.hls.config.ignoreDevicePixelRatio) { try { pixelRatio = self.devicePixelRatio; } catch (e) { /* no-op */ } } return pixelRatio; } isLevelAllowed(level) { const restrictedLevels = this.restrictedLevels; return !restrictedLevels.some(restrictedLevel => { return level.bitrate === restrictedLevel.bitrate && 
level.width === restrictedLevel.width && level.height === restrictedLevel.height; }); } static getMaxLevelByMediaSize(levels, width, height) { if (!(levels != null && levels.length)) { return -1; } // Levels can have the same dimensions but differing bandwidths - since levels are ordered, we can look to the next // to determine whether we've chosen the greatest bandwidth for the media's dimensions const atGreatestBandwidth = (curLevel, nextLevel) => { if (!nextLevel) { return true; } return curLevel.width !== nextLevel.width || curLevel.height !== nextLevel.height; }; // If we run through the loop without breaking, the media's dimensions are greater than every level, so default to // the max level let maxLevelIndex = levels.length - 1; for (let i = 0; i < levels.length; i += 1) { const level = levels[i]; if ((level.width >= width || level.height >= height) && atGreatestBandwidth(level, levels[i + 1])) { maxLevelIndex = i; break; } } return maxLevelIndex; } } class FPSController { // stream controller must be provided as a dependency! constructor(hls) { this.hls = void 0; this.isVideoPlaybackQualityAvailable = false; this.timer = void 0; this.media = null; this.lastTime = void 0; this.lastDroppedFrames = 0; this.lastDecodedFrames = 0; this.streamController = void 0; this.hls = hls; this.registerListeners(); } setStreamController(streamController) { this.streamController = streamController; } registerListeners() { this.hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this); } unregisterListeners() { this.hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this); } destroy() { if (this.timer) { clearInterval(this.timer); } this.unregisterListeners(); this.isVideoPlaybackQualityAvailable = false; this.media = null; } onMediaAttaching(event, data) { const config = this.hls.config; if (config.capLevelOnFPSDrop) { const media = data.media instanceof self.HTMLVideoElement ? 
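/*
 * Worked example for getMaxLevelByMediaSize() above (illustrative): with levels of
 * 640x360, 1280x720 and 1920x1080 and a player surface of 1280x720 (already scaled by
 * devicePixelRatio through contentScaleFactor), 640x360 is skipped, 1280x720 is the
 * first level whose width or height covers the player and whose successor has
 * different dimensions, so capping returns index 1 and the 1920x1080 level is not
 * requested while the player keeps that size.
 */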
data.media : null; this.media = media; if (media && typeof media.getVideoPlaybackQuality === 'function') { this.isVideoPlaybackQualityAvailable = true; } self.clearInterval(this.timer); this.timer = self.setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod); } } checkFPS(video, decodedFrames, droppedFrames) { const currentTime = performance.now(); if (decodedFrames) { if (this.lastTime) { const currentPeriod = currentTime - this.lastTime; const currentDropped = droppedFrames - this.lastDroppedFrames; const currentDecoded = decodedFrames - this.lastDecodedFrames; const droppedFPS = 1000 * currentDropped / currentPeriod; const hls = this.hls; hls.trigger(Events.FPS_DROP, { currentDropped: currentDropped, currentDecoded: currentDecoded, totalDroppedFrames: droppedFrames }); if (droppedFPS > 0) { // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod)); if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) { let currentLevel = hls.currentLevel; logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel); if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) { currentLevel = currentLevel - 1; hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, { level: currentLevel, droppedLevel: hls.currentLevel }); hls.autoLevelCapping = currentLevel; this.streamController.nextLevelSwitch(); } } } } this.lastTime = currentTime; this.lastDroppedFrames = droppedFrames; this.lastDecodedFrames = decodedFrames; } } checkFPSInterval() { const video = this.media; if (video) { if (this.isVideoPlaybackQualityAvailable) { const videoPlaybackQuality = video.getVideoPlaybackQuality(); this.checkFPS(video, videoPlaybackQuality.totalVideoFrames, videoPlaybackQuality.droppedVideoFrames); } else { // HTMLVideoElement doesn't include the webkit types this.checkFPS(video, video.webkitDecodedFrameCount, video.webkitDroppedFrameCount); } } } } const LOGGER_PREFIX = '[eme]'; /** * Controller to deal with encrypted media extensions (EME) * @see https://developer.mozilla.org/en-US/docs/Web/API/Encrypted_Media_Extensions_API * * @class * @constructor */ class EMEController { constructor(hls) { this.hls = void 0; this.config = void 0; this.media = null; this.keyFormatPromise = null; this.keySystemAccessPromises = {}; this._requestLicenseFailureCount = 0; this.mediaKeySessions = []; this.keyIdToKeySessionPromise = {}; this.setMediaKeysQueue = EMEController.CDMCleanupPromise ? 
[EMEController.CDMCleanupPromise] : []; this.onMediaEncrypted = this._onMediaEncrypted.bind(this); this.onWaitingForKey = this._onWaitingForKey.bind(this); this.debug = logger.debug.bind(logger, LOGGER_PREFIX); this.log = logger.log.bind(logger, LOGGER_PREFIX); this.warn = logger.warn.bind(logger, LOGGER_PREFIX); this.error = logger.error.bind(logger, LOGGER_PREFIX); this.hls = hls; this.config = hls.config; this.registerListeners(); } destroy() { this.unregisterListeners(); this.onMediaDetached(); // Remove any references that could be held in config options or callbacks const config = this.config; config.requestMediaKeySystemAccessFunc = null; config.licenseXhrSetup = config.licenseResponseCallback = undefined; config.drmSystems = config.drmSystemOptions = {}; // @ts-ignore this.hls = this.onMediaEncrypted = this.onWaitingForKey = this.keyIdToKeySessionPromise = null; // @ts-ignore this.config = null; } registerListeners() { this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this); this.hls.on(Events.MEDIA_DETACHED, this.onMediaDetached, this); this.hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); this.hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this); } unregisterListeners() { this.hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this); this.hls.off(Events.MEDIA_DETACHED, this.onMediaDetached, this); this.hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); this.hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this); } getLicenseServerUrl(keySystem) { const { drmSystems, widevineLicenseUrl } = this.config; const keySystemConfiguration = drmSystems[keySystem]; if (keySystemConfiguration) { return keySystemConfiguration.licenseUrl; } // For backward compatibility if (keySystem === KeySystems.WIDEVINE && widevineLicenseUrl) { return widevineLicenseUrl; } throw new Error(`no license server URL configured for key-system "${keySystem}"`); } getServerCertificateUrl(keySystem) { const { drmSystems } = this.config; const keySystemConfiguration = drmSystems[keySystem]; if (keySystemConfiguration) { return keySystemConfiguration.serverCertificateUrl; } else { this.log(`No Server Certificate in config.drmSystems["${keySystem}"]`); } } attemptKeySystemAccess(keySystemsToAttempt) { const levels = this.hls.levels; const uniqueCodec = (value, i, a) => !!value && a.indexOf(value) === i; const audioCodecs = levels.map(level => level.audioCodec).filter(uniqueCodec); const videoCodecs = levels.map(level => level.videoCodec).filter(uniqueCodec); if (audioCodecs.length + videoCodecs.length === 0) { videoCodecs.push('avc1.42e01e'); } return new Promise((resolve, reject) => { const attempt = keySystems => { const keySystem = keySystems.shift(); this.getMediaKeysPromise(keySystem, audioCodecs, videoCodecs).then(mediaKeys => resolve({ keySystem, mediaKeys })).catch(error => { if (keySystems.length) { attempt(keySystems); } else if (error instanceof EMEKeyError) { reject(error); } else { reject(new EMEKeyError({ type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_NO_ACCESS, error, fatal: true }, error.message)); } }); }; attempt(keySystemsToAttempt); }); } requestMediaKeySystemAccess(keySystem, supportedConfigurations) { const { requestMediaKeySystemAccessFunc } = this.config; if (!(typeof requestMediaKeySystemAccessFunc === 'function')) { let errMessage = `Configured requestMediaKeySystemAccess is not a function ${requestMediaKeySystemAccessFunc}`; if (requestMediaKeySystemAccess === null && self.location.protocol === 'http:') { errMessage = 
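/*
 * Illustrative configuration read by getLicenseServerUrl() and
 * getServerCertificateUrl() above (the Widevine key-system string and the URLs are
 * assumptions for the example, not values from this bundle):
 *
 *   drmSystems: {
 *     'com.widevine.alpha': {
 *       licenseUrl: 'https://license.example.com/widevine',
 *       serverCertificateUrl: 'https://license.example.com/widevine/cert'
 *     }
 *   }
 *
 * The legacy widevineLicenseUrl option is only consulted as a fallback when no
 * drmSystems entry exists for KeySystems.WIDEVINE.
 */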
`navigator.requestMediaKeySystemAccess is not available over insecure protocol ${location.protocol}`; } return Promise.reject(new Error(errMessage)); } return requestMediaKeySystemAccessFunc(keySystem, supportedConfigurations); } getMediaKeysPromise(keySystem, audioCodecs, videoCodecs) { // This can throw, but is caught in event handler callpath const mediaKeySystemConfigs = getSupportedMediaKeySystemConfigurations(keySystem, audioCodecs, videoCodecs, this.config.drmSystemOptions); const keySystemAccessPromises = this.keySystemAccessPromises[keySystem]; let keySystemAccess = keySystemAccessPromises == null ? void 0 : keySystemAccessPromises.keySystemAccess; if (!keySystemAccess) { this.log(`Requesting encrypted media "${keySystem}" key-system access with config: ${JSON.stringify(mediaKeySystemConfigs)}`); keySystemAccess = this.requestMediaKeySystemAccess(keySystem, mediaKeySystemConfigs); const _keySystemAccessPromises = this.keySystemAccessPromises[keySystem] = { keySystemAccess }; keySystemAccess.catch(error => { this.log(`Failed to obtain access to key-system "${keySystem}": ${error}`); }); return keySystemAccess.then(mediaKeySystemAccess => { this.log(`Access for key-system "${mediaKeySystemAccess.keySystem}" obtained`); const certificateRequest = this.fetchServerCertificate(keySystem); this.log(`Create media-keys for "${keySystem}"`); _keySystemAccessPromises.mediaKeys = mediaKeySystemAccess.createMediaKeys().then(mediaKeys => { this.log(`Media-keys created for "${keySystem}"`); return certificateRequest.then(certificate => { if (certificate) { return this.setMediaKeysServerCertificate(mediaKeys, keySystem, certificate); } return mediaKeys; }); }); _keySystemAccessPromises.mediaKeys.catch(error => { this.error(`Failed to create media-keys for "${keySystem}"}: ${error}`); }); return _keySystemAccessPromises.mediaKeys; }); } return keySystemAccess.then(() => keySystemAccessPromises.mediaKeys); } createMediaKeySessionContext({ decryptdata, keySystem, mediaKeys }) { this.log(`Creating key-system session "${keySystem}" keyId: ${Hex.hexDump(decryptdata.keyId || [])}`); const mediaKeysSession = mediaKeys.createSession(); const mediaKeySessionContext = { decryptdata, keySystem, mediaKeys, mediaKeysSession, keyStatus: 'status-pending' }; this.mediaKeySessions.push(mediaKeySessionContext); return mediaKeySessionContext; } renewKeySession(mediaKeySessionContext) { const decryptdata = mediaKeySessionContext.decryptdata; if (decryptdata.pssh) { const keySessionContext = this.createMediaKeySessionContext(mediaKeySessionContext); const keyId = this.getKeyIdString(decryptdata); const scheme = 'cenc'; this.keyIdToKeySessionPromise[keyId] = this.generateRequestWithPreferredKeySession(keySessionContext, scheme, decryptdata.pssh, 'expired'); } else { this.warn(`Could not renew expired session. Missing pssh initData.`); } this.removeSession(mediaKeySessionContext); } getKeyIdString(decryptdata) { if (!decryptdata) { throw new Error('Could not read keyId of undefined decryptdata'); } if (decryptdata.keyId === null) { throw new Error('keyId is null'); } return Hex.hexDump(decryptdata.keyId); } updateKeySession(mediaKeySessionContext, data) { var _mediaKeySessionConte; const keySession = mediaKeySessionContext.mediaKeysSession; this.log(`Updating key-session "${keySession.sessionId}" for keyID ${Hex.hexDump(((_mediaKeySessionConte = mediaKeySessionContext.decryptdata) == null ? void 0 : _mediaKeySessionConte.keyId) || [])} } (data length: ${data ? 
data.byteLength : data})`); return keySession.update(data); } selectKeySystemFormat(frag) { const keyFormats = Object.keys(frag.levelkeys || {}); if (!this.keyFormatPromise) { this.log(`Selecting key-system from fragment (sn: ${frag.sn} ${frag.type}: ${frag.level}) key formats ${keyFormats.join(', ')}`); this.keyFormatPromise = this.getKeyFormatPromise(keyFormats); } return this.keyFormatPromise; } getKeyFormatPromise(keyFormats) { return new Promise((resolve, reject) => { const keySystemsInConfig = getKeySystemsForConfig(this.config); const keySystemsToAttempt = keyFormats.map(keySystemFormatToKeySystemDomain).filter(value => !!value && keySystemsInConfig.indexOf(value) !== -1); return this.getKeySystemSelectionPromise(keySystemsToAttempt).then(({ keySystem }) => { const keySystemFormat = keySystemDomainToKeySystemFormat(keySystem); if (keySystemFormat) { resolve(keySystemFormat); } else { reject(new Error(`Unable to find format for key-system "${keySystem}"`)); } }).catch(reject); }); } loadKey(data) { const decryptdata = data.keyInfo.decryptdata; const keyId = this.getKeyIdString(decryptdata); const keyDetails = `(keyId: ${keyId} format: "${decryptdata.keyFormat}" method: ${decryptdata.method} uri: ${decryptdata.uri})`; this.log(`Starting session for key ${keyDetails}`); let keySessionContextPromise = this.keyIdToKeySessionPromise[keyId]; if (!keySessionContextPromise) { keySessionContextPromise = this.keyIdToKeySessionPromise[keyId] = this.getKeySystemForKeyPromise(decryptdata).then(({ keySystem, mediaKeys }) => { this.throwIfDestroyed(); this.log(`Handle encrypted media sn: ${data.frag.sn} ${data.frag.type}: ${data.frag.level} using key ${keyDetails}`); return this.attemptSetMediaKeys(keySystem, mediaKeys).then(() => { this.throwIfDestroyed(); const keySessionContext = this.createMediaKeySessionContext({ keySystem, mediaKeys, decryptdata }); const scheme = 'cenc'; return this.generateRequestWithPreferredKeySession(keySessionContext, scheme, decryptdata.pssh, 'playlist-key'); }); }); keySessionContextPromise.catch(error => this.handleError(error)); } return keySessionContextPromise; } throwIfDestroyed(message = 'Invalid state') { if (!this.hls) { throw new Error('invalid state'); } } handleError(error) { if (!this.hls) { return; } this.error(error.message); if (error instanceof EMEKeyError) { this.hls.trigger(Events.ERROR, error.data); } else { this.hls.trigger(Events.ERROR, { type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_NO_KEYS, error, fatal: true }); } } getKeySystemForKeyPromise(decryptdata) { const keyId = this.getKeyIdString(decryptdata); const mediaKeySessionContext = this.keyIdToKeySessionPromise[keyId]; if (!mediaKeySessionContext) { const keySystem = keySystemFormatToKeySystemDomain(decryptdata.keyFormat); const keySystemsToAttempt = keySystem ? 
[keySystem] : getKeySystemsForConfig(this.config); return this.attemptKeySystemAccess(keySystemsToAttempt); } return mediaKeySessionContext; } getKeySystemSelectionPromise(keySystemsToAttempt) { if (!keySystemsToAttempt.length) { keySystemsToAttempt = getKeySystemsForConfig(this.config); } if (keySystemsToAttempt.length === 0) { throw new EMEKeyError({ type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_NO_CONFIGURED_LICENSE, fatal: true }, `Missing key-system license configuration options ${JSON.stringify({ drmSystems: this.config.drmSystems })}`); } return this.attemptKeySystemAccess(keySystemsToAttempt); } _onMediaEncrypted(event) { const { initDataType, initData } = event; this.debug(`"${event.type}" event: init data type: "${initDataType}"`); // Ignore event when initData is null if (initData === null) { return; } let keyId; let keySystemDomain; if (initDataType === 'sinf' && this.config.drmSystems[KeySystems.FAIRPLAY]) { // Match sinf keyId to playlist skd://keyId= const json = bin2str(new Uint8Array(initData)); try { const sinf = base64Decode(JSON.parse(json).sinf); const tenc = parseSinf(new Uint8Array(sinf)); if (!tenc) { return; } keyId = tenc.subarray(8, 24); keySystemDomain = KeySystems.FAIRPLAY; } catch (error) { this.warn('Failed to parse sinf "encrypted" event message initData'); return; } } else { // Support clear-lead key-session creation (otherwise depend on playlist keys) const psshInfo = parsePssh(initData); if (psshInfo === null) { return; } if (psshInfo.version === 0 && psshInfo.systemId === KeySystemIds.WIDEVINE && psshInfo.data) { keyId = psshInfo.data.subarray(8, 24); } keySystemDomain = keySystemIdToKeySystemDomain(psshInfo.systemId); } if (!keySystemDomain || !keyId) { return; } const keyIdHex = Hex.hexDump(keyId); const { keyIdToKeySessionPromise, mediaKeySessions } = this; let keySessionContextPromise = keyIdToKeySessionPromise[keyIdHex]; for (let i = 0; i < mediaKeySessions.length; i++) { // Match playlist key const keyContext = mediaKeySessions[i]; const decryptdata = keyContext.decryptdata; if (decryptdata.pssh || !decryptdata.keyId) { continue; } const oldKeyIdHex = Hex.hexDump(decryptdata.keyId); if (keyIdHex === oldKeyIdHex || decryptdata.uri.replace(/-/g, '').indexOf(keyIdHex) !== -1) { keySessionContextPromise = keyIdToKeySessionPromise[oldKeyIdHex]; delete keyIdToKeySessionPromise[oldKeyIdHex]; decryptdata.pssh = new Uint8Array(initData); decryptdata.keyId = keyId; keySessionContextPromise = keyIdToKeySessionPromise[keyIdHex] = keySessionContextPromise.then(() => { return this.generateRequestWithPreferredKeySession(keyContext, initDataType, initData, 'encrypted-event-key-match'); }); break; } } if (!keySessionContextPromise) { // Clear-lead key (not encountered in playlist) keySessionContextPromise = keyIdToKeySessionPromise[keyIdHex] = this.getKeySystemSelectionPromise([keySystemDomain]).then(({ keySystem, mediaKeys }) => { var _keySystemToKeySystem; this.throwIfDestroyed(); const decryptdata = new LevelKey('ISO-23001-7', keyIdHex, (_keySystemToKeySystem = keySystemDomainToKeySystemFormat(keySystem)) != null ? 
_keySystemToKeySystem : ''); decryptdata.pssh = new Uint8Array(initData); decryptdata.keyId = keyId; return this.attemptSetMediaKeys(keySystem, mediaKeys).then(() => { this.throwIfDestroyed(); const keySessionContext = this.createMediaKeySessionContext({ decryptdata, keySystem, mediaKeys }); return this.generateRequestWithPreferredKeySession(keySessionContext, initDataType, initData, 'encrypted-event-no-match'); }); }); } keySessionContextPromise.catch(error => this.handleError(error)); } _onWaitingForKey(event) { this.log(`"${event.type}" event`); } attemptSetMediaKeys(keySystem, mediaKeys) { const queue = this.setMediaKeysQueue.slice(); this.log(`Setting media-keys for "${keySystem}"`); // Only one setMediaKeys() can run at one time, and multiple setMediaKeys() operations // can be queued for execution for multiple key sessions. const setMediaKeysPromise = Promise.all(queue).then(() => { if (!this.media) { throw new Error('Attempted to set mediaKeys without media element attached'); } return this.media.setMediaKeys(mediaKeys); }); this.setMediaKeysQueue.push(setMediaKeysPromise); return setMediaKeysPromise.then(() => { this.log(`Media-keys set for "${keySystem}"`); queue.push(setMediaKeysPromise); this.setMediaKeysQueue = this.setMediaKeysQueue.filter(p => queue.indexOf(p) === -1); }); } generateRequestWithPreferredKeySession(context, initDataType, initData, reason) { var _this$config$drmSyste, _this$config$drmSyste2; const generateRequestFilter = (_this$config$drmSyste = this.config.drmSystems) == null ? void 0 : (_this$config$drmSyste2 = _this$config$drmSyste[context.keySystem]) == null ? void 0 : _this$config$drmSyste2.generateRequest; if (generateRequestFilter) { try { const mappedInitData = generateRequestFilter.call(this.hls, initDataType, initData, context); if (!mappedInitData) { throw new Error('Invalid response from configured generateRequest filter'); } initDataType = mappedInitData.initDataType; initData = context.decryptdata.pssh = mappedInitData.initData ? new Uint8Array(mappedInitData.initData) : null; } catch (error) { var _this$hls; this.warn(error.message); if ((_this$hls = this.hls) != null && _this$hls.config.debug) { throw error; } } } if (initData === null) { this.log(`Skipping key-session request for "${reason}" (no initData)`); return Promise.resolve(context); } const keyId = this.getKeyIdString(context.decryptdata); this.log(`Generating key-session request for "${reason}": ${keyId} (init data type: ${initDataType} length: ${initData ? 
initData.byteLength : null})`); const licenseStatus = new EventEmitter(); context.mediaKeysSession.onmessage = event => { const keySession = context.mediaKeysSession; if (!keySession) { licenseStatus.emit('error', new Error('invalid state')); return; } const { messageType, message } = event; this.log(`"${messageType}" message event for session "${keySession.sessionId}" message size: ${message.byteLength}`); if (messageType === 'license-request' || messageType === 'license-renewal') { this.renewLicense(context, message).catch(error => { this.handleError(error); licenseStatus.emit('error', error); }); } else if (messageType === 'license-release') { if (context.keySystem === KeySystems.FAIRPLAY) { this.updateKeySession(context, strToUtf8array('acknowledged')); this.removeSession(context); } } else { this.warn(`unhandled media key message type "${messageType}"`); } }; context.mediaKeysSession.onkeystatuseschange = event => { const keySession = context.mediaKeysSession; if (!keySession) { licenseStatus.emit('error', new Error('invalid state')); return; } this.onKeyStatusChange(context); const keyStatus = context.keyStatus; licenseStatus.emit('keyStatus', keyStatus); if (keyStatus === 'expired') { this.warn(`${context.keySystem} expired for key ${keyId}`); this.renewKeySession(context); } }; const keyUsablePromise = new Promise((resolve, reject) => { licenseStatus.on('error', reject); licenseStatus.on('keyStatus', keyStatus => { if (keyStatus.startsWith('usable')) { resolve(); } else if (keyStatus === 'output-restricted') { reject(new EMEKeyError({ type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED, fatal: false }, 'HDCP level output restricted')); } else if (keyStatus === 'internal-error') { reject(new EMEKeyError({ type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_STATUS_INTERNAL_ERROR, fatal: true }, `key status changed to "${keyStatus}"`)); } else if (keyStatus === 'expired') { reject(new Error('key expired while generating request')); } else { this.warn(`unhandled key status change "${keyStatus}"`); } }); }); return context.mediaKeysSession.generateRequest(initDataType, initData).then(() => { var _context$mediaKeysSes; this.log(`Request generated for key-session "${(_context$mediaKeysSes = context.mediaKeysSession) == null ? void 0 : _context$mediaKeysSes.sessionId}" keyId: ${keyId}`); }).catch(error => { throw new EMEKeyError({ type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_NO_SESSION, error, fatal: false }, `Error generating key-session request: ${error}`); }).then(() => keyUsablePromise).catch(error => { licenseStatus.removeAllListeners(); this.removeSession(context); throw error; }).then(() => { licenseStatus.removeAllListeners(); return context; }); } onKeyStatusChange(mediaKeySessionContext) { mediaKeySessionContext.mediaKeysSession.keyStatuses.forEach((status, keyId) => { this.log(`key status change "${status}" for keyStatuses keyId: ${Hex.hexDump('buffer' in keyId ? 
new Uint8Array(keyId.buffer, keyId.byteOffset, keyId.byteLength) : new Uint8Array(keyId))} session keyId: ${Hex.hexDump(new Uint8Array(mediaKeySessionContext.decryptdata.keyId || []))} uri: ${mediaKeySessionContext.decryptdata.uri}`); mediaKeySessionContext.keyStatus = status; }); } fetchServerCertificate(keySystem) { const config = this.config; const Loader = config.loader; const certLoader = new Loader(config); const url = this.getServerCertificateUrl(keySystem); if (!url) { return Promise.resolve(); } this.log(`Fetching serverCertificate for "${keySystem}"`); return new Promise((resolve, reject) => { const loaderContext = { responseType: 'arraybuffer', url }; const loadPolicy = config.certLoadPolicy.default; const loaderConfig = { loadPolicy, timeout: loadPolicy.maxLoadTimeMs, maxRetry: 0, retryDelay: 0, maxRetryDelay: 0 }; const loaderCallbacks = { onSuccess: (response, stats, context, networkDetails) => { resolve(response.data); }, onError: (response, contex, networkDetails, stats) => { reject(new EMEKeyError({ type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_SERVER_CERTIFICATE_REQUEST_FAILED, fatal: true, networkDetails, response: _objectSpread2({ url: loaderContext.url, data: undefined }, response) }, `"${keySystem}" certificate request failed (${url}). Status: ${response.code} (${response.text})`)); }, onTimeout: (stats, context, networkDetails) => { reject(new EMEKeyError({ type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_SERVER_CERTIFICATE_REQUEST_FAILED, fatal: true, networkDetails, response: { url: loaderContext.url, data: undefined } }, `"${keySystem}" certificate request timed out (${url})`)); }, onAbort: (stats, context, networkDetails) => { reject(new Error('aborted')); } }; certLoader.load(loaderContext, loaderConfig, loaderCallbacks); }); } setMediaKeysServerCertificate(mediaKeys, keySystem, cert) { return new Promise((resolve, reject) => { mediaKeys.setServerCertificate(cert).then(success => { this.log(`setServerCertificate ${success ? 'success' : 'not supported by CDM'} (${cert == null ? void 0 : cert.byteLength}) on "${keySystem}"`); resolve(mediaKeys); }).catch(error => { reject(new EMEKeyError({ type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_SERVER_CERTIFICATE_UPDATE_FAILED, error, fatal: true }, error.message)); }); }); } renewLicense(context, keyMessage) { return this.requestLicense(context, new Uint8Array(keyMessage)).then(data => { return this.updateKeySession(context, new Uint8Array(data)).catch(error => { throw new EMEKeyError({ type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_SESSION_UPDATE_FAILED, error, fatal: true }, error.message); }); }); } setupLicenseXHR(xhr, url, keysListItem, licenseChallenge) { const licenseXhrSetup = this.config.licenseXhrSetup; if (!licenseXhrSetup) { xhr.open('POST', url, true); return Promise.resolve({ xhr, licenseChallenge }); } return Promise.resolve().then(() => { if (!keysListItem.decryptdata) { throw new Error('Key removed'); } return licenseXhrSetup.call(this.hls, xhr, url, keysListItem, licenseChallenge); }).catch(error => { if (!keysListItem.decryptdata) { // Key session removed. Cancel license request. 
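/*
 * setupLicenseXHR() lets a user-supplied licenseXhrSetup hook open the POST request and attach
 * authentication before requestLicense() sends the key challenge; a truthy return value replaces the
 * challenge body. requestLicense() likewise routes the raw server response through
 * licenseResponseCallback when one is configured. Illustrative, non-executed sketch; the header name
 * and token are placeholders:
 *
 *   const licenseConfig = {
 *     licenseXhrSetup: (xhr, url, keyContext, licenseChallenge) => {
 *       xhr.open('POST', url, true);                            // optional: setupLicenseXHR opens it if still unopened
 *       xhr.setRequestHeader('X-Auth-Token', 'placeholder');    // placeholder credential header
 *       return licenseChallenge;                                // or return a rewrapped Uint8Array challenge
 *     },
 *     licenseResponseCallback: (xhr, url, keyContext) => xhr.response  // unwrap an envelope here if the server uses one
 *   };
 */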
throw error; } // let's try to open before running setup xhr.open('POST', url, true); return licenseXhrSetup.call(this.hls, xhr, url, keysListItem, licenseChallenge); }).then(licenseXhrSetupResult => { // if licenseXhrSetup did not yet call open, let's do it now if (!xhr.readyState) { xhr.open('POST', url, true); } const finalLicenseChallenge = licenseXhrSetupResult ? licenseXhrSetupResult : licenseChallenge; return { xhr, licenseChallenge: finalLicenseChallenge }; }); } requestLicense(keySessionContext, licenseChallenge) { const keyLoadPolicy = this.config.keyLoadPolicy.default; return new Promise((resolve, reject) => { const url = this.getLicenseServerUrl(keySessionContext.keySystem); this.log(`Sending license request to URL: ${url}`); const xhr = new XMLHttpRequest(); xhr.responseType = 'arraybuffer'; xhr.onreadystatechange = () => { if (!this.hls || !keySessionContext.mediaKeysSession) { return reject(new Error('invalid state')); } if (xhr.readyState === 4) { if (xhr.status === 200) { this._requestLicenseFailureCount = 0; let data = xhr.response; this.log(`License received ${data instanceof ArrayBuffer ? data.byteLength : data}`); const licenseResponseCallback = this.config.licenseResponseCallback; if (licenseResponseCallback) { try { data = licenseResponseCallback.call(this.hls, xhr, url, keySessionContext); } catch (error) { this.error(error); } } resolve(data); } else { const retryConfig = keyLoadPolicy.errorRetry; const maxNumRetry = retryConfig ? retryConfig.maxNumRetry : 0; this._requestLicenseFailureCount++; if (this._requestLicenseFailureCount > maxNumRetry || xhr.status >= 400 && xhr.status < 500) { reject(new EMEKeyError({ type: ErrorTypes.KEY_SYSTEM_ERROR, details: ErrorDetails.KEY_SYSTEM_LICENSE_REQUEST_FAILED, fatal: true, networkDetails: xhr, response: { url, data: undefined, code: xhr.status, text: xhr.statusText } }, `License Request XHR failed (${url}). Status: ${xhr.status} (${xhr.statusText})`)); } else { const attemptsLeft = maxNumRetry - this._requestLicenseFailureCount + 1; this.warn(`Retrying license request, ${attemptsLeft} attempts left`); this.requestLicense(keySessionContext, licenseChallenge).then(resolve, reject); } } } }; if (keySessionContext.licenseXhr && keySessionContext.licenseXhr.readyState !== XMLHttpRequest.DONE) { keySessionContext.licenseXhr.abort(); } keySessionContext.licenseXhr = xhr; this.setupLicenseXHR(xhr, url, keySessionContext, licenseChallenge).then(({ xhr, licenseChallenge }) => { xhr.send(licenseChallenge); }); }); } onMediaAttached(event, data) { if (!this.config.emeEnabled) { return; } const media = data.media; // keep reference of media this.media = media; media.addEventListener('encrypted', this.onMediaEncrypted); media.addEventListener('waitingforkey', this.onWaitingForKey); } onMediaDetached() { const media = this.media; const mediaKeysList = this.mediaKeySessions; if (media) { media.removeEventListener('encrypted', this.onMediaEncrypted); media.removeEventListener('waitingforkey', this.onWaitingForKey); this.media = null; } this._requestLicenseFailureCount = 0; this.setMediaKeysQueue = []; this.mediaKeySessions = []; this.keyIdToKeySessionPromise = {}; LevelKey.clearKeyUriToKeyIdMap(); // Close all sessions and remove media keys from the video element. const keySessionCount = mediaKeysList.length; EMEController.CDMCleanupPromise = Promise.all(mediaKeysList.map(mediaKeySessionContext => this.removeSession(mediaKeySessionContext)).concat(media == null ? 
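/*
 * requestLicense() above retries license POSTs according to keyLoadPolicy.default.errorRetry and
 * treats any 4xx status as fatal (no retry). The default policy appears in hlsDefaultConfig further
 * down; an override keeps the same shape. Sketch with illustrative numbers only:
 *
 *   const keyRetryConfig = {
 *     keyLoadPolicy: {
 *       default: {
 *         maxTimeToFirstByteMs: 8000,
 *         maxLoadTimeMs: 20000,
 *         timeoutRetry: { maxNumRetry: 1, retryDelayMs: 1000, maxRetryDelayMs: 20000, backoff: 'linear' },
 *         errorRetry:   { maxNumRetry: 3, retryDelayMs: 1000, maxRetryDelayMs: 8000,  backoff: 'linear' }
 *       }
 *     }
 *   };
 */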
void 0 : media.setMediaKeys(null).catch(error => { this.log(`Could not clear media keys: ${error}. media.src: ${media == null ? void 0 : media.src}`); }))).then(() => { if (keySessionCount) { this.log('finished closing key sessions and clearing media keys'); mediaKeysList.length = 0; } }).catch(error => { this.log(`Could not close sessions and clear media keys: ${error}. media.src: ${media == null ? void 0 : media.src}`); }); } onManifestLoading() { this.keyFormatPromise = null; } onManifestLoaded(event, { sessionKeys }) { if (!sessionKeys || !this.config.emeEnabled) { return; } if (!this.keyFormatPromise) { const keyFormats = sessionKeys.reduce((formats, sessionKey) => { if (formats.indexOf(sessionKey.keyFormat) === -1) { formats.push(sessionKey.keyFormat); } return formats; }, []); this.log(`Selecting key-system from session-keys ${keyFormats.join(', ')}`); this.keyFormatPromise = this.getKeyFormatPromise(keyFormats); } } removeSession(mediaKeySessionContext) { const { mediaKeysSession, licenseXhr } = mediaKeySessionContext; if (mediaKeysSession) { this.log(`Remove licenses and keys and close session ${mediaKeysSession.sessionId}`); mediaKeysSession.onmessage = null; mediaKeysSession.onkeystatuseschange = null; if (licenseXhr && licenseXhr.readyState !== XMLHttpRequest.DONE) { licenseXhr.abort(); } mediaKeySessionContext.mediaKeysSession = mediaKeySessionContext.decryptdata = mediaKeySessionContext.licenseXhr = undefined; const index = this.mediaKeySessions.indexOf(mediaKeySessionContext); if (index > -1) { this.mediaKeySessions.splice(index, 1); } return mediaKeysSession.remove().catch(error => { this.log(`Could not remove session: ${error}`); }).then(() => { return mediaKeysSession.close(); }).catch(error => { this.log(`Could not close session: ${error}`); }); } } } EMEController.CDMCleanupPromise = void 0; class EMEKeyError extends Error { constructor(data, message) { super(message); this.data = void 0; data.error || (data.error = new Error(message)); this.data = data; data.err = data.error; } } /** * CMCD spec version */ const CMCDVersion = 1; /** * CMCD Object Type */ var CMCDObjectType = { MANIFEST: "m", AUDIO: "a", VIDEO: "v", MUXED: "av", INIT: "i", CAPTION: "c", TIMED_TEXT: "tt", KEY: "k", OTHER: "o" }; /** * CMCD Streaming Format */ const CMCDStreamingFormatHLS = 'h'; /** * CMCD Streaming Type */ /** * CMCD Headers */ /** * CMCD */ /** * Controller to deal with Common Media Client Data (CMCD) * @see https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf */ class CMCDController { // eslint-disable-line no-restricted-globals // eslint-disable-line no-restricted-globals constructor(hls) { this.hls = void 0; this.config = void 0; this.media = void 0; this.sid = void 0; this.cid = void 0; this.useHeaders = false; this.initialized = false; this.starved = false; this.buffering = true; this.audioBuffer = void 0; this.videoBuffer = void 0; this.onWaiting = () => { if (this.initialized) { this.starved = true; } this.buffering = true; }; this.onPlaying = () => { if (!this.initialized) { this.initialized = true; } this.buffering = false; }; /** * Apply CMCD data to a manifest request. 
*/ this.applyPlaylistData = context => { try { this.apply(context, { ot: CMCDObjectType.MANIFEST, su: !this.initialized }); } catch (error) { logger.warn('Could not generate manifest CMCD data.', error); } }; /** * Apply CMCD data to a segment request */ this.applyFragmentData = context => { try { const fragment = context.frag; const level = this.hls.levels[fragment.level]; const ot = this.getObjectType(fragment); const data = { d: fragment.duration * 1000, ot }; if (ot === CMCDObjectType.VIDEO || ot === CMCDObjectType.AUDIO || ot == CMCDObjectType.MUXED) { data.br = level.bitrate / 1000; data.tb = this.getTopBandwidth(ot) / 1000; data.bl = this.getBufferLength(ot); } this.apply(context, data); } catch (error) { logger.warn('Could not generate segment CMCD data.', error); } }; this.hls = hls; const config = this.config = hls.config; const { cmcd } = config; if (cmcd != null) { config.pLoader = this.createPlaylistLoader(); config.fLoader = this.createFragmentLoader(); this.sid = cmcd.sessionId || CMCDController.uuid(); this.cid = cmcd.contentId; this.useHeaders = cmcd.useHeaders === true; this.registerListeners(); } } registerListeners() { const hls = this.hls; hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.on(Events.MEDIA_DETACHED, this.onMediaDetached, this); hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this); } unregisterListeners() { const hls = this.hls; hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this); hls.off(Events.MEDIA_DETACHED, this.onMediaDetached, this); hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this); } destroy() { this.unregisterListeners(); this.onMediaDetached(); // @ts-ignore this.hls = this.config = this.audioBuffer = this.videoBuffer = null; } onMediaAttached(event, data) { this.media = data.media; this.media.addEventListener('waiting', this.onWaiting); this.media.addEventListener('playing', this.onPlaying); } onMediaDetached() { if (!this.media) { return; } this.media.removeEventListener('waiting', this.onWaiting); this.media.removeEventListener('playing', this.onPlaying); // @ts-ignore this.media = null; } onBufferCreated(event, data) { var _data$tracks$audio, _data$tracks$video; this.audioBuffer = (_data$tracks$audio = data.tracks.audio) == null ? void 0 : _data$tracks$audio.buffer; this.videoBuffer = (_data$tracks$video = data.tracks.video) == null ? void 0 : _data$tracks$video.buffer; } /** * Create baseline CMCD data */ createData() { var _this$media; return { v: CMCDVersion, sf: CMCDStreamingFormatHLS, sid: this.sid, cid: this.cid, pr: (_this$media = this.media) == null ? void 0 : _this$media.playbackRate, mtp: this.hls.bandwidthEstimate / 1000 }; } /** * Apply CMCD data to a request. */ apply(context, data = {}) { // apply baseline data _extends(data, this.createData()); const isVideo = data.ot === CMCDObjectType.INIT || data.ot === CMCDObjectType.VIDEO || data.ot === CMCDObjectType.MUXED; if (this.starved && isVideo) { data.bs = true; data.su = true; this.starved = false; } if (data.su == null) { data.su = this.buffering; } // TODO: Implement rtp, nrr, nor, dl if (this.useHeaders) { const headers = CMCDController.toHeaders(data); if (!Object.keys(headers).length) { return; } if (!context.headers) { context.headers = {}; } _extends(context.headers, headers); } else { const query = CMCDController.toQuery(data); if (!query) { return; } context.url = CMCDController.appendQueryToUri(context.url, query); } } /** * The CMCD object type. 
*/ getObjectType(fragment) { const { type } = fragment; if (type === 'subtitle') { return CMCDObjectType.TIMED_TEXT; } if (fragment.sn === 'initSegment') { return CMCDObjectType.INIT; } if (type === 'audio') { return CMCDObjectType.AUDIO; } if (type === 'main') { if (!this.hls.audioTracks.length) { return CMCDObjectType.MUXED; } return CMCDObjectType.VIDEO; } return undefined; } /** * Get the highest bitrate. */ getTopBandwidth(type) { let bitrate = 0; let levels; const hls = this.hls; if (type === CMCDObjectType.AUDIO) { levels = hls.audioTracks; } else { const max = hls.maxAutoLevel; const len = max > -1 ? max + 1 : hls.levels.length; levels = hls.levels.slice(0, len); } for (const level of levels) { if (level.bitrate > bitrate) { bitrate = level.bitrate; } } return bitrate > 0 ? bitrate : NaN; } /** * Get the buffer length for a media type in milliseconds */ getBufferLength(type) { const media = this.hls.media; const buffer = type === CMCDObjectType.AUDIO ? this.audioBuffer : this.videoBuffer; if (!buffer || !media) { return NaN; } const info = BufferHelper.bufferInfo(buffer, media.currentTime, this.config.maxBufferHole); return info.len * 1000; } /** * Create a playlist loader */ createPlaylistLoader() { const { pLoader } = this.config; const apply = this.applyPlaylistData; const Ctor = pLoader || this.config.loader; return class CmcdPlaylistLoader { constructor(config) { this.loader = void 0; this.loader = new Ctor(config); } get stats() { return this.loader.stats; } get context() { return this.loader.context; } destroy() { this.loader.destroy(); } abort() { this.loader.abort(); } load(context, config, callbacks) { apply(context); this.loader.load(context, config, callbacks); } }; } /** * Create a playlist loader */ createFragmentLoader() { const { fLoader } = this.config; const apply = this.applyFragmentData; const Ctor = fLoader || this.config.loader; return class CmcdFragmentLoader { constructor(config) { this.loader = void 0; this.loader = new Ctor(config); } get stats() { return this.loader.stats; } get context() { return this.loader.context; } destroy() { this.loader.destroy(); } abort() { this.loader.abort(); } load(context, config, callbacks) { apply(context); this.loader.load(context, config, callbacks); } }; } /** * Generate a random v4 UUI * * @returns {string} */ static uuid() { const url = URL.createObjectURL(new Blob()); const uuid = url.toString(); URL.revokeObjectURL(url); return uuid.slice(uuid.lastIndexOf('/') + 1); } /** * Serialize a CMCD data object according to the rules defined in the * section 3.2 of * [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf). */ static serialize(data) { const results = []; const isValid = value => !Number.isNaN(value) && value != null && value !== '' && value !== false; const toRounded = value => Math.round(value); const toHundred = value => toRounded(value / 100) * 100; const toUrlSafe = value => encodeURIComponent(value); const formatters = { br: toRounded, d: toRounded, bl: toHundred, dl: toHundred, mtp: toHundred, nor: toUrlSafe, rtp: toHundred, tb: toRounded }; const keys = Object.keys(data || {}).sort(); for (const key of keys) { let value = data[key]; // ignore invalid values if (!isValid(value)) { continue; } // Version should only be reported if not equal to 1. if (key === 'v' && value === 1) { continue; } // Playback rate should only be sent if not equal to 1. 
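/*
 * Worked example for serialize(): keys are sorted, v=1 and pr=1 are dropped, br/d/tb are rounded,
 * bl/dl/mtp/rtp are rounded to the nearest 100, booleans become bare keys, and everything else is
 * key=value (strings JSON-quoted except ot/sf/st). With made-up input values:
 *
 *   CMCDController.serialize({ v: 1, pr: 1, ot: 'v', br: 1000, bl: 2756, d: 4004.7, su: true })
 *   // returns 'bl=2800,br=1000,d=4005,ot=v,su'
 */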
if (key == 'pr' && value === 1) { continue; } // Certain values require special formatting const formatter = formatters[key]; if (formatter) { value = formatter(value); } // Serialize the key/value pair const type = typeof value; let result; if (key === 'ot' || key === 'sf' || key === 'st') { result = `${key}=${value}`; } else if (type === 'boolean') { result = key; } else if (type === 'number') { result = `${key}=${value}`; } else { result = `${key}=${JSON.stringify(value)}`; } results.push(result); } return results.join(','); } /** * Convert a CMCD data object to request headers according to the rules * defined in the section 2.1 and 3.2 of * [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf). */ static toHeaders(data) { const keys = Object.keys(data); const headers = {}; const headerNames = ['Object', 'Request', 'Session', 'Status']; const headerGroups = [{}, {}, {}, {}]; const headerMap = { br: 0, d: 0, ot: 0, tb: 0, bl: 1, dl: 1, mtp: 1, nor: 1, nrr: 1, su: 1, cid: 2, pr: 2, sf: 2, sid: 2, st: 2, v: 2, bs: 3, rtp: 3 }; for (const key of keys) { // Unmapped fields are mapped to the Request header const index = headerMap[key] != null ? headerMap[key] : 1; headerGroups[index][key] = data[key]; } for (let i = 0; i < headerGroups.length; i++) { const value = CMCDController.serialize(headerGroups[i]); if (value) { headers[`CMCD-${headerNames[i]}`] = value; } } return headers; } /** * Convert a CMCD data object to query args according to the rules * defined in the section 2.2 and 3.2 of * [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf). */ static toQuery(data) { return `CMCD=${encodeURIComponent(CMCDController.serialize(data))}`; } /** * Append query args to a uri. */ static appendQueryToUri(uri, query) { if (!query) { return uri; } const separator = uri.includes('?') ? 
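/*
 * CMCD collection is opt-in: the CMCDController constructor only wraps the playlist/fragment loaders
 * when config.cmcd is set. With useHeaders false (the default here), apply() appends a single CMCD
 * query argument via toQuery()/appendQueryToUri(); with useHeaders true it spreads the keys across
 * the CMCD-Object/Request/Session/Status request headers. Sketch; the ids are placeholders:
 *
 *   const cmcdConfig = {
 *     cmcd: {
 *       sessionId: 'placeholder-session-id',   // omitted: falls back to CMCDController.uuid()
 *       contentId: 'placeholder-content-id',
 *       useHeaders: false
 *     }
 *   };
 *   // a fragment request then looks like:
 *   //   https://cdn.example.com/seg_001.ts?CMCD=bl%3D2800%2Cbr%3D1000%2Cot%3Dv%2Csu
 */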
'&' : '?'; return `${uri}${separator}${query}`; } } const PATHWAY_PENALTY_DURATION_MS = 300000; class ContentSteeringController { constructor(hls) { this.hls = void 0; this.log = void 0; this.loader = null; this.uri = null; this.pathwayId = '.'; this.pathwayPriority = null; this.timeToLoad = 300; this.reloadTimer = -1; this.updated = 0; this.started = false; this.enabled = true; this.levels = null; this.audioTracks = null; this.subtitleTracks = null; this.penalizedPathways = {}; this.hls = hls; this.log = logger.log.bind(logger, `[content-steering]:`); this.registerListeners(); } registerListeners() { const hls = this.hls; hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this); hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.on(Events.ERROR, this.onError, this); } unregisterListeners() { const hls = this.hls; if (!hls) { return; } hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this); hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this); hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this); hls.off(Events.ERROR, this.onError, this); } startLoad() { this.started = true; self.clearTimeout(this.reloadTimer); if (this.enabled && this.uri) { if (this.updated) { const ttl = Math.max(this.timeToLoad * 1000 - (performance.now() - this.updated), 0); this.scheduleRefresh(this.uri, ttl); } else { this.loadSteeringManifest(this.uri); } } } stopLoad() { this.started = false; if (this.loader) { this.loader.destroy(); this.loader = null; } self.clearTimeout(this.reloadTimer); } destroy() { this.unregisterListeners(); this.stopLoad(); // @ts-ignore this.hls = null; this.levels = this.audioTracks = this.subtitleTracks = null; } removeLevel(levelToRemove) { const levels = this.levels; if (levels) { this.levels = levels.filter(level => level !== levelToRemove); } } onManifestLoading() { this.stopLoad(); this.enabled = true; this.timeToLoad = 300; this.updated = 0; this.uri = null; this.pathwayId = '.'; this.levels = this.audioTracks = this.subtitleTracks = null; } onManifestLoaded(event, data) { const { contentSteering } = data; if (contentSteering === null) { return; } this.pathwayId = contentSteering.pathwayId; this.uri = contentSteering.uri; if (this.started) { this.startLoad(); } } onManifestParsed(event, data) { this.audioTracks = data.audioTracks; this.subtitleTracks = data.subtitleTracks; } onError(event, data) { const { errorAction } = data; if ((errorAction == null ? 
void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox && errorAction.flags === ErrorActionFlags.MoveAllAlternatesMatchingHost) { let pathwayPriority = this.pathwayPriority; const pathwayId = this.pathwayId; if (!this.penalizedPathways[pathwayId]) { this.penalizedPathways[pathwayId] = performance.now(); } if (!pathwayPriority && this.levels) { // If PATHWAY-PRIORITY was not provided, list pathways for error handling pathwayPriority = this.levels.reduce((pathways, level) => { if (pathways.indexOf(level.pathwayId) === -1) { pathways.push(level.pathwayId); } return pathways; }, []); } if (pathwayPriority && pathwayPriority.length > 1) { this.updatePathwayPriority(pathwayPriority); errorAction.resolved = this.pathwayId !== pathwayId; } } } filterParsedLevels(levels) { // Filter levels to only include those that are in the initial pathway this.levels = levels; let pathwayLevels = this.getLevelsForPathway(this.pathwayId); if (pathwayLevels.length === 0) { const pathwayId = levels[0].pathwayId; this.log(`No levels found in Pathway ${this.pathwayId}. Setting initial Pathway to "${pathwayId}"`); pathwayLevels = this.getLevelsForPathway(pathwayId); this.pathwayId = pathwayId; } if (pathwayLevels.length !== levels.length) { this.log(`Found ${pathwayLevels.length}/${levels.length} levels in Pathway "${this.pathwayId}"`); return pathwayLevels; } return levels; } getLevelsForPathway(pathwayId) { if (this.levels === null) { return []; } return this.levels.filter(level => pathwayId === level.pathwayId); } updatePathwayPriority(pathwayPriority) { this.pathwayPriority = pathwayPriority; let levels; // Evaluate if we should remove the pathway from the penalized list const penalizedPathways = this.penalizedPathways; const now = performance.now(); Object.keys(penalizedPathways).forEach(pathwayId => { if (now - penalizedPathways[pathwayId] > PATHWAY_PENALTY_DURATION_MS) { delete penalizedPathways[pathwayId]; } }); for (let i = 0; i < pathwayPriority.length; i++) { const pathwayId = pathwayPriority[i]; if (penalizedPathways[pathwayId]) { continue; } if (pathwayId === this.pathwayId) { return; } const selectedIndex = this.hls.nextLoadLevel; const selectedLevel = this.hls.levels[selectedIndex]; levels = this.getLevelsForPathway(pathwayId); if (levels.length > 0) { this.log(`Setting Pathway to "${pathwayId}"`); this.pathwayId = pathwayId; this.hls.trigger(Events.LEVELS_UPDATED, { levels }); // Set LevelController's level to trigger LEVEL_SWITCHING which loads playlist if needed const levelAfterChange = this.hls.levels[selectedIndex]; if (selectedLevel && levelAfterChange && this.levels) { if (levelAfterChange.attrs['STABLE-VARIANT-ID'] !== selectedLevel.attrs['STABLE-VARIANT-ID'] && levelAfterChange.bitrate !== selectedLevel.bitrate) { this.log(`Unstable Pathways change from bitrate ${selectedLevel.bitrate} to ${levelAfterChange.bitrate}`); } this.hls.nextLoadLevel = selectedIndex; } break; } } } clonePathways(pathwayClones) { const levels = this.levels; if (!levels) { return; } const audioGroupCloneMap = {}; const subtitleGroupCloneMap = {}; pathwayClones.forEach(pathwayClone => { const { ID: cloneId, 'BASE-ID': baseId, 'URI-REPLACEMENT': uriReplacement } = pathwayClone; if (levels.some(level => level.pathwayId === cloneId)) { return; } const clonedVariants = this.getLevelsForPathway(baseId).map(baseLevel => { const levelParsed = _extends({}, baseLevel); levelParsed.details = undefined; levelParsed.url = performUriReplacement(baseLevel.uri, baseLevel.attrs['STABLE-VARIANT-ID'], 
'PER-VARIANT-URIS', uriReplacement); const attributes = new AttrList(baseLevel.attrs); attributes['PATHWAY-ID'] = cloneId; const clonedAudioGroupId = attributes.AUDIO && `${attributes.AUDIO}_clone_${cloneId}`; const clonedSubtitleGroupId = attributes.SUBTITLES && `${attributes.SUBTITLES}_clone_${cloneId}`; if (clonedAudioGroupId) { audioGroupCloneMap[attributes.AUDIO] = clonedAudioGroupId; attributes.AUDIO = clonedAudioGroupId; } if (clonedSubtitleGroupId) { subtitleGroupCloneMap[attributes.SUBTITLES] = clonedSubtitleGroupId; attributes.SUBTITLES = clonedSubtitleGroupId; } levelParsed.attrs = attributes; const clonedLevel = new Level(levelParsed); addGroupId(clonedLevel, 'audio', clonedAudioGroupId); addGroupId(clonedLevel, 'text', clonedSubtitleGroupId); return clonedLevel; }); levels.push(...clonedVariants); cloneRenditionGroups(this.audioTracks, audioGroupCloneMap, uriReplacement, cloneId); cloneRenditionGroups(this.subtitleTracks, subtitleGroupCloneMap, uriReplacement, cloneId); }); } loadSteeringManifest(uri) { const config = this.hls.config; const Loader = config.loader; if (this.loader) { this.loader.destroy(); } this.loader = new Loader(config); let url; try { url = new self.URL(uri); } catch (error) { this.enabled = false; this.log(`Failed to parse Steering Manifest URI: ${uri}`); return; } if (url.protocol !== 'data:') { const throughput = (this.hls.bandwidthEstimate || config.abrEwmaDefaultEstimate) | 0; url.searchParams.set('_HLS_pathway', this.pathwayId); url.searchParams.set('_HLS_throughput', '' + throughput); } const context = { responseType: 'json', url: url.href }; const loadPolicy = config.steeringManifestLoadPolicy.default; const legacyRetryCompatibility = loadPolicy.errorRetry || loadPolicy.timeoutRetry || {}; const loaderConfig = { loadPolicy, timeout: loadPolicy.maxLoadTimeMs, maxRetry: legacyRetryCompatibility.maxNumRetry || 0, retryDelay: legacyRetryCompatibility.retryDelayMs || 0, maxRetryDelay: legacyRetryCompatibility.maxRetryDelayMs || 0 }; const callbacks = { onSuccess: (response, stats, context, networkDetails) => { this.log(`Loaded steering manifest: "${url}"`); const steeringData = response.data; if (steeringData.VERSION !== 1) { this.log(`Steering VERSION ${steeringData.VERSION} not supported!`); return; } this.updated = performance.now(); this.timeToLoad = steeringData.TTL; const { 'RELOAD-URI': reloadUri, 'PATHWAY-CLONES': pathwayClones, 'PATHWAY-PRIORITY': pathwayPriority } = steeringData; if (reloadUri) { try { this.uri = new self.URL(reloadUri, url).href; } catch (error) { this.enabled = false; this.log(`Failed to parse Steering Manifest RELOAD-URI: ${reloadUri}`); return; } } this.scheduleRefresh(this.uri || context.url); if (pathwayClones) { this.clonePathways(pathwayClones); } if (pathwayPriority) { this.updatePathwayPriority(pathwayPriority); } }, onError: (error, context, networkDetails, stats) => { this.log(`Error loading steering manifest: ${error.code} ${error.text} (${context.url})`); this.stopLoad(); if (error.code === 410) { this.enabled = false; this.log(`Steering manifest ${context.url} no longer available`); return; } let ttl = this.timeToLoad * 1000; if (error.code === 429) { const loader = this.loader; if (typeof (loader == null ? 
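/*
 * The onSuccess handler above expects the steering server to answer with a VERSION 1 JSON document
 * and reads TTL, RELOAD-URI, PATHWAY-PRIORITY and PATHWAY-CLONES from it (clone entries are expanded
 * by clonePathways()/performUriReplacement()). Illustrative payload only; pathway ids and hosts are
 * placeholders:
 *
 *   {
 *     "VERSION": 1,
 *     "TTL": 300,
 *     "RELOAD-URI": "https://steering.example.com/manifest.json",
 *     "PATHWAY-PRIORITY": ["CDN-A", "CDN-B"],
 *     "PATHWAY-CLONES": [{
 *       "ID": "CDN-B",
 *       "BASE-ID": "CDN-A",
 *       "URI-REPLACEMENT": { "HOST": "cdn-b.example.com", "PARAMS": {} }
 *     }]
 *   }
 */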
void 0 : loader.getResponseHeader) === 'function') { const retryAfter = loader.getResponseHeader('Retry-After'); if (retryAfter) { ttl = parseFloat(retryAfter) * 1000; } } this.log(`Steering manifest ${context.url} rate limited`); return; } this.scheduleRefresh(this.uri || context.url, ttl); }, onTimeout: (stats, context, networkDetails) => { this.log(`Timeout loading steering manifest (${context.url})`); this.scheduleRefresh(this.uri || context.url); } }; this.log(`Requesting steering manifest: ${url}`); this.loader.load(context, loaderConfig, callbacks); } scheduleRefresh(uri, ttlMs = this.timeToLoad * 1000) { self.clearTimeout(this.reloadTimer); this.reloadTimer = self.setTimeout(() => { this.loadSteeringManifest(uri); }, ttlMs); } } function cloneRenditionGroups(tracks, groupCloneMap, uriReplacement, cloneId) { if (!tracks) { return; } Object.keys(groupCloneMap).forEach(audioGroupId => { const clonedTracks = tracks.filter(track => track.groupId === audioGroupId).map(track => { const clonedTrack = _extends({}, track); clonedTrack.details = undefined; clonedTrack.attrs = new AttrList(clonedTrack.attrs); clonedTrack.url = clonedTrack.attrs.URI = performUriReplacement(track.url, track.attrs['STABLE-RENDITION-ID'], 'PER-RENDITION-URIS', uriReplacement); clonedTrack.groupId = clonedTrack.attrs['GROUP-ID'] = groupCloneMap[audioGroupId]; clonedTrack.attrs['PATHWAY-ID'] = cloneId; return clonedTrack; }); tracks.push(...clonedTracks); }); } function performUriReplacement(uri, stableId, perOptionKey, uriReplacement) { const { HOST: host, PARAMS: params, [perOptionKey]: perOptionUris } = uriReplacement; let perVariantUri; if (stableId) { perVariantUri = perOptionUris == null ? void 0 : perOptionUris[stableId]; if (perVariantUri) { uri = perVariantUri; } } const url = new self.URL(uri); if (host && !perVariantUri) { url.host = host; } if (params) { Object.keys(params).sort().forEach(key => { if (key) { url.searchParams.set(key, params[key]); } }); } return url.href; } const AGE_HEADER_LINE_REGEX = /^age:\s*[\d.]+\s*$/im; class XhrLoader$1 { constructor(config) { this.xhrSetup = void 0; this.requestTimeout = void 0; this.retryTimeout = void 0; this.retryDelay = void 0; this.config = null; this.callbacks = null; this.context = void 0; this.loader = null; this.stats = void 0; this.xhrSetup = config ? 
config.xhrSetup || null : null; this.stats = new LoadStats(); this.retryDelay = 0; } destroy() { this.callbacks = null; this.abortInternal(); this.loader = null; this.config = null; } abortInternal() { const loader = this.loader; self.clearTimeout(this.requestTimeout); self.clearTimeout(this.retryTimeout); if (loader) { loader.onreadystatechange = null; loader.onprogress = null; if (loader.readyState !== 4) { this.stats.aborted = true; loader.abort(); } } } abort() { var _this$callbacks; this.abortInternal(); if ((_this$callbacks = this.callbacks) != null && _this$callbacks.onAbort) { this.callbacks.onAbort(this.stats, this.context, this.loader); } } load(context, config, callbacks) { if (this.stats.loading.start) { throw new Error('Loader can only be used once.'); } this.stats.loading.start = self.performance.now(); this.context = context; this.config = config; this.callbacks = callbacks; this.loadInternal(); } loadInternal() { const { config, context } = this; if (!config) { return; } const xhr = this.loader = new self.XMLHttpRequest(); const stats = this.stats; stats.loading.first = 0; stats.loaded = 0; stats.aborted = false; const xhrSetup = this.xhrSetup; if (xhrSetup) { Promise.resolve().then(() => { if (this.stats.aborted) return; return xhrSetup(xhr, context.url); }).catch(error => { xhr.open('GET', context.url, true); return xhrSetup(xhr, context.url); }).then(() => { if (this.stats.aborted) return; this.openAndSendXhr(xhr, context, config); }).catch(error => { // IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS this.callbacks.onError({ code: xhr.status, text: error.message }, context, xhr, stats); return; }); } else { this.openAndSendXhr(xhr, context, config); } } openAndSendXhr(xhr, context, config) { if (!xhr.readyState) { xhr.open('GET', context.url, true); } const headers = this.context.headers; const { maxTimeToFirstByteMs, maxLoadTimeMs } = config.loadPolicy; if (headers) { for (const header in headers) { xhr.setRequestHeader(header, headers[header]); } } if (context.rangeEnd) { xhr.setRequestHeader('Range', 'bytes=' + context.rangeStart + '-' + (context.rangeEnd - 1)); } xhr.onreadystatechange = this.readystatechange.bind(this); xhr.onprogress = this.loadprogress.bind(this); xhr.responseType = context.responseType; // setup timeout before we perform request self.clearTimeout(this.requestTimeout); config.timeout = maxTimeToFirstByteMs && isFiniteNumber(maxTimeToFirstByteMs) ? 
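/*
 * loadInternal() above runs the optional xhrSetup hook before openAndSendXhr(); if the hook throws
 * (for example by calling setRequestHeader on a request it has not opened yet), the loader opens the
 * request itself and runs the hook once more, and openAndSendXhr() only calls open() when the hook
 * has not already done so. Illustrative, non-executed sketch; header name and credential policy are
 * placeholders:
 *
 *   const xhrConfig = {
 *     xhrSetup: (xhr, url) => {
 *       xhr.open('GET', url, true);                              // open first so headers can be set
 *       xhr.withCredentials = true;                              // assumption: the server allows credentialed CORS
 *       xhr.setRequestHeader('X-Custom-Header', 'placeholder');  // placeholder header
 *     }
 *   };
 */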
maxTimeToFirstByteMs : maxLoadTimeMs; this.requestTimeout = self.setTimeout(this.loadtimeout.bind(this), config.timeout); xhr.send(); } readystatechange() { const { context, loader: xhr, stats } = this; if (!context || !xhr) { return; } const readyState = xhr.readyState; const config = this.config; // don't proceed if xhr has been aborted if (stats.aborted) { return; } // >= HEADERS_RECEIVED if (readyState >= 2) { if (stats.loading.first === 0) { stats.loading.first = Math.max(self.performance.now(), stats.loading.start); // readyState >= 2 AND readyState !==4 (readyState = HEADERS_RECEIVED || LOADING) rearm timeout as xhr not finished yet if (config.timeout !== config.loadPolicy.maxLoadTimeMs) { self.clearTimeout(this.requestTimeout); config.timeout = config.loadPolicy.maxLoadTimeMs; this.requestTimeout = self.setTimeout(this.loadtimeout.bind(this), config.loadPolicy.maxLoadTimeMs - (stats.loading.first - stats.loading.start)); } } if (readyState === 4) { self.clearTimeout(this.requestTimeout); xhr.onreadystatechange = null; xhr.onprogress = null; const status = xhr.status; // http status between 200 to 299 are all successful const useResponse = xhr.responseType !== 'text'; if (status >= 200 && status < 300 && (useResponse && xhr.response || xhr.responseText !== null)) { stats.loading.end = Math.max(self.performance.now(), stats.loading.first); const data = useResponse ? xhr.response : xhr.responseText; const len = xhr.responseType === 'arraybuffer' ? data.byteLength : data.length; stats.loaded = stats.total = len; stats.bwEstimate = stats.total * 8000 / (stats.loading.end - stats.loading.first); if (!this.callbacks) { return; } const onProgress = this.callbacks.onProgress; if (onProgress) { onProgress(stats, context, data, xhr); } if (!this.callbacks) { return; } const response = { url: xhr.responseURL, data: data, code: status }; this.callbacks.onSuccess(response, stats, context, xhr); } else { const retryConfig = config.loadPolicy.errorRetry; const retryCount = stats.retry; // if max nb of retries reached or if http status between 400 and 499 (such error cannot be recovered, retrying is useless), return error if (shouldRetry(retryConfig, retryCount, false, status)) { this.retry(retryConfig); } else { logger.error(`${status} while loading ${context.url}`); this.callbacks.onError({ code: status, text: xhr.statusText }, context, xhr, stats); } } } } } loadtimeout() { var _this$config; const retryConfig = (_this$config = this.config) == null ? void 0 : _this$config.loadPolicy.timeoutRetry; const retryCount = this.stats.retry; if (shouldRetry(retryConfig, retryCount, true)) { this.retry(retryConfig); } else { logger.warn(`timeout while loading ${this.context.url}`); const callbacks = this.callbacks; if (callbacks) { this.abortInternal(); callbacks.onTimeout(this.stats, this.context, this.loader); } } } retry(retryConfig) { const { context, stats } = this; this.retryDelay = getRetryDelay(retryConfig, stats.retry); stats.retry++; logger.warn(`${status ? 
'HTTP Status ' + status : 'Timeout'} while loading ${context.url}, retrying ${stats.retry}/${retryConfig.maxNumRetry} in ${this.retryDelay}ms`); // abort and reset internal state this.abortInternal(); this.loader = null; // schedule retry self.clearTimeout(this.retryTimeout); this.retryTimeout = self.setTimeout(this.loadInternal.bind(this), this.retryDelay); } loadprogress(event) { const stats = this.stats; stats.loaded = event.loaded; if (event.lengthComputable) { stats.total = event.total; } } getCacheAge() { let result = null; if (this.loader && AGE_HEADER_LINE_REGEX.test(this.loader.getAllResponseHeaders())) { const ageHeader = this.loader.getResponseHeader('age'); result = ageHeader ? parseFloat(ageHeader) : null; } return result; } getResponseHeader(name) { if (this.loader && new RegExp(`^${name}:\\s*[\\d.]+\\s*$`, 'im').test(this.loader.getAllResponseHeaders())) { return this.loader.getResponseHeader(name); } return null; } } function fetchSupported() { if ( // @ts-ignore self.fetch && self.AbortController && self.ReadableStream && self.Request) { try { new self.ReadableStream({}); // eslint-disable-line no-new return true; } catch (e) { /* noop */ } } return false; } const BYTERANGE = /(\d+)-(\d+)\/(\d+)/; class FetchLoader$1 { constructor(config /* HlsConfig */) { this.fetchSetup = void 0; this.requestTimeout = void 0; this.request = void 0; this.response = void 0; this.controller = void 0; this.context = void 0; this.config = null; this.callbacks = null; this.stats = void 0; this.loader = null; this.fetchSetup = config.fetchSetup || getRequest; this.controller = new self.AbortController(); this.stats = new LoadStats(); } destroy() { this.loader = this.callbacks = null; this.abortInternal(); } abortInternal() { const response = this.response; if (!(response != null && response.ok)) { this.stats.aborted = true; this.controller.abort(); } } abort() { var _this$callbacks; this.abortInternal(); if ((_this$callbacks = this.callbacks) != null && _this$callbacks.onAbort) { this.callbacks.onAbort(this.stats, this.context, this.response); } } load(context, config, callbacks) { const stats = this.stats; if (stats.loading.start) { throw new Error('Loader can only be used once.'); } stats.loading.start = self.performance.now(); const initParams = getRequestParameters(context, this.controller.signal); const onProgress = callbacks.onProgress; const isArrayBuffer = context.responseType === 'arraybuffer'; const LENGTH = isArrayBuffer ? 'byteLength' : 'length'; const { maxTimeToFirstByteMs, maxLoadTimeMs } = config.loadPolicy; this.context = context; this.config = config; this.callbacks = callbacks; this.request = this.fetchSetup(context, initParams); self.clearTimeout(this.requestTimeout); config.timeout = maxTimeToFirstByteMs && isFiniteNumber(maxTimeToFirstByteMs) ? 
maxTimeToFirstByteMs : maxLoadTimeMs; this.requestTimeout = self.setTimeout(() => { this.abortInternal(); callbacks.onTimeout(stats, context, this.response); }, config.timeout); self.fetch(this.request).then(response => { this.response = this.loader = response; const first = Math.max(self.performance.now(), stats.loading.start); self.clearTimeout(this.requestTimeout); config.timeout = maxLoadTimeMs; this.requestTimeout = self.setTimeout(() => { this.abortInternal(); callbacks.onTimeout(stats, context, this.response); }, maxLoadTimeMs - (first - stats.loading.start)); if (!response.ok) { const { status, statusText } = response; throw new FetchError(statusText || 'fetch, bad network response', status, response); } stats.loading.first = first; stats.total = getContentLength(response.headers) || stats.total; if (onProgress && isFiniteNumber(config.highWaterMark)) { return this.loadProgressively(response, stats, context, config.highWaterMark, onProgress); } if (isArrayBuffer) { return response.arrayBuffer(); } if (context.responseType === 'json') { return response.json(); } return response.text(); }).then(responseData => { const { response } = this; self.clearTimeout(this.requestTimeout); stats.loading.end = Math.max(self.performance.now(), stats.loading.first); const total = responseData[LENGTH]; if (total) { stats.loaded = stats.total = total; } const loaderResponse = { url: response.url, data: responseData, code: response.status }; if (onProgress && !isFiniteNumber(config.highWaterMark)) { onProgress(stats, context, responseData, response); } callbacks.onSuccess(loaderResponse, stats, context, response); }).catch(error => { self.clearTimeout(this.requestTimeout); if (stats.aborted) { return; } // CORS errors result in an undefined code. Set it to 0 here to align with XHR's behavior // when destroying, 'error' itself can be undefined const code = !error ? 0 : error.code || 0; const text = !error ? null : error.message; callbacks.onError({ code, text }, context, error ? error.details : null, stats); }); } getCacheAge() { let result = null; if (this.response) { const ageHeader = this.response.headers.get('age'); result = ageHeader ? parseFloat(ageHeader) : null; } return result; } getResponseHeader(name) { return this.response ? 
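/*
 * FetchLoader builds its Request through the fetchSetup hook (falling back to getRequest() below),
 * after getRequestParameters() has seeded method GET, mode cors, same-origin credentials, any
 * context headers and the Range header. Sketch of a hook that adjusts the init before constructing
 * the Request; the credentials change is an assumption about the target server:
 *
 *   const fetchConfig = {
 *     fetchSetup: (context, initParams) => {
 *       initParams.credentials = 'include';           // placeholder policy change
 *       return new Request(context.url, initParams);  // must return a Request, like getRequest() does
 *     }
 *   };
 */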
this.response.headers.get(name) : null; } loadProgressively(response, stats, context, highWaterMark = 0, onProgress) { const chunkCache = new ChunkCache(); const reader = response.body.getReader(); const pump = () => { return reader.read().then(data => { if (data.done) { if (chunkCache.dataLength) { onProgress(stats, context, chunkCache.flush(), response); } return Promise.resolve(new ArrayBuffer(0)); } const chunk = data.value; const len = chunk.length; stats.loaded += len; if (len < highWaterMark || chunkCache.dataLength) { // The current chunk is too small to to be emitted or the cache already has data // Push it to the cache chunkCache.push(chunk); if (chunkCache.dataLength >= highWaterMark) { // flush in order to join the typed arrays onProgress(stats, context, chunkCache.flush(), response); } } else { // If there's nothing cached already, and the chache is large enough // just emit the progress event onProgress(stats, context, chunk, response); } return pump(); }).catch(() => { /* aborted */ return Promise.reject(); }); }; return pump(); } } function getRequestParameters(context, signal) { const initParams = { method: 'GET', mode: 'cors', credentials: 'same-origin', signal, headers: new self.Headers(_extends({}, context.headers)) }; if (context.rangeEnd) { initParams.headers.set('Range', 'bytes=' + context.rangeStart + '-' + String(context.rangeEnd - 1)); } return initParams; } function getByteRangeLength(byteRangeHeader) { const result = BYTERANGE.exec(byteRangeHeader); if (result) { return parseInt(result[2]) - parseInt(result[1]) + 1; } } function getContentLength(headers) { const contentRange = headers.get('Content-Range'); if (contentRange) { const byteRangeLength = getByteRangeLength(contentRange); if (isFiniteNumber(byteRangeLength)) { return byteRangeLength; } } const contentLength = headers.get('Content-Length'); if (contentLength) { return parseInt(contentLength); } } function getRequest(context, initParams) { return new self.Request(context.url, initParams); } class FetchError extends Error { constructor(message, code, details) { super(message); this.code = void 0; this.details = void 0; this.code = code; this.details = details; } } const WHITESPACE_CHAR = /\s/; const Cues = { newCue(track, startTime, endTime, captionScreen) { const result = []; let row; // the type data states this is VTTCue, but it can potentially be a TextTrackCue on old browsers let cue; let indenting; let indent; let text; const Cue = self.VTTCue || self.TextTrackCue; for (let r = 0; r < captionScreen.rows.length; r++) { row = captionScreen.rows[r]; indenting = true; indent = 0; text = ''; if (!row.isEmpty()) { var _track$cues; for (let c = 0; c < row.chars.length; c++) { if (WHITESPACE_CHAR.test(row.chars[c].uchar) && indenting) { indent++; } else { text += row.chars[c].uchar; indenting = false; } } // To be used for cleaning-up orphaned roll-up captions row.cueStartTime = startTime; // Give a slight bump to the endTime if it's equal to startTime to avoid a SyntaxError in IE if (startTime === endTime) { endTime += 0.0001; } if (indent >= 16) { indent--; } else { indent++; } const cueText = fixLineBreaks(text.trim()); const id = generateCueId(startTime, endTime, cueText); // If this cue already exists in the track do not push it if (!(track != null && (_track$cues = track.cues) != null && _track$cues.getCueById(id))) { cue = new Cue(startTime, endTime, cueText); cue.id = id; cue.line = r + 1; cue.align = 'left'; // Clamp the position between 10 and 80 percent (CEA-608 PAC indent code) // 
https://dvcs.w3.org/hg/text-tracks/raw-file/default/608toVTT/608toVTT.html#positioning-in-cea-608 // Firefox throws an exception and captions break with out of bounds 0-100 values cue.position = 10 + Math.min(80, Math.floor(indent * 8 / 32) * 10); result.push(cue); } } } if (track && result.length) { // Sort bottom cues in reverse order so that they render in line order when overlapping in Chrome result.sort((cueA, cueB) => { if (cueA.line === 'auto' || cueB.line === 'auto') { return 0; } if (cueA.line > 8 && cueB.line > 8) { return cueB.line - cueA.line; } return cueA.line - cueB.line; }); result.forEach(cue => addCueToTrack(track, cue)); } return result; } }; /** * @deprecated use fragLoadPolicy.default */ /** * @deprecated use manifestLoadPolicy.default and playlistLoadPolicy.default */ const defaultLoadPolicy = { maxTimeToFirstByteMs: 8000, maxLoadTimeMs: 20000, timeoutRetry: null, errorRetry: null }; /** * @ignore * If possible, keep hlsDefaultConfig shallow * It is cloned whenever a new Hls instance is created, by keeping the config * shallow the properties are cloned, and we don't end up manipulating the default */ const hlsDefaultConfig = _objectSpread2(_objectSpread2({ autoStartLoad: true, // used by stream-controller startPosition: -1, // used by stream-controller defaultAudioCodec: undefined, // used by stream-controller debug: false, // used by logger capLevelOnFPSDrop: false, // used by fps-controller capLevelToPlayerSize: false, // used by cap-level-controller ignoreDevicePixelRatio: false, // used by cap-level-controller initialLiveManifestSize: 1, // used by stream-controller maxBufferLength: 30, // used by stream-controller backBufferLength: Infinity, // used by buffer-controller maxBufferSize: 60 * 1000 * 1000, // used by stream-controller maxBufferHole: 0.1, // used by stream-controller highBufferWatchdogPeriod: 2, // used by stream-controller nudgeOffset: 0.1, // used by stream-controller nudgeMaxRetry: 3, // used by stream-controller maxFragLookUpTolerance: 0.25, // used by stream-controller liveSyncDurationCount: 3, // used by latency-controller liveMaxLatencyDurationCount: Infinity, // used by latency-controller liveSyncDuration: undefined, // used by latency-controller liveMaxLatencyDuration: undefined, // used by latency-controller maxLiveSyncPlaybackRate: 1, // used by latency-controller liveDurationInfinity: false, // used by buffer-controller /** * @deprecated use backBufferLength */ liveBackBufferLength: null, // used by buffer-controller maxMaxBufferLength: 600, // used by stream-controller enableWorker: true, // used by transmuxer workerPath: null, // used by transmuxer enableSoftwareAES: true, // used by decrypter startLevel: undefined, // used by level-controller startFragPrefetch: false, // used by stream-controller fpsDroppedMonitoringPeriod: 5000, // used by fps-controller fpsDroppedMonitoringThreshold: 0.2, // used by fps-controller appendErrorMaxRetry: 3, // used by buffer-controller loader: XhrLoader$1, // loader: FetchLoader, fLoader: undefined, // used by fragment-loader pLoader: undefined, // used by playlist-loader xhrSetup: undefined, // used by xhr-loader licenseXhrSetup: undefined, // used by eme-controller licenseResponseCallback: undefined, // used by eme-controller abrController: AbrController, bufferController: BufferController, capLevelController: CapLevelController, errorController: ErrorController, fpsController: FPSController, stretchShortVideoTrack: false, // used by mp4-remuxer maxAudioFramesDrift: 1, // used by mp4-remuxer 
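/*
 * Every *LoadPolicy entry in this config follows the defaultLoadPolicy shape declared above:
 * maxTimeToFirstByteMs and maxLoadTimeMs plus optional timeoutRetry/errorRetry blocks. A user
 * override keeps that shape, e.g. for fragment loading (the numbers here are illustrative only;
 * the field names match the fragLoadPolicy default further down):
 *
 *   const userConfig = {
 *     fragLoadPolicy: {
 *       default: {
 *         maxTimeToFirstByteMs: 9000,
 *         maxLoadTimeMs: 100000,
 *         timeoutRetry: { maxNumRetry: 2, retryDelayMs: 0,    maxRetryDelayMs: 0 },
 *         errorRetry:   { maxNumRetry: 5, retryDelayMs: 1000, maxRetryDelayMs: 8000 }
 *       }
 *     }
 *   };
 */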
forceKeyFrameOnDiscontinuity: true, // used by ts-demuxer abrEwmaFastLive: 3, // used by abr-controller abrEwmaSlowLive: 9, // used by abr-controller abrEwmaFastVoD: 3, // used by abr-controller abrEwmaSlowVoD: 9, // used by abr-controller abrEwmaDefaultEstimate: 5e5, // 500 kbps // used by abr-controller abrBandWidthFactor: 0.95, // used by abr-controller abrBandWidthUpFactor: 0.7, // used by abr-controller abrMaxWithRealBitrate: false, // used by abr-controller maxStarvationDelay: 4, // used by abr-controller maxLoadingDelay: 4, // used by abr-controller minAutoBitrate: 0, // used by hls emeEnabled: false, // used by eme-controller widevineLicenseUrl: undefined, // used by eme-controller drmSystems: {}, // used by eme-controller drmSystemOptions: {}, // used by eme-controller requestMediaKeySystemAccessFunc: requestMediaKeySystemAccess , // used by eme-controller testBandwidth: true, progressive: false, lowLatencyMode: true, cmcd: undefined, enableDateRangeMetadataCues: true, enableEmsgMetadataCues: true, enableID3MetadataCues: true, certLoadPolicy: { default: defaultLoadPolicy }, keyLoadPolicy: { default: { maxTimeToFirstByteMs: 8000, maxLoadTimeMs: 20000, timeoutRetry: { maxNumRetry: 1, retryDelayMs: 1000, maxRetryDelayMs: 20000, backoff: 'linear' }, errorRetry: { maxNumRetry: 8, retryDelayMs: 1000, maxRetryDelayMs: 20000, backoff: 'linear' } } }, manifestLoadPolicy: { default: { maxTimeToFirstByteMs: Infinity, maxLoadTimeMs: 20000, timeoutRetry: { maxNumRetry: 2, retryDelayMs: 0, maxRetryDelayMs: 0 }, errorRetry: { maxNumRetry: 1, retryDelayMs: 1000, maxRetryDelayMs: 8000 } } }, playlistLoadPolicy: { default: { maxTimeToFirstByteMs: 10000, maxLoadTimeMs: 20000, timeoutRetry: { maxNumRetry: 2, retryDelayMs: 0, maxRetryDelayMs: 0 }, errorRetry: { maxNumRetry: 2, retryDelayMs: 1000, maxRetryDelayMs: 8000 } } }, fragLoadPolicy: { default: { maxTimeToFirstByteMs: 10000, maxLoadTimeMs: 120000, timeoutRetry: { maxNumRetry: 4, retryDelayMs: 0, maxRetryDelayMs: 0 }, errorRetry: { maxNumRetry: 6, retryDelayMs: 1000, maxRetryDelayMs: 8000 } } }, steeringManifestLoadPolicy: { default: { maxTimeToFirstByteMs: 10000, maxLoadTimeMs: 20000, timeoutRetry: { maxNumRetry: 2, retryDelayMs: 0, maxRetryDelayMs: 0 }, errorRetry: { maxNumRetry: 1, retryDelayMs: 1000, maxRetryDelayMs: 8000 } } }, // These default settings are deprecated in favor of the above policies // and are maintained for backwards compatibility manifestLoadingTimeOut: 10000, manifestLoadingMaxRetry: 1, manifestLoadingRetryDelay: 1000, manifestLoadingMaxRetryTimeout: 64000, levelLoadingTimeOut: 10000, levelLoadingMaxRetry: 4, levelLoadingRetryDelay: 1000, levelLoadingMaxRetryTimeout: 64000, fragLoadingTimeOut: 20000, fragLoadingMaxRetry: 6, fragLoadingRetryDelay: 1000, fragLoadingMaxRetryTimeout: 64000 }, timelineConfig()), {}, { subtitleStreamController: SubtitleStreamController , subtitleTrackController: SubtitleTrackController , timelineController: TimelineController , audioStreamController: AudioStreamController , audioTrackController: AudioTrackController , emeController: EMEController , cmcdController: CMCDController , contentSteeringController: ContentSteeringController }); function timelineConfig() { return { cueHandler: Cues, // used by timeline-controller enableWebVTT: true, // used by timeline-controller enableIMSC1: true, // used by timeline-controller enableCEA708Captions: true, // used by timeline-controller captionsTextTrack1Label: 'English', // used by timeline-controller captionsTextTrack1LanguageCode: 'en', // used by 
timeline-controller captionsTextTrack2Label: 'Spanish', // used by timeline-controller captionsTextTrack2LanguageCode: 'es', // used by timeline-controller captionsTextTrack3Label: 'Unknown CC', // used by timeline-controller captionsTextTrack3LanguageCode: '', // used by timeline-controller captionsTextTrack4Label: 'Unknown CC', // used by timeline-controller captionsTextTrack4LanguageCode: '', // used by timeline-controller renderTextTracksNatively: true }; } /** * @ignore */ function mergeConfig(defaultConfig, userConfig) { if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) { throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration"); } if (userConfig.liveMaxLatencyDurationCount !== undefined && (userConfig.liveSyncDurationCount === undefined || userConfig.liveMaxLatencyDurationCount <= userConfig.liveSyncDurationCount)) { throw new Error('Illegal hls.js config: "liveMaxLatencyDurationCount" must be greater than "liveSyncDurationCount"'); } if (userConfig.liveMaxLatencyDuration !== undefined && (userConfig.liveSyncDuration === undefined || userConfig.liveMaxLatencyDuration <= userConfig.liveSyncDuration)) { throw new Error('Illegal hls.js config: "liveMaxLatencyDuration" must be greater than "liveSyncDuration"'); } const defaultsCopy = deepCpy(defaultConfig); // Backwards compatibility with deprecated config values const deprecatedSettingTypes = ['manifest', 'level', 'frag']; const deprecatedSettings = ['TimeOut', 'MaxRetry', 'RetryDelay', 'MaxRetryTimeout']; deprecatedSettingTypes.forEach(type => { const policyName = `${type === 'level' ? 'playlist' : type}LoadPolicy`; const policyNotSet = userConfig[policyName] === undefined; const report = []; deprecatedSettings.forEach(setting => { const deprecatedSetting = `${type}Loading${setting}`; const value = userConfig[deprecatedSetting]; if (value !== undefined && policyNotSet) { report.push(deprecatedSetting); const settings = defaultsCopy[policyName].default; userConfig[policyName] = { default: settings }; switch (setting) { case 'TimeOut': settings.maxLoadTimeMs = value; settings.maxTimeToFirstByteMs = value; break; case 'MaxRetry': settings.errorRetry.maxNumRetry = value; settings.timeoutRetry.maxNumRetry = value; break; case 'RetryDelay': settings.errorRetry.retryDelayMs = value; settings.timeoutRetry.retryDelayMs = value; break; case 'MaxRetryTimeout': settings.errorRetry.maxRetryDelayMs = value; settings.timeoutRetry.maxRetryDelayMs = value; break; } } }); if (report.length) { logger.warn(`hls.js config: "${report.join('", "')}" setting(s) are deprecated, use "${policyName}": ${JSON.stringify(userConfig[policyName])}`); } }); return _objectSpread2(_objectSpread2({}, defaultsCopy), userConfig); } function deepCpy(obj) { if (obj && typeof obj === 'object') { if (Array.isArray(obj)) { return obj.map(deepCpy); } return Object.keys(obj).reduce((result, key) => { result[key] = deepCpy(obj[key]); return result; }, {}); } return obj; } /** * @ignore */ function enableStreamingMode(config) { const currentLoader = config.loader; if (currentLoader !== FetchLoader$1 && currentLoader !== XhrLoader$1) { // If a developer has configured their own loader, respect that choice logger.log('[config]: Custom loader detected, cannot enable progressive streaming'); config.progressive = false; } else { const canStreamProgressively = fetchSupported(); if 
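/*
 * Illustrative note on mergeConfig() above (assumption: no explicit
 * fragLoadPolicy is passed by the user): the deprecated flat retry settings
 * are rewritten into the newer per-request load policies, so the two configs
 * below should produce the same effective fragLoadPolicy.
 *
 *   // legacy style (deprecated; also triggers a deprecation warning in the log)
 *   const a = new Hls({ fragLoadingTimeOut: 30000, fragLoadingMaxRetry: 4 });
 *
 *   // policy style (preferred)
 *   const b = new Hls({
 *     fragLoadPolicy: {
 *       default: {
 *         maxTimeToFirstByteMs: 30000,
 *         maxLoadTimeMs: 30000,
 *         timeoutRetry: { maxNumRetry: 4, retryDelayMs: 0, maxRetryDelayMs: 0 },
 *         errorRetry: { maxNumRetry: 4, retryDelayMs: 1000, maxRetryDelayMs: 8000 }
 *       }
 *     }
 *   });
 */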
(canStreamProgressively) { config.loader = FetchLoader$1; config.progressive = true; config.enableSoftwareAES = true; logger.log('[config]: Progressive streaming enabled, using FetchLoader'); } } } /** * The `Hls` class is the core of the HLS.js library used to instantiate player instances. * @public */ class Hls { /** * The runtime configuration used by the player. At instantiation this is combination of `hls.userConfig` merged over `Hls.DefaultConfig`. */ /** * The configuration object provided on player instantiation. */ /** * Get the video-dev/hls.js package version. */ static get version() { return "1.4.12"; } /** * Check if the required MediaSource Extensions are available. */ static isSupported() { return isSupported(); } static get Events() { return Events; } static get ErrorTypes() { return ErrorTypes; } static get ErrorDetails() { return ErrorDetails; } /** * Get the default configuration applied to new instances. */ static get DefaultConfig() { if (!Hls.defaultConfig) { return hlsDefaultConfig; } return Hls.defaultConfig; } /** * Replace the default configuration applied to new instances. */ static set DefaultConfig(defaultConfig) { Hls.defaultConfig = defaultConfig; } /** * Creates an instance of an HLS client that can attach to exactly one `HTMLMediaElement`. * @param userConfig - Configuration options applied over `Hls.DefaultConfig` */ constructor(userConfig = {}) { this.config = void 0; this.userConfig = void 0; this.coreComponents = void 0; this.networkControllers = void 0; this._emitter = new EventEmitter(); this._autoLevelCapping = void 0; this._maxHdcpLevel = null; this.abrController = void 0; this.bufferController = void 0; this.capLevelController = void 0; this.latencyController = void 0; this.levelController = void 0; this.streamController = void 0; this.audioTrackController = void 0; this.subtitleTrackController = void 0; this.emeController = void 0; this.cmcdController = void 0; this._media = null; this.url = null; enableLogs(userConfig.debug || false, 'Hls instance'); const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig); this.userConfig = userConfig; this._autoLevelCapping = -1; if (config.progressive) { enableStreamingMode(config); } // core controllers and network loaders const { abrController: ConfigAbrController, bufferController: ConfigBufferController, capLevelController: ConfigCapLevelController, errorController: ConfigErrorController, fpsController: ConfigFpsController } = config; const errorController = new ConfigErrorController(this); const abrController = this.abrController = new ConfigAbrController(this); const bufferController = this.bufferController = new ConfigBufferController(this); const capLevelController = this.capLevelController = new ConfigCapLevelController(this); const fpsController = new ConfigFpsController(this); const playListLoader = new PlaylistLoader(this); const id3TrackController = new ID3TrackController(this); const ConfigContentSteeringController = config.contentSteeringController; // ConentSteeringController is defined before LevelController to receive Multivariant Playlist events first const contentSteering = ConfigContentSteeringController ? 
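/*
 * Illustrative usage sketch for the Hls class being constructed here (the
 * element id and the m3u8 URL are hypothetical; the flow mirrors how
 * HlsDecoder further below drives this class):
 *
 *   if (Hls.isSupported()) {
 *     const video = document.getElementById('video');
 *     const hls = new Hls();
 *     hls.attachMedia(video);
 *     hls.on(Hls.Events.MEDIA_ATTACHED, () => {
 *       hls.loadSource('https://example.com/live/stream.m3u8');
 *     });
 *     hls.on(Hls.Events.MANIFEST_PARSED, () => {
 *       video.play().catch(() => {}); // autoplay may be blocked
 *     });
 *   }
 */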
new ConfigContentSteeringController(this) : null; const levelController = this.levelController = new LevelController(this, contentSteering); // FragmentTracker must be defined before StreamController because the order of event handling is important const fragmentTracker = new FragmentTracker(this); const keyLoader = new KeyLoader(this.config); const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader); // Cap level controller uses streamController to flush the buffer capLevelController.setStreamController(streamController); // fpsController uses streamController to switch when frames are being dropped fpsController.setStreamController(streamController); const networkControllers = [playListLoader, levelController, streamController]; if (contentSteering) { networkControllers.splice(1, 0, contentSteering); } this.networkControllers = networkControllers; const coreComponents = [abrController, bufferController, capLevelController, fpsController, id3TrackController, fragmentTracker]; this.audioTrackController = this.createController(config.audioTrackController, networkControllers); const AudioStreamControllerClass = config.audioStreamController; if (AudioStreamControllerClass) { networkControllers.push(new AudioStreamControllerClass(this, fragmentTracker, keyLoader)); } // subtitleTrackController must be defined before subtitleStreamController because the order of event handling is important this.subtitleTrackController = this.createController(config.subtitleTrackController, networkControllers); const SubtitleStreamControllerClass = config.subtitleStreamController; if (SubtitleStreamControllerClass) { networkControllers.push(new SubtitleStreamControllerClass(this, fragmentTracker, keyLoader)); } this.createController(config.timelineController, coreComponents); keyLoader.emeController = this.emeController = this.createController(config.emeController, coreComponents); this.cmcdController = this.createController(config.cmcdController, coreComponents); this.latencyController = this.createController(LatencyController, coreComponents); this.coreComponents = coreComponents; // Error controller handles errors before and after all other controllers // This listener will be invoked after all other controllers error listeners networkControllers.push(errorController); const onErrorOut = errorController.onErrorOut; if (typeof onErrorOut === 'function') { this.on(Events.ERROR, onErrorOut, errorController); } } createController(ControllerClass, components) { if (ControllerClass) { const controllerInstance = new ControllerClass(this); if (components) { components.push(controllerInstance); } return controllerInstance; } return null; } // Delegate the EventEmitter through the public API of Hls.js on(event, listener, context = this) { this._emitter.on(event, listener, context); } once(event, listener, context = this) { this._emitter.once(event, listener, context); } removeAllListeners(event) { this._emitter.removeAllListeners(event); } off(event, listener, context = this, once) { this._emitter.off(event, listener, context, once); } listeners(event) { return this._emitter.listeners(event); } emit(event, name, eventObject) { return this._emitter.emit(event, name, eventObject); } trigger(event, eventObject) { if (this.config.debug) { return this.emit(event, event, eventObject); } else { try { return this.emit(event, event, eventObject); } catch (e) { logger.error('An internal error happened while handling event ' + event + '. Error message: "' + e.message + '". 
Here is a stacktrace:', e); this.trigger(Events.ERROR, { type: ErrorTypes.OTHER_ERROR, details: ErrorDetails.INTERNAL_EXCEPTION, fatal: false, event: event, error: e }); } } return false; } listenerCount(event) { return this._emitter.listenerCount(event); } /** * Dispose of the instance */ destroy() { logger.log('destroy'); this.trigger(Events.DESTROYING, undefined); this.detachMedia(); this.removeAllListeners(); this._autoLevelCapping = -1; this.url = null; this.networkControllers.forEach(component => component.destroy()); this.networkControllers.length = 0; this.coreComponents.forEach(component => component.destroy()); this.coreComponents.length = 0; // Remove any references that could be held in config options or callbacks const config = this.config; config.xhrSetup = config.fetchSetup = undefined; // @ts-ignore this.userConfig = null; } /** * Attaches Hls.js to a media element */ attachMedia(media) { logger.log('attachMedia'); this._media = media; this.trigger(Events.MEDIA_ATTACHING, { media: media }); } /** * Detach Hls.js from the media */ detachMedia() { logger.log('detachMedia'); this.trigger(Events.MEDIA_DETACHING, undefined); this._media = null; } /** * Set the source URL. Can be relative or absolute. */ loadSource(url) { this.stopLoad(); const media = this.media; const loadedSource = this.url; const loadingSource = this.url = urlToolkitExports.buildAbsoluteURL(self.location.href, url, { alwaysNormalize: true }); logger.log(`loadSource:${loadingSource}`); if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) { this.detachMedia(); this.attachMedia(media); } // when attaching to a source URL, trigger a playlist load this.trigger(Events.MANIFEST_LOADING, { url: url }); } /** * Start loading data from the stream source. * Depending on default config, client starts loading automatically when a source is set. * * @param startPosition - Set the start position to stream from. * Defaults to -1 (None: starts from earliest point) */ startLoad(startPosition = -1) { logger.log(`startLoad(${startPosition})`); this.networkControllers.forEach(controller => { controller.startLoad(startPosition); }); } /** * Stop loading of any stream data. */ stopLoad() { logger.log('stopLoad'); this.networkControllers.forEach(controller => { controller.stopLoad(); }); } /** * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1) */ swapAudioCodec() { logger.log('swapAudioCodec'); this.streamController.swapAudioCodec(); } /** * When the media-element fails, this allows to detach and then re-attach it * as one call (convenience method). * * Automatic recovery of media-errors by this process is configurable. */ recoverMediaError() { logger.log('recoverMediaError'); const media = this._media; this.detachMedia(); if (media) { this.attachMedia(media); } } removeLevel(levelIndex, urlId = 0) { this.levelController.removeLevel(levelIndex, urlId); } /** * @returns an array of levels (variants) sorted by HDCP-LEVEL, BANDWIDTH, SCORE, and RESOLUTION (height) */ get levels() { const levels = this.levelController.levels; return levels ? levels : []; } /** * Index of quality level (variant) currently played */ get currentLevel() { return this.streamController.currentLevel; } /** * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection. 
*/ set currentLevel(newLevel) { logger.log(`set currentLevel:${newLevel}`); this.loadLevel = newLevel; this.abrController.clearTimer(); this.streamController.immediateLevelSwitch(); } /** * Index of next quality level loaded as scheduled by stream controller. */ get nextLevel() { return this.streamController.nextLevel; } /** * Set quality level index for next loaded data. * This will switch the video quality asap, without interrupting playback. * May abort current loading of data, and flush parts of buffer (outside currently played fragment region). * @param newLevel - Pass -1 for automatic level selection */ set nextLevel(newLevel) { logger.log(`set nextLevel:${newLevel}`); this.levelController.manualLevel = newLevel; this.streamController.nextLevelSwitch(); } /** * Return the quality level of the currently or last (of none is loaded currently) segment */ get loadLevel() { return this.levelController.level; } /** * Set quality level index for next loaded data in a conservative way. * This will switch the quality without flushing, but interrupt current loading. * Thus the moment when the quality switch will appear in effect will only be after the already existing buffer. * @param newLevel - Pass -1 for automatic level selection */ set loadLevel(newLevel) { logger.log(`set loadLevel:${newLevel}`); this.levelController.manualLevel = newLevel; } /** * get next quality level loaded */ get nextLoadLevel() { return this.levelController.nextLoadLevel; } /** * Set quality level of next loaded segment in a fully "non-destructive" way. * Same as `loadLevel` but will wait for next switch (until current loading is done). */ set nextLoadLevel(level) { this.levelController.nextLoadLevel = level; } /** * Return "first level": like a default level, if not set, * falls back to index of first level referenced in manifest */ get firstLevel() { return Math.max(this.levelController.firstLevel, this.minAutoLevel); } /** * Sets "first-level", see getter. */ set firstLevel(newLevel) { logger.log(`set firstLevel:${newLevel}`); this.levelController.firstLevel = newLevel; } /** * Return start level (level of first fragment that will be played back) * if not overrided by user, first level appearing in manifest will be used as start level * if -1 : automatic start level selection, playback will start from level matching download bandwidth * (determined from download of first segment) */ get startLevel() { return this.levelController.startLevel; } /** * set start level (level of first fragment that will be played back) * if not overrided by user, first level appearing in manifest will be used as start level * if -1 : automatic start level selection, playback will start from level matching download bandwidth * (determined from download of first segment) */ set startLevel(newLevel) { logger.log(`set startLevel:${newLevel}`); // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel if (newLevel !== -1) { newLevel = Math.max(newLevel, this.minAutoLevel); } this.levelController.startLevel = newLevel; } /** * Whether level capping is enabled. * Default value is set via `config.capLevelToPlayerSize`. */ get capLevelToPlayerSize() { return this.config.capLevelToPlayerSize; } /** * Enables or disables level capping. If disabled after previously enabled, `nextLevelSwitch` will be immediately called. 
*/ set capLevelToPlayerSize(shouldStartCapping) { const newCapLevelToPlayerSize = !!shouldStartCapping; if (newCapLevelToPlayerSize !== this.config.capLevelToPlayerSize) { if (newCapLevelToPlayerSize) { this.capLevelController.startCapping(); // If capping occurs, nextLevelSwitch will happen based on size. } else { this.capLevelController.stopCapping(); this.autoLevelCapping = -1; this.streamController.nextLevelSwitch(); // Now we're uncapped, get the next level asap. } this.config.capLevelToPlayerSize = newCapLevelToPlayerSize; } } /** * Capping/max level value that should be used by automatic level selection algorithm (`ABRController`) */ get autoLevelCapping() { return this._autoLevelCapping; } /** * Returns the current bandwidth estimate in bits per second, when available. Otherwise, `NaN` is returned. */ get bandwidthEstimate() { const { bwEstimator } = this.abrController; if (!bwEstimator) { return NaN; } return bwEstimator.getEstimate(); } /** * get time to first byte estimate * @type {number} */ get ttfbEstimate() { const { bwEstimator } = this.abrController; if (!bwEstimator) { return NaN; } return bwEstimator.getEstimateTTFB(); } /** * Capping/max level value that should be used by automatic level selection algorithm (`ABRController`) */ set autoLevelCapping(newLevel) { if (this._autoLevelCapping !== newLevel) { logger.log(`set autoLevelCapping:${newLevel}`); this._autoLevelCapping = newLevel; } } get maxHdcpLevel() { return this._maxHdcpLevel; } set maxHdcpLevel(value) { if (HdcpLevels.indexOf(value) > -1) { this._maxHdcpLevel = value; } } /** * True when automatic level selection enabled */ get autoLevelEnabled() { return this.levelController.manualLevel === -1; } /** * Level set manually (if any) */ get manualLevel() { return this.levelController.manualLevel; } /** * min level selectable in auto mode according to config.minAutoBitrate */ get minAutoLevel() { const { levels, config: { minAutoBitrate } } = this; if (!levels) return 0; const len = levels.length; for (let i = 0; i < len; i++) { if (levels[i].maxBitrate >= minAutoBitrate) { return i; } } return 0; } /** * max level selectable in auto mode according to autoLevelCapping */ get maxAutoLevel() { const { levels, autoLevelCapping, maxHdcpLevel } = this; let maxAutoLevel; if (autoLevelCapping === -1 && levels && levels.length) { maxAutoLevel = levels.length - 1; } else { maxAutoLevel = autoLevelCapping; } if (maxHdcpLevel) { for (let i = maxAutoLevel; i--;) { const hdcpLevel = levels[i].attrs['HDCP-LEVEL']; if (hdcpLevel && hdcpLevel <= maxHdcpLevel) { return i; } } } return maxAutoLevel; } /** * next automatically selected quality level */ get nextAutoLevel() { // ensure next auto level is between min and max auto level return Math.min(Math.max(this.abrController.nextAutoLevel, this.minAutoLevel), this.maxAutoLevel); } /** * this setter is used to force next auto level. * this is useful to force a switch down in auto mode: * in case of load error on level N, hls.js can set nextAutoLevel to N-1 for example) * forced value is valid for one fragment. upon successful frag loading at forced level, * this value will be resetted to -1 by ABR controller. 
*/ set nextAutoLevel(nextLevel) { this.abrController.nextAutoLevel = Math.max(this.minAutoLevel, nextLevel); } /** * get the datetime value relative to media.currentTime for the active level Program Date Time if present */ get playingDate() { return this.streamController.currentProgramDateTime; } get mainForwardBufferInfo() { return this.streamController.getMainFwdBufferInfo(); } /** * Get the list of selectable audio tracks */ get audioTracks() { const audioTrackController = this.audioTrackController; return audioTrackController ? audioTrackController.audioTracks : []; } /** * index of the selected audio track (index in audio track lists) */ get audioTrack() { const audioTrackController = this.audioTrackController; return audioTrackController ? audioTrackController.audioTrack : -1; } /** * selects an audio track, based on its index in audio track lists */ set audioTrack(audioTrackId) { const audioTrackController = this.audioTrackController; if (audioTrackController) { audioTrackController.audioTrack = audioTrackId; } } /** * get alternate subtitle tracks list from playlist */ get subtitleTracks() { const subtitleTrackController = this.subtitleTrackController; return subtitleTrackController ? subtitleTrackController.subtitleTracks : []; } /** * index of the selected subtitle track (index in subtitle track lists) */ get subtitleTrack() { const subtitleTrackController = this.subtitleTrackController; return subtitleTrackController ? subtitleTrackController.subtitleTrack : -1; } get media() { return this._media; } /** * select an subtitle track, based on its index in subtitle track lists */ set subtitleTrack(subtitleTrackId) { const subtitleTrackController = this.subtitleTrackController; if (subtitleTrackController) { subtitleTrackController.subtitleTrack = subtitleTrackId; } } /** * Whether subtitle display is enabled or not */ get subtitleDisplay() { const subtitleTrackController = this.subtitleTrackController; return subtitleTrackController ? subtitleTrackController.subtitleDisplay : false; } /** * Enable/disable subtitle display rendering */ set subtitleDisplay(value) { const subtitleTrackController = this.subtitleTrackController; if (subtitleTrackController) { subtitleTrackController.subtitleDisplay = value; } } /** * get mode for Low-Latency HLS loading */ get lowLatencyMode() { return this.config.lowLatencyMode; } /** * Enable/disable Low-Latency HLS part playlist and segment loading, and start live streams at playlist PART-HOLD-BACK rather than HOLD-BACK. 
*/ set lowLatencyMode(mode) { this.config.lowLatencyMode = mode; } /** * Position (in seconds) of live sync point (ie edge of live position minus safety delay defined by ```hls.config.liveSyncDuration```) * @returns null prior to loading live Playlist */ get liveSyncPosition() { return this.latencyController.liveSyncPosition; } /** * Estimated position (in seconds) of live edge (ie edge of live playlist plus time sync playlist advanced) * @returns 0 before first playlist is loaded */ get latency() { return this.latencyController.latency; } /** * maximum distance from the edge before the player seeks forward to ```hls.liveSyncPosition``` * configured using ```liveMaxLatencyDurationCount``` (multiple of target duration) or ```liveMaxLatencyDuration``` * @returns 0 before first playlist is loaded */ get maxLatency() { return this.latencyController.maxLatency; } /** * target distance from the edge as calculated by the latency controller */ get targetLatency() { return this.latencyController.targetLatency; } /** * the rate at which the edge of the current live playlist is advancing or 1 if there is none */ get drift() { return this.latencyController.drift; } /** * set to true when startLoad is called before MANIFEST_PARSED event */ get forceStartLoad() { return this.streamController.forceStartLoad; } } Hls.defaultConfig = void 0; class HlsDecoder$1 extends Emitter { constructor(player) { super(); this.player = player; this.TAG_NAME = 'HlsDecoder'; player._opt; this.canVideoPlay = false; this.$videoElement = null; this.canvasRenderInterval = null; this.bandwidthEstimateInterval = null; this.fpsInterval = null; this.hlsFps = 0; this.hlsPrevFrams = 0; this.isInitInfo = false; this.eventsDestroy = []; this.supportVideoFrameCallbackHandle = null; // 支持原生的hls 播放。 if (this.player.isHlsCanVideoPlay()) { this.$videoElement = this.player.video.$videoElement; this.canVideoPlay = true; } else if (Hls.isSupported()) { this.$videoElement = this.player.video.$videoElement; this.hls = new Hls({ // levelLoadingTimeOut: _config.loadingTimeout * 1000, // levelLoadingMaxRetry: _config.loadingTimeoutReplayTimes, // maxBufferHole: 1,// // maxBufferLength: 2, }); this._initHls(); this._bindEvents(); } else { this.player.debug.error(this.TAG_NAME, 'init hls error ,not support '); } this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { return new Promise((resolve, reject) => { if (this.supportVideoFrameCallbackHandle && this.$videoElement) { this.$videoElement.cancelVideoFrameCallback(this.supportVideoFrameCallbackHandle); this.supportVideoFrameCallbackHandle = null; } if (this.hls) { this.hls.destroy(); this.hls = null; } if (this.eventsDestroy.length) { this.eventsDestroy.forEach(event => event()); this.eventsDestroy = []; } this.isInitInfo = false; this._stopCanvasRender(); this._stopBandwidthEstimateInterval(); this._stopFpsInterval(); this.$videoElement = null; this.hlsFps = 0; this.player.debug.log(this.TAG_NAME, 'destroy'); setTimeout(() => { resolve(); }, 0); }); } checkHlsBufferedDelay() { const $video = this.$videoElement; let result = 0; const ranges = $video.buffered; const buffered = ranges.length ? 
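/*
 * Descriptive note for this step (illustrative numbers): checkHlsBufferedDelay()
 * returns the distance between the end of the last buffered range and the
 * playhead. With buffer up to 12.8 s and currentTime at 10.3 s it returns
 * 2.5 s; a negative result means the playhead has run past the buffer, in
 * which case currentTime is snapped forward to the buffered end and 0 is
 * returned.
 */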
ranges.end(ranges.length - 1) : 0; result = buffered - $video.currentTime; if (result < 0) { this.player.debug.warn(this.TAG_NAME, `checkHlsBufferedDelay ${result} < 0, and buffered is ${buffered} ,currentTime is ${$video.currentTime} , try to seek ${$video.currentTime} to ${buffered}`); $video.currentTime = buffered; result = 0; } return result; } getFps() { return this.hlsFps; } _startCanvasRender() { if (supportVideoFrameCallback()) { this.supportVideoFrameCallbackHandle = this.$videoElement.requestVideoFrameCallback(this.videoFrameCallback.bind(this)); } else { this._stopCanvasRender(); this.canvasRenderInterval = setInterval(() => { this.player.video.render({ $video: this.$videoElement, ts: parseInt(this.$videoElement.currentTime * 1000, 10) || 0 }); }, 1000 / 25); } } _stopCanvasRender() { if (this.canvasRenderInterval) { clearInterval(this.canvasRenderInterval); this.canvasRenderInterval = null; } } videoFrameCallback(now) { let metaData = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; if (this.player.isDestroyedOrClosed()) { this.player.debug.log(this.TAG_NAME, 'videoFrameCallback() player is destroyed'); return; } const ts = parseInt(Math.max(metaData.mediaTime, this.$videoElement.currentTime) * 1000, 10) || 0; this.player.video.render({ $video: this.$videoElement, ts }); this.player.handleRender(); this.player.updateStats({ dts: ts }); this.supportVideoFrameCallbackHandle = this.$videoElement.requestVideoFrameCallback(this.videoFrameCallback.bind(this)); } _startBandwidthEstimateInterval() { this._stopBandwidthEstimateInterval(); this.bandwidthEstimateInterval = setInterval(() => { let bandwidthEstimate = 0; if (this.hls.bandwidthEstimate) { bandwidthEstimate = this.hls.bandwidthEstimate; } // todo: 感觉计算的有问题:(byteLen * 8 * 1000) / 1024 this.player.emit(EVENTS.kBps, (bandwidthEstimate / 1024 / 8 / 10).toFixed(2)); }, 1 * 1000); } _stopBandwidthEstimateInterval() { if (this.bandwidthEstimateInterval) { clearInterval(this.bandwidthEstimateInterval); this.bandwidthEstimateInterval = null; } } _startFpsInterval() { this._stopCanvasRender(); this.fpsInterval = setInterval(() => { if (this.$videoElement) { if (isFunction$1(this.$videoElement.getVideoPlaybackQuality)) { const videoPlaybackQuality = this.$videoElement.getVideoPlaybackQuality(); this.hlsFps = videoPlaybackQuality.totalVideoFrames - this.hlsPrevFrams; this.hlsPrevFrams = videoPlaybackQuality.totalVideoFrames; } else { const totalVideoFrames = this.$videoElement.webkitDecodedFrameCount || 0; this.hlsFps = totalVideoFrames - this.hlsPrevFrams; this.hlsPrevFrams = totalVideoFrames; } } }, 1 * 1000); } _stopFpsInterval() { if (this.fpsInterval) { clearInterval(this.fpsInterval); this.fpsInterval = null; } } _initHls() { if (this.player._opt.useCanvasRender) { this.$videoElement = document.createElement('video'); this.$videoElement.muted = true; if (isSafari()) { this.$videoElement.style.position = 'absolute'; } this.initVideoEvents(); // 这里需要监听事件 } this.hls.attachMedia(this.$videoElement); } _bindEvents() { const player = this.player; const { proxy } = this.player.events; this.hls; const $videoElement = this.$videoElement; const isSupportVideoFrameCallback = supportVideoFrameCallback(); const timeUpdateDestroy = proxy($videoElement, VIDEO_ELEMENT_EVENTS.timeUpdate, event => { if (this.hls) { const timestamp = parseInt(event.timeStamp, 10); if (this.player._opt.useCanvasRender && isFalse(isSupportVideoFrameCallback)) { player.updateStats({ ts: timestamp, dts: timestamp }); } } }); 
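/*
 * Illustrative sketch of the render-loop strategy used by _startCanvasRender()
 * above (a standalone rewrite with hypothetical startRenderLoop/draw names,
 * not part of the bundle): prefer requestVideoFrameCallback so the canvas is
 * painted once per decoded frame, and fall back to a ~25 fps timer where the
 * API is unavailable.
 *
 *   function startRenderLoop(video, draw) {
 *     if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
 *       let handle = video.requestVideoFrameCallback(function onFrame(now, metadata) {
 *         draw(video, metadata.mediaTime);                   // paint the decoded frame
 *         handle = video.requestVideoFrameCallback(onFrame); // schedule the next one
 *       });
 *       return () => video.cancelVideoFrameCallback(handle);
 *     }
 *     const timer = setInterval(() => draw(video, video.currentTime), 1000 / 25);
 *     return () => clearInterval(timer);
 *   }
 */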
this.eventsDestroy.push(timeUpdateDestroy); this._startBandwidthEstimateInterval(); this._startFpsInterval(); // error this.hls.on(Hls.Events.ERROR, (event, data) => { if (data.fatal) { switch (data.type) { case Hls.ErrorTypes.NETWORK_ERROR: // try to recover network error this.player.debug.warn(this.TAG_NAME, 'fatal network error encountered, try to recover'); // 应调用以恢复网络错误。 this.hls.startLoad(); break; case Hls.ErrorTypes.MEDIA_ERROR: this.player.debug.warn(this.TAG_NAME, 'fatal media error encountered, try to recover'); // 应调用以恢复媒体错误。 this.hls.recoverMediaError(); break; } } }); // 貌似没触发。。。 this.hls.on(Hls.Events.MEDIA_ATTACHING, () => { // this.player.debug.log(this.TAG_NAME, 'MEDIA_ATTACHING'); }); this.hls.on(Hls.Events.MEDIA_ATTACHED, () => { // this.player.debug.log(this.TAG_NAME, 'MEDIA_ATTACHED'); }); // 在媒体元素分解MediaSource之前被解雇 this.hls.on(Hls.Events.MEDIA_DETACHING, () => { // this.player.debug.log(this.TAG_NAME, 'MEDIA_DETACHING'); }); // 当MediaSource已经从媒体元素分离时触发 this.hls.on(Hls.Events.MEDIA_DETACHED, () => { // this.player.debug.log(this.TAG_NAME, 'MEDIA_DETACHED'); }); this.hls.on(Hls.Events.BUFFER_RESET, () => { // this.player.debug.log(this.TAG_NAME, 'BUFFER_RESET'); }); this.hls.on(Hls.Events.BUFFER_CODECS, () => { // this.player.debug.log(this.TAG_NAME, 'BUFFER_CODECS'); }); this.hls.on(Hls.Events.BUFFER_CREATED, () => { // this.player.debug.log(this.TAG_NAME, 'BUFFER_CREATED'); }); // buffer appending this.hls.on(Hls.Events.BUFFER_APPENDING, (event, payload) => { this.player.debug.log(this.TAG_NAME, 'BUFFER_APPENDING', payload.type); }); this.hls.on(Hls.Events.BUFFER_APPENDED, () => { // this.player.debug.log(this.TAG_NAME, 'BUFFER_APPENDED'); }); this.hls.on(Hls.Events.BUFFER_EOS, () => { // this.player.debug.log(this.TAG_NAME, 'fired when the stream is finished and we want to notify the media buffer that there will be no more data'); }); this.hls.on(Hls.Events.BUFFER_FLUSHING, () => { // this.player.debug.log(this.TAG_NAME, 'fired when the media buffer should be flushed'); }); this.hls.on(Hls.Events.BUFFER_FLUSHED, () => { // this.player.debug.log(this.TAG_NAME, 'fired when the media buffer has been flushed'); }); // 开始加载playlist m3u8资源 this.hls.on(Hls.Events.MANIFEST_LOADING, () => { this.player.debug.log(this.TAG_NAME, 'MANIFEST_LOADING 开始加载playlist m3u8资源'); }); // playlist m3u8文件加载完成 this.hls.on(Hls.Events.MANIFEST_LOADED, (event, data) => { this.player.debug.log(this.TAG_NAME, 'MANIFEST_LOADED playlist m3u8文件加载完成', data.url); }); // playlist m3u8解析完成 this.hls.on(Hls.Events.MANIFEST_PARSED, () => { this.player.debug.log(this.TAG_NAME, 'MANIFEST_PARSED playlist m3u8解析完成'); // 模拟 demux 时间 if (!player._times.demuxStart) { player._times.demuxStart = now$2(); } }); // 加载特定码率的m3u8文件 this.hls.on(Hls.Events.LEVEL_LOADING, () => { // this.player.debug.log(this.TAG_NAME, 'LEVEL_LOADING 加载特定码率的m3u8文件'); }); // 特定码率的m3u8文件解析完成,拿到该码率对应的ts列表 this.hls.on(Hls.Events.LEVEL_LOADED, (event, data) => { // this.player.debug.log(this.TAG_NAME, 'LEVEL_LOADED 特定码率的m3u8文件解析完成,拿到该码率对应的ts列表'); }); // 开始加载某个ts分片文件,开始根据ts片下载时间预估带宽 this.hls.on(Hls.Events.FRAG_LOADING, () => { // this.player.debug.log(this.TAG_NAME, 'FRAG_LOADING 开始加载某个ts分片文件,开始根据ts片下载时间预估带宽'); }); // ts分片文件加载成功,开始转码 this.hls.on(Hls.Events.FRAG_LOADED, (event, payload) => { // this.player.debug.log(this.TAG_NAME, 'FRAG_LOADED ts分片文件加载成功,开始转码'); // 模拟 decode 时间 if (!player._times.decodeStart) { player._times.decodeStart = now$2(); } // this.player.debug.log(this.TAG_NAME, 'FRAG_LOADED', payload); // const frag = payload.frag || 
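/*
 * Illustrative restatement of the Hls.Events.ERROR handler registered above
 * (the standard hls.js recovery recipe; only the two branches shown are
 * actually taken by the code above):
 *
 *   hls.on(Hls.Events.ERROR, (event, data) => {
 *     if (!data.fatal) return;                  // non-fatal errors recover internally
 *     if (data.type === Hls.ErrorTypes.NETWORK_ERROR) {
 *       hls.startLoad();                        // retry loading the stream
 *     } else if (data.type === Hls.ErrorTypes.MEDIA_ERROR) {
 *       hls.recoverMediaError();                // detach and re-attach the MediaSource
 *     }
 *     // destroying on other fatal types is common practice, but not done above
 *   });
 */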
{} // const stats = payload.stats || {} // const buffering = stats.buffering || {}; // this.player.debug.log(this.TAG_NAME, 'FRAG_LOADED buffering.end', buffering, buffering.end); // if (buffering.end) { // this.player.updateStats({ // dts: buffering.end // }) // } }); // 视频流赋给video标签 this.hls.on(Hls.Events.BUFFER_APPENDING, () => { // this.player.debug.log(this.TAG_NAME, 'BUFFER_APPENDING 视频流赋给video标签'); if (!player._times.videoStart) { player._times.videoStart = now$2(); player.handlePlayToRenderTimes(); } }); this.hls.on(Hls.Events.FRAG_DECRYPTED, () => { // this.player.debug.log(this.TAG_NAME, 'FRAG_DECRYPTED fired when a fragment decryption is completed'); }); this.hls.on(Hls.Events.KEY_LOADING, () => { // this.player.debug.log(this.TAG_NAME, 'KEY_LOADING fired when a decryption key loading starts'); }); this.hls.on(Hls.Events.KEY_LOADING, () => { // this.player.debug.log(this.TAG_NAME, 'KEY_LOADING fired when a fragment decryption is completed'); }); this.hls.on(Hls.Events.FPS_DROP, data => { // this.player.debug.log(this.TAG_NAME, 'FPS_DROP', data); }); this.hls.on(Hls.Events.FPS_DROP_LEVEL_CAPPING, data => { // this.player.debug.log(this.TAG_NAME, 'FPS_DROP_LEVEL_CAPPING', data); }); // fired when Init Segment has been extracted from fragment this.hls.on(Hls.Events.FRAG_PARSING_INIT_SEGMENT, (id, payload) => { this.player.debug.log(this.TAG_NAME, 'FRAG_PARSING_INIT_SEGMENT', payload); const hasAudio = payload && payload.tracks && payload.tracks.audio ? true : false; const hasVideo = payload && payload.tracks && payload.tracks.video ? true : false; if (hasAudio && payload.tracks.audio) { let track = payload.tracks.audio; const audioChannelCount = track.metadata && track.metadata.channelCount ? track.metadata.channelCount : 0; const audioCodec = track.codec; this.player.audio && this.player.audio.updateAudioInfo({ encType: audioCodec, channels: audioChannelCount, sampleRate: 44100 // default audioContext.sampleRate }); } if (hasVideo && payload.tracks.video) { let track = payload.tracks.video; let videoCodec = track.codec; const info = { encTypeCode: videoCodec.indexOf('avc') !== -1 ? 
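/*
 * Descriptive note (assumption: typical RFC 6381 codec strings): track.codec
 * is a string such as 'avc1.64001f' for H.264 or 'hvc1.1.6.L93.B0' /
 * 'hev1.1.6.L120.90' for H.265, so the 'avc' substring check here is what
 * distinguishes the two; anything without 'avc' is treated as H.265.
 */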
VIDEO_ENC_CODE.h264 : VIDEO_ENC_CODE.h265 }; if (track.metadata) { info.width = track.metadata.width; info.height = track.metadata.height; } this.player.video && this.player.video.updateVideoInfo(info); } }); } initVideoPlay(url) { if (this.player._opt.useCanvasRender) { this.$videoElement = document.createElement('video'); this.initVideoEvents(); } this.$videoElement.autoplay = true; this.$videoElement.muted = true; this.$videoElement.src = url; } _initRenderSize() { if (!this.isInitInfo) { this.player.video.updateVideoInfo({ width: this.$videoElement.videoWidth, height: this.$videoElement.videoHeight }); this.player.video.initCanvasViewSize(); this.isInitInfo = true; } } // initVideoEvents() { const { proxy } = this.player.events; const canPlayDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.canplay, () => { this.player.debug.log(this.TAG_NAME, 'video canplay'); this.$videoElement.play().then(() => { this.player.debug.log(this.TAG_NAME, 'video play'); this._startCanvasRender(); this._initRenderSize(); }).catch(e => { this.player.debug.warn(this.TAG_NAME, 'video play error ', e); }); }); const waitingDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.waiting, () => { // this.player.emit(EVENTS.videoWaiting); this.player.debug.log(this.TAG_NAME, 'video waiting'); }); const timeUpdateDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.timeUpdate, event => { const timeStamp = parseInt(event.timeStamp, 10); this.player.handleRender(); this.player.updateStats({ ts: timeStamp }); // this.player.emit(EVENTS.videoTimeUpdate, timeStamp); // check video is playing if (this.$videoElement.paused) { this.player.debug.warn(this.TAG_NAME, 'video is paused and next try to replay'); this.$videoElement.play().then(() => { this.player.debug.log(this.TAG_NAME, 'video is paused and replay success'); }).catch(e => { this.player.debug.warn(this.TAG_NAME, 'video is paused and replay error ', e); }); } }); const rateChangeDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.ratechange, () => { this.player.debug.log(this.TAG_NAME, 'video playback Rate change', this.$videoElement && this.$videoElement.playbackRate); }); this.eventsDestroy.push(canPlayDestroy, waitingDestroy, timeUpdateDestroy, rateChangeDestroy); } loadSource(url) { return new Promise((resolve, reject) => { if (this.canVideoPlay) { this.initVideoPlay(url); resolve(); } else { this.hls.on(Hls.Events.MEDIA_ATTACHED, () => { this.hls.loadSource(url); // this.hls.on(Hls.Events.MANIFEST_PARSED, function () { // resolve() // }) resolve(); }); } }); } } /** * @typedef {{ * isLive?: boolean, * softDecode?: boolean, * targetLatency?: number, * maxPlaylistSize?: number, * maxLatency?: number, * bufferBehind?: number, * maxJumpDistance?: number, * startTime?: number, * retryCount?: number, * retryDelay?: number, * loadTimeout?: number, * preloadTime?: number, * disconnectTime 是当直播暂停后,直播延迟超过该值就会断流,作为插件使用时该值是 0 秒,用户暂停直播后就会断流,点击播放会重新拉流。 * 但是用户频繁的暂停播放,就会导致频繁的断流和拉流,设置该值可以让短时间的停止播放不断流。 * disconnectTime?: number, 特有:直播断流时间,默认 0 秒,(独立使用时等于 maxLatency) * fetchOptions?: RequestInit * onPreM3U8Parse?: (m3u8: string) => string | void * decryptor?: Decryptor * }} HlsOption */ /** * @param {HlsOption} cfg * @returns {HlsOption} */ function getConfig$1(cfg) { return { isLive: true, maxPlaylistSize: 50, retryCount: 3, // 重试 3 次,默认值 retryDelay: 1000, // 每次重试间隔 1 秒,默认值 pollRetryCount: 2, loadTimeout: 10000, // 请求超时时间为 10 秒,默认值 preloadTime: 30, // preloadTime指定允许的预加载buffer的最大长度(单位s) softDecode: false, bufferBehind: 10, maxJumpDistance: 3, startTime: 0, // 
targetLatency 和 maxLatency 关系是,当直播延迟超过 maxLatency 时,hls 就会将当前播放时间点跳转到 targetLatency 位置。 // 所以配置这两个值的时候需要确保 maxLatency 大于 targetLatency,并且应该大很多,默认值是大两倍。 targetLatency: 10, // 特有:直播目标延迟,默认 10 秒 maxLatency: 20, // 特有:直播允许的最大延迟,默认 20 秒 allowedStreamTrackChange: true, ...cfg }; } const CACHESIZE = 2 * 1024 * 1024; const LoaderType = { FETCH: 'fetch', XHR: 'xhr' }; const ResponseType = { ARRAY_BUFFER: 'arraybuffer', TEXT: 'text', JSON: 'json' }; const EVENT = { REAL_TIME_SPEED: 'real_time_speed' }; const toString = Object.prototype.toString; function isObject(a) { return a !== null && typeof a === 'object'; } function isPlainObject(val) { if (toString.call(val) !== '[object Object]') { return false; } const prototype = Object.getPrototypeOf(val); return prototype === null || prototype === Object.prototype; } function isDate(a) { return toString.call(a) === '[object Date]'; } function getConfig(cfg) { return { loaderType: LoaderType.FETCH, retry: 0, retryDelay: 0, // ms timeout: 0, request: null, // Request onTimeout: undefined, onProgress: undefined, onRetryError: undefined, transformRequest: undefined, transformResponse: undefined, transformError: undefined, responseType: ResponseType.TEXT, range: undefined, url: '', params: undefined, method: 'GET', headers: {}, body: undefined, mode: undefined, credentials: undefined, cache: undefined, redirect: undefined, referrer: undefined, referrerPolicy: undefined, integrity: undefined, onProcessMinLen: 0, ...cfg }; } function getRangeValue(value) { if (!value || value[0] === null || value[0] === undefined || value[0] === 0 && (value[1] === null || value[1] === undefined)) { return; } let ret = 'bytes=' + value[0] + '-'; if (value[1]) ret += value[1]; return ret; } function encode(val) { return encodeURIComponent(val).replace(/%3A/gi, ':').replace(/%24/g, '$').replace(/%2C/gi, ',').replace(/%20/g, '+').replace(/%5B/gi, '[').replace(/%5D/gi, ']'); } function setUrlParams(url, params) { if (!url) return; if (!params) return url; let v; const str = Object.keys(params).map(k => { v = params[k]; if (v === null || v === undefined) return; if (Array.isArray(v)) { k = k + '[]'; } else { v = [v]; } return v.map(x => { if (isDate(x)) { x = x.toISOString(); } else if (isObject(x)) { x = JSON.stringify(x); } return `${encode(k)}=${encode(x)}`; }).join('&'); }).filter(Boolean).join('&'); if (str) { const hashIndex = url.indexOf('#'); if (hashIndex !== -1) { url = url.slice(0, hashIndex); } url += (url.indexOf('?') === -1 ? '?' : '&') + str; } return url; } function createResponse(data, done, response, contentLength, age, startTime, firstByteTime, index, range, vid, priOptions) { age = age !== null && age !== undefined ? 
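/*
 * Worked examples for the request helpers above (illustrative, not executed):
 *
 *   getRangeValue([100, 299])  // -> 'bytes=100-299'
 *   getRangeValue([100])       // -> 'bytes=100-'   (open-ended range)
 *   getRangeValue([0])         // -> undefined      (no Range header needed)
 *
 *   setUrlParams('https://example.com/seg.ts#frag', { vid: 'a b', idx: 2 })
 *   // -> 'https://example.com/seg.ts?vid=a+b&idx=2'
 *   //    (the hash is dropped, values are URL-encoded with spaces as '+')
 */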
parseFloat(age) : null; contentLength = parseInt(contentLength || '0', 10); if (Number.isNaN(contentLength)) contentLength = 0; const option = { range, vid, index, contentLength, age, startTime, firstByteTime, endTime: Date.now(), priOptions }; return { data, done, option, response }; } function calculateSpeed(byteLen, millisec) { return Math.round(byteLen * 8 * 1000 / millisec / 1024); } class NetError extends Error { constructor(url, request, response, msg) { super(msg); _defineProperty$1(this, "retryCount", 0); _defineProperty$1(this, "isTimeout", false); _defineProperty$1(this, "loaderType", LoaderType.FETCH); _defineProperty$1(this, "startTime", 0); _defineProperty$1(this, "endTime", 0); _defineProperty$1(this, "options", {}); this.url = url; this.request = request; this.response = response; } } class FetchLoader extends Emitter { // 比较私有化的参数传递,回调时候透传 constructor(player) { super(); _defineProperty$1(this, "_abortController", null); _defineProperty$1(this, "_timeoutTimer", null); _defineProperty$1(this, "_reader", null); _defineProperty$1(this, "_response", null); _defineProperty$1(this, "_aborted", false); _defineProperty$1(this, "_index", -1); _defineProperty$1(this, "_range", null); _defineProperty$1(this, "_receivedLength", 0); _defineProperty$1(this, "_running", false); _defineProperty$1(this, "_logger", null); _defineProperty$1(this, "_vid", ''); _defineProperty$1(this, "_onProcessMinLen", 0); _defineProperty$1(this, "_onCancel", null); _defineProperty$1(this, "_priOptions", null); _defineProperty$1(this, "TAG_NAME", 'FetchLoader'); this.player = player; } load(_ref) { var _this$_abortControlle; let { url, vid, timeout, // ms responseType, onProgress, index, onTimeout, onCancel, range, transformResponse, request, params, logger, method, headers, body, mode, credentials, cache, redirect, referrer, referrerPolicy, onProcessMinLen, priOptions } = _ref; this._aborted = false; this._onProcessMinLen = onProcessMinLen; this._onCancel = onCancel; this._abortController = typeof AbortController !== 'undefined' && new AbortController(); this._running = true; this._index = index; this._range = range || [0, 0]; this._vid = vid || url; this._priOptions = priOptions || {}; const init = { method, headers, body, mode, credentials, cache, redirect, referrer, referrerPolicy, signal: (_this$_abortControlle = this._abortController) === null || _this$_abortControlle === void 0 ? void 0 : _this$_abortControlle.signal }; let isTimeout = false; clearTimeout(this._timeoutTimer); url = setUrlParams(url, params); const rangeValue = getRangeValue(range); if (rangeValue) { if (request) { headers = request.headers; } else { headers = init.headers = init.headers || (Headers ? new Headers() : {}); } if (Headers && headers instanceof Headers) { headers.append('Range', rangeValue); } else { headers.Range = rangeValue; } } if (timeout) { this._timeoutTimer = setTimeout(() => { isTimeout = true; this.cancel(); if (onTimeout) { const error = new NetError(url, init, null, 'timeout'); error.isTimeout = true; onTimeout(error, { index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions }); } }, timeout); } const startTime = Date.now(); if (isNotEmpty(index) || isNotEmpty(range)) { this.player.debug.log(this.TAG_NAME, '[fetch load start], index,', index, ',range,', range); } return new Promise((resolve, reject) => { fetch(request || url, request ? 
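/*
 * Worked example for calculateSpeed() above (illustrative): the result is a
 * download speed in kbps, i.e. bytes converted to bits, normalised to one
 * second, then divided by 1024.
 *
 *   calculateSpeed(1048576, 500)  // 1 MiB in 500 ms -> 16384 kbps (~16 Mbps)
 *   calculateSpeed(262144, 1000)  // 256 KiB in 1 s  -> 2048 kbps
 */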
undefined : init).then(async response => { clearTimeout(this._timeoutTimer); this._response = response; if (this._aborted || !this._running) return; if (transformResponse) { response = transformResponse(response, url) || response; } if (!response.ok) { throw new NetError(url, init, response, 'bad network response'); } const firstByteTime = Date.now(); let data; if (responseType === ResponseType.TEXT) { data = await response.text(); this._running = false; } else if (responseType === ResponseType.JSON) { data = await response.json(); this._running = false; } else { if (onProgress) { this.resolve = resolve; this.reject = reject; this._loadChunk(response, onProgress, startTime, firstByteTime); return; } else { data = await response.arrayBuffer(); data = new Uint8Array(data); this._running = false; const costTime = Date.now() - startTime; const speed = calculateSpeed(data.byteLength, costTime); this.emit(EVENT.REAL_TIME_SPEED, { speed, len: data.byteLength, time: costTime, vid: this._vid, index: this._index, range: this._range, priOptions: this._priOptions }); } } if (isNotEmpty(index) || isNotEmpty(range)) { this.player.debug.log(this.TAG_NAME, '[fetch load end], index,', index, ',range,', range); } resolve(createResponse(data, true, response, response.headers.get('Content-Length'), response.headers.get('age'), startTime, firstByteTime, index, range, this._vid, this._priOptions)); }).catch(error => { var _error; clearTimeout(this._timeoutTimer); this._running = false; if (this._aborted && !isTimeout) return; error = error instanceof NetError ? error : new NetError(url, init, null, (_error = error) === null || _error === void 0 ? void 0 : _error.message); error.startTime = startTime; error.endTime = Date.now(); error.isTimeout = isTimeout; error.options = { index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions }; reject(error); }); }); } async cancel() { if (this._aborted) return; this._aborted = true; this._running = false; if (this._response) { try { // await this._response.body.cancel() if (this._reader) { await this._reader.cancel(); } } catch (error) { // ignore } this._response = this._reader = null; } if (this._abortController) { try { this._abortController.abort(); } catch (error) { // ignore } this._abortController = null; } if (this._onCancel) { this._onCancel({ index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions }); } } _loadChunk(response, onProgress, st, firstByteTime) { if (!response.body || !response.body.getReader) { this._running = false; const err = new NetError(response.url, '', response, 'onProgress of bad response.body.getReader'); err.options = { index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions }; this.reject(err); return; } if (this._onProcessMinLen > 0) { this._cache = new Uint8Array(CACHESIZE); this._writeIdx = 0; } const reader = this._reader = response.body.getReader(); let data; let startTime; let endTime; const pump = async () => { var _this$_range; startTime = Date.now(); try { data = await reader.read(); endTime = Date.now(); } catch (e) { // request aborted endTime = Date.now(); if (!this._aborted) { this._running = false; e.options = { index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions }; this.reject(e); } return; } const startRange = ((_this$_range = this._range) === null || _this$_range === void 0 ? void 0 : _this$_range.length) > 0 ? 
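/*
 * Illustrative usage sketch for FetchLoader above (assumptions: a `player`
 * object exposing the debug logger this class uses, a reachable URL, and a
 * hypothetical demux() consumer):
 *
 *   const loader = new FetchLoader(player);
 *   loader.load({
 *     url: 'https://example.com/seg_1.ts',
 *     responseType: ResponseType.ARRAY_BUFFER,
 *     range: [0, 1023],                       // adds 'Range: bytes=0-1023'
 *     timeout: 10000,
 *     onProgress: (chunk, done, info) => {    // streamed chunks; last call has done === true
 *       if (chunk) demux(chunk);
 *     }
 *   }).then(res => console.log('finished', res.option.contentLength));
 *   // loader.cancel() aborts the request and stops the read loop.
 */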
this._range[0] : 0; const startByte = startRange + this._receivedLength; if (this._aborted) { this._running = false; onProgress(undefined, false, { range: [startByte, startByte], vid: this._vid, index: this._index, startTime, endTime, st, firstByteTime, priOptions: this._priOptions }, response); return; } const curLen = data.value ? data.value.byteLength : 0; this._receivedLength += curLen; this.player.debug.log(this.TAG_NAME, '【fetchLoader,onProgress call】,task,', this._range, ', start,', startByte, ', end,', startRange + this._receivedLength, ', done,', data.done); let retData; if (this._onProcessMinLen > 0) { if (this._writeIdx + curLen >= this._onProcessMinLen || data.done) { retData = new Uint8Array(this._writeIdx + curLen); retData.set(this._cache.slice(0, this._writeIdx), 0); curLen > 0 && retData.set(data.value, this._writeIdx); this._writeIdx = 0; this.player.debug.log(this.TAG_NAME, '【fetchLoader,onProgress enough】,done,', data.done, ',len,', retData.byteLength, ', writeIdx,', this._writeIdx); } else { if (curLen > 0 && this._writeIdx + curLen < CACHESIZE) { this._cache.set(data.value, this._writeIdx); this._writeIdx += curLen; this.player.debug.log(this.TAG_NAME, '【fetchLoader,onProgress cache】,len,', curLen, ', writeIdx,', this._writeIdx); } else if (curLen > 0) { const temp = new Uint8Array(this._writeIdx + curLen + 2048); this.player.debug.log(this.TAG_NAME, '【fetchLoader,onProgress extra start】,size,', this._writeIdx + curLen + 2048, ', datalen,', curLen, ', writeIdx,', this._writeIdx); temp.set(this._cache.slice(0, this._writeIdx), 0); curLen > 0 && temp.set(data.value, this._writeIdx); this._writeIdx += curLen; delete this._cache; this._cache = temp; this.player.debug.log(this.TAG_NAME, '【fetchLoader,onProgress extra end】,len,', curLen, ', writeIdx,', this._writeIdx); } } } else { retData = data.value; } if (retData && retData.byteLength > 0 || data.done) { onProgress(retData, data.done, { range: [this._range[0] + this._receivedLength - (retData ? retData.byteLength : 0), this._range[0] + this._receivedLength], vid: this._vid, index: this._index, startTime, endTime, st, firstByteTime, priOptions: this._priOptions }, response); } if (!data.done) { pump(); } else { const costTime = Date.now() - st; const speed = calculateSpeed(this._receivedLength, costTime); this.emit(EVENT.REAL_TIME_SPEED, { speed, len: this._receivedLength, time: costTime, vid: this._vid, index: this._index, range: this._range, priOptions: this._priOptions }); this._running = false; this.player.debug.log(this.TAG_NAME, '[fetchLoader onProgress end],task,', this._range, ',done,', data.done); this.resolve(createResponse(data, true, response, response.headers.get('Content-Length'), response.headers.get('age'), st, firstByteTime, this._index, this._range, this._vid, this._priOptions)); } }; pump(); } get receiveLen() { return this._receivedLength; } get running() { return this._running; } set running(status) { this._running = status; } static isSupported() { return !!(typeof fetch !== 'undefined'); } } class Task { constructor(type, config, player) { _defineProperty$1(this, "TAG_NAME", 'Task'); this.promise = createPublicPromise(); this.alive = !!config.onProgress; this._loaderType = type; this.player = player; this._loader = type === LoaderType.FETCH && !!window.fetch ? 
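/*
 * Descriptive note on the chunk-accumulation logic above (illustrative
 * numbers): when onProcessMinLen is set, e.g. 4096, fetch chunks smaller than
 * that are copied into _cache and onProgress is only invoked once at least
 * 4096 bytes have accumulated (or the stream is done). A read sequence of
 * 1500 + 1500 + 2000 bytes therefore surfaces as a single 5000-byte callback,
 * which keeps downstream demuxing from being fed fragments that are too small
 * to parse.
 */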
new FetchLoader(player) : new XhrLoader(player); this._config = config; this._retryCount = 0; this._retryTimer = null; this._canceled = false; this._retryCheckFunc = config.retryCheckFunc; } exec() { const { retry, retryDelay, onRetryError, transformError, ...rest } = this._config; const request = async () => { try { const response = await this._loader.load(rest); this.promise.resolve(response); } catch (e) { this._loader.running = false; this.player.debug.log(this.TAG_NAME, '[task request catch err]', e); if (this._canceled) return; e.loaderType = this._loaderType; e.retryCount = this._retryCount; let error = e; if (transformError) { error = transformError(error) || error; } if (onRetryError && this._retryCount > 0) onRetryError(error, this._retryCount, { index: rest.index, vid: rest.vid, range: rest.range, priOptions: rest.priOptions }); this._retryCount++; let isRetry = true; if (this._retryCheckFunc) { isRetry = this._retryCheckFunc(e); } if (isRetry && this._retryCount <= retry) { clearTimeout(this._retryTimer); this.player.debug.log(this.TAG_NAME, '[task request setTimeout],retry', this._retryCount, ',retry range,', rest.range); this._retryTimer = setTimeout(request, retryDelay); return; } this.promise.reject(error); } }; request(); return this.promise; } async cancel() { clearTimeout(this._retryTimer); this._canceled = true; this._loader.running = false; return this._loader.cancel(); } get running() { return this._loader && this._loader.running; } get loader() { return this._loader; } } class XhrLoader extends Emitter { // _chunkSizeKBList = [ // 128, 256, 384, 512, 768, 1024, 1536, 2048, 3072, 4096, 5120, 6144, 7168, 8192 // ] // _speedSampler = new SpeedSampler() // 比较私有化的参数传递,回调时候透传 constructor(player) { super(); _defineProperty$1(this, "_xhr", null); _defineProperty$1(this, "_aborted", false); _defineProperty$1(this, "_timeoutTimer", null); _defineProperty$1(this, "_range", null); _defineProperty$1(this, "_receivedLength", 0); _defineProperty$1(this, "_url", null); _defineProperty$1(this, "_onProgress", null); _defineProperty$1(this, "_index", -1); _defineProperty$1(this, "_headers", null); _defineProperty$1(this, "_currentChunkSizeKB", 384); _defineProperty$1(this, "_timeout", null); _defineProperty$1(this, "_xhr", null); _defineProperty$1(this, "_withCredentials", null); _defineProperty$1(this, "_startTime", -1); _defineProperty$1(this, "_loadCompleteResolve", null); _defineProperty$1(this, "_loadCompleteReject", null); _defineProperty$1(this, "_runing", false); _defineProperty$1(this, "_logger", false); _defineProperty$1(this, "_vid", ''); _defineProperty$1(this, "_responseType", void 0); _defineProperty$1(this, "_credentials", void 0); _defineProperty$1(this, "_method", void 0); _defineProperty$1(this, "_transformResponse", void 0); _defineProperty$1(this, "_firstRtt", void 0); _defineProperty$1(this, "_onCancel", null); _defineProperty$1(this, "_priOptions", null); _defineProperty$1(this, "TAG_NAME", 'XhrLoader'); this.player = player; } load(req) { clearTimeout(this._timeoutTimer); this._range = req.range; this._onProgress = req.onProgress; this._index = req.index; this._headers = req.headers; this._withCredentials = req.credentials === 'include' || req.credentials === 'same-origin'; this._body = req.body || null; req.method && (this._method = req.method); this._timeout = req.timeout || null; this._runing = true; this._vid = req.vid || req.url; this._responseType = req.responseType; this._firstRtt = -1; this._onTimeout = req.onTimeout; this._onCancel = req.onCancel; 
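/*
 * Illustrative sketch of how Task above wraps a loader with retry behaviour
 * (the config values are arbitrary, `player` provides the debug logger used
 * by the loaders, and parse() is hypothetical):
 *
 *   const task = new Task(LoaderType.FETCH, {
 *     url: 'https://example.com/playlist.m3u8',
 *     responseType: ResponseType.TEXT,
 *     retry: 3,                                // up to 3 additional attempts
 *     retryDelay: 1000,                        // 1 s between attempts
 *     retryCheckFunc: err => !err.isTimeout,   // e.g. do not retry timeouts
 *     onRetryError: (err, count) => console.warn('retry', count, err.url)
 *   }, player);
 *   task.exec()
 *     .then(res => parse(res.data))
 *     .catch(err => console.error('gave up after retries', err));
 *   // task.cancel() aborts the in-flight request and stops further retries.
 */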
this._request = req.request; this._priOptions = req.priOptions || {}; this.player.debug.log(this.TAG_NAME, '【xhrLoader task】, range', this._range); this._url = setUrlParams(req.url, req.params); const startTime = Date.now(); return new Promise((resolve, reject) => { this._loadCompleteResolve = resolve; this._loadCompleteReject = reject; this._startLoad(); }).catch(error => { clearTimeout(this._timeoutTimer); this._runing = false; if (this._aborted) return; error = error instanceof NetError ? error : new NetError(this._url, this._request); error.startTime = startTime; error.endTime = Date.now(); error.options = { index: this._index, vid: this._vid, priOptions: this._priOptions }; throw error; }); } _startLoad() { let range = null; if (this._responseType === ResponseType.ARRAY_BUFFER && this._range && this._range.length > 1) { if (this._onProgress) { this._firstRtt = -1; const chunkSize = this._currentChunkSizeKB * 1024; const from = this._range[0] + this._receivedLength; let to = this._range[1]; if (chunkSize < this._range[1] - from) { to = from + chunkSize; } range = [from, to]; this.player.debug.log(this.TAG_NAME, '[xhr_loader->],tast :', this._range, ', SubRange, ', range); } else { range = this._range; this.player.debug.log(this.TAG_NAME, '[xhr_loader->],tast :', this._range, ', allRange, ', range); } } this._internalOpen(range); } _internalOpen(range) { try { this._startTime = Date.now(); const xhr = this._xhr = new XMLHttpRequest(); xhr.open(this._method || 'GET', this._url, true); xhr.responseType = this._responseType; this._timeout && (xhr.timeout = this._timeout); xhr.withCredentials = this._withCredentials; xhr.onload = this._onLoad.bind(this); xhr.onreadystatechange = this._onReadyStatechange.bind(this); xhr.onerror = errorEvent => { var _errorEvent$currentTa, _errorEvent$currentTa2, _errorEvent$currentTa3; this._running = false; const error = new NetError(this._url, this._request, errorEvent === null || errorEvent === void 0 ? void 0 : (_errorEvent$currentTa = errorEvent.currentTarget) === null || _errorEvent$currentTa === void 0 ? void 0 : _errorEvent$currentTa.response, 'xhr.onerror.status:' + (errorEvent === null || errorEvent === void 0 ? void 0 : (_errorEvent$currentTa2 = errorEvent.currentTarget) === null || _errorEvent$currentTa2 === void 0 ? void 0 : _errorEvent$currentTa2.status) + ',statusText,' + (errorEvent === null || errorEvent === void 0 ? void 0 : (_errorEvent$currentTa3 = errorEvent.currentTarget) === null || _errorEvent$currentTa3 === void 0 ? 
void 0 : _errorEvent$currentTa3.statusText)); error.options = { index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions }; this._loadCompleteReject(error); }; xhr.ontimeout = event => { this.cancel(); const error = new NetError(this._url, this._request, { status: 408 }, 'timeout'); if (this._onTimeout) { error.isTimeout = true; this._onTimeout(error, { index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions }); } error.options = { index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions }; this._loadCompleteReject(error); }; const headers = this._headers || {}; const rangeValue = getRangeValue(range); if (rangeValue) { headers.Range = rangeValue; } if (headers) { Object.keys(headers).forEach(k => { xhr.setRequestHeader(k, headers[k]); }); } this.player.debug.log(this.TAG_NAME, '[xhr.send->] tast,', this._range, ',load sub range, ', range); xhr.send(this._body); } catch (e) { e.options = { index: this._index, range, vid: this._vid, priOptions: this._priOptions }; this._loadCompleteReject(e); } } _onReadyStatechange(e) { const xhr = e.target; if (xhr.readyState === 2) { this._firstRtt < 0 && (this._firstRtt = Date.now()); } } _onLoad(e) { var _this$_range2; const status = e.target.status; if (status < 200 || status > 299) { const error = new NetError(this._url, null, { ...e.target.response, status }, 'bad response,status:' + status); error.options = { index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions }; return this._loadCompleteReject(error); } let data = null; let done = false; let byteStart; const startRange = ((_this$_range2 = this._range) === null || _this$_range2 === void 0 ? void 0 : _this$_range2.length) > 0 ? this._range[0] : 0; if (this._responseType === ResponseType.ARRAY_BUFFER) { var _this$_range3; const chunk = new Uint8Array(e.target.response); byteStart = startRange + this._receivedLength; if (chunk && chunk.byteLength > 0) { this._receivedLength += chunk.byteLength; const costTime = Date.now() - this._startTime; const speed = calculateSpeed(this._receivedLength, costTime); this.emit(EVENT.REAL_TIME_SPEED, { speed, len: this._receivedLength, time: costTime, vid: this._vid, index: this._index, range: [byteStart, startRange + this._receivedLength], priOptions: this._priOptions }); } data = chunk; if (((_this$_range3 = this._range) === null || _this$_range3 === void 0 ? void 0 : _this$_range3.length) > 1 && this._range[1] && this._receivedLength < this._range[1] - this._range[0]) { done = false; } else { done = true; } this.player.debug.log(this.TAG_NAME, '[xhr load done->], tast :', this._range, ', start', byteStart, 'end ', startRange + this._receivedLength, ',dataLen,', chunk ? 
chunk.byteLength : 0, ',receivedLength', this._receivedLength, ',index,', this._index, ', done,', done); } else { done = true; data = e.target.response; } let response = { ok: status >= 200 && status < 300, status, statusText: this._xhr.statusText, url: this._xhr.responseURL, headers: this._getHeaders(this._xhr), body: this._xhr.response }; if (this._transformResponse) { response = this._transformResponse(response, this._url) || response; } if (this._onProgress) { this._onProgress(data, done, { index: this._index, vid: this._vid, range: [byteStart, startRange + this._receivedLength], startTime: this._startTime, endTime: Date.now(), priOptions: this._priOptions }, response); } if (!done) { this._startLoad(); } else { this._runing = false; this._loadCompleteResolve && this._loadCompleteResolve(createResponse(this._onProgress ? null : data, done, response, response.headers['content-length'], response.headers.age, this._startTime, this._firstRtt, this._index, this._range, this._vid, this._priOptions)); } } cancel() { if (this._aborted) return; this._aborted = true; this._runing = false; super.removeAllListeners(); if (this._onCancel) { this._onCancel({ index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions }); } if (this._xhr) { return this._xhr.abort(); } } static isSupported() { return typeof XMLHttpRequest !== 'undefined'; } get receiveLen() { return this._receivedLength; } get running() { return this._running; } set running(status) { this._running = status; } _getHeaders(xhr) { const headerLines = xhr.getAllResponseHeaders().trim().split('\r\n'); const headers = {}; for (const header of headerLines) { const parts = header.split(': '); headers[parts[0].toLowerCase()] = parts.slice(1).join(': '); } return headers; } } function createPublicPromise() { let res, rej; const promise = new Promise((resolve, reject) => { res = resolve; rej = reject; }); promise.used = false; promise.resolve = function () { promise.used = true; return res(...arguments); }; promise.reject = function () { promise.used = true; return rej(...arguments); }; return promise; } class NetLoader extends Emitter { constructor(cfg, player) { super(); _defineProperty$1(this, "type", LoaderType.FETCH); _defineProperty$1(this, "_queue", []); _defineProperty$1(this, "_alive", []); _defineProperty$1(this, "_currentTask", null); _defineProperty$1(this, "_config", void 0); this.player = player; this._config = getConfig(cfg); if (this._config.loaderType === LoaderType.XHR || !FetchLoader.isSupported()) { this.type = LoaderType.XHR; } } destroy() { this._queue = []; this._alive = []; this._currentTask = null; } isFetch() { return this.type === LoaderType.FETCH; } static isFetchSupport() { return FetchLoader.isSupported(); } load(url) { let config = arguments.length > 1 && arguments[1] !== undefined ? 
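/* createPublicPromise above is the classic "deferred" pattern: resolve/reject are
   lifted onto the promise object itself so other code (here the loader callbacks)
   can settle it later, and `used` records whether it has been settled.
   Illustrative usage (not library API):

   const deferred = createPublicPromise();
   deferred.then(value => console.log('settled with', value));
   // ... later, from some unrelated callback:
   deferred.resolve(42);   // sets deferred.used = true and resolves the promise
*/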
arguments[1] : {}; if (typeof url === 'string' || !url) { config.url = url || config.url || this._config.url; } else { config = url; } config = Object.assign({}, this._config, config); if (config.params) config.params = Object.assign({}, config.params); if (config.headers && isPlainObject(config.headers)) config.headers = Object.assign({}, config.headers); if (config.body && isPlainObject(config.body)) config.body = Object.assign({}, config.body); if (config.transformRequest) { config = config.transformRequest(config) || config; } const task = new Task(this.type, config, this.player); task.loader.on(EVENT.REAL_TIME_SPEED, data => { this.emit(EVENT.REAL_TIME_SPEED, data); }); this._queue.push(task); if (this._queue.length === 1 && (!this._currentTask || !this._currentTask.running)) { this._processTask(); } return task.promise; } async cancel() { const cancels = this._queue.map(t => t.cancel()).concat(this._alive.map(t => t.cancel())); if (this._currentTask) { cancels.push(this._currentTask.cancel()); } this._queue = []; this._alive = []; await Promise.all(cancels); // 不是很理解 sleep 的意思。 await sleep(); } _processTask() { this._currentTask = this._queue.shift(); if (!this._currentTask) return; if (this._currentTask.alive) { this._alive.push(this._currentTask); } const req = this._currentTask.exec().catch(e => {}); if (!(req && typeof req.finally === 'function')) return; req.finally(() => { var _this$_currentTask, _this$_alive; if ((_this$_currentTask = this._currentTask) !== null && _this$_currentTask !== void 0 && _this$_currentTask.alive && ((_this$_alive = this._alive) === null || _this$_alive === void 0 ? void 0 : _this$_alive.length) > 0) { this._alive = this._alive.filter(task => task && task !== this._currentTask); } this._processTask(); }); } } const ERR = { NETWORK: 'network', NETWORK_TIMEOUT: 'network_timeout', NETWORK_FORBIDDEN: 'network_forbidden', OTHER: 'other', MANIFEST: 'manifest', HLS: 'hls', DEMUX: 'demux' }; class StreamingError extends Error { constructor(type, subType, origin, payload, msg) { super(msg || (origin === null || origin === void 0 ? void 0 : origin.message)); this.errorType = type === ERR.NETWORK_TIMEOUT ? ERR.NETWORK : type; this.originError = origin; this.ext = payload; this.errorMessage = this.message; } static create(type, subType, origin, payload, msg) { if (type instanceof StreamingError) { return type; } else if (type instanceof Error) { origin = type; type = ''; } if (!type) type = ERR.OTHER; return new StreamingError(type, subType, origin, payload, msg); } static network(error) { var _error$response; return new StreamingError(error !== null && error !== void 0 && error.isTimeout ? ERR.NETWORK_TIMEOUT : ERR.NETWORK, null, error instanceof Error ? error : null, { url: error === null || error === void 0 ? void 0 : error.url, response: error === null || error === void 0 ? void 0 : error.response, httpCode: error === null || error === void 0 ? void 0 : (_error$response = error.response) === null || _error$response === void 0 ? 
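/* StreamingError.network, being defined here, normalizes loader failures: a timeout
   produces type ERR.NETWORK_TIMEOUT (which the constructor maps back to errorType
   'network'), and the HTTP status, when present, is carried in ext.httpCode.
   Illustrative expectation:

   const err = StreamingError.network({ isTimeout: true, url: 'https://example.com/a.ts', response: { status: 408 } });
   // err.errorType === 'network', err.ext.httpCode === 408
*/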
void 0 : _error$response.status }); } } const REGEXP_TAG = /^#(EXT[^:]*)(?::(.*))?$/; const REGEXP_ATTR = /([^=]+)=(?:"([^"]*)"|([^",]*))(?:,|$)/g; const REGEXP_ABSOLUTE_URL = /^(?:[a-zA-Z0-9+\-.]+:)?\/\//; const REGEXP_URL_PAIR = /^((?:[a-zA-Z0-9+\-.]+:)?\/\/[^/?#]*)?([^?#]*\/)?/; function getLines(text) { return text.split(/[\r\n]/).map(x => x.trim()).filter(Boolean); } function parseTag(text) { const ret = text.match(REGEXP_TAG); if (!ret || !ret[1]) return; return [ret[1].replace('EXT-X-', ''), ret[2]]; } function parseAttr(text) { const ret = {}; let match = REGEXP_ATTR.exec(text); while (match) { ret[match[1]] = match[2] || match[3]; match = REGEXP_ATTR.exec(text); } return ret; } function getAbsoluteUrl(url, parentUrl) { if (!parentUrl || !url || REGEXP_ABSOLUTE_URL.test(url)) return url; const pairs = REGEXP_URL_PAIR.exec(parentUrl); if (!pairs) return url; if (url[0] === '/') return pairs[1] + url; return pairs[1] + pairs[2] + url; } const CODECS_REGEXP = { audio: [/^mp4a/, /^vorbis$/, /^opus$/, /^flac$/, /^[ae]c-3$/], video: [/^avc/, /^hev/, /^hvc/, /^vp0?[89]/, /^av1$/], text: [/^vtt$/, /^wvtt/, /^stpp/] }; /** * @param {'audio' | 'video' | 'text'} type * @param {Array<string>} codecs * @returns {string | undefined} */ function getCodecs(type, codecs) { const re = CODECS_REGEXP[type]; if (!re || !codecs || !codecs.length) return; for (let i = 0; i < re.length; i++) { for (let j = 0; j < codecs.length; j++) { if (re[i].test(codecs[j])) return codecs[j]; } } } class MasterPlaylist { constructor() { this.version = 0; this.streams = []; // MasterStream /** * @readonly */ this.isMaster = true; } } const MediaType = { Audio: 'AUDIO', Video: 'VIDEO', SubTitle: 'SUBTITLE', ClosedCaptions: 'CLOSED-CAPTIONS' }; class MediaStream$1 { constructor() { _defineProperty$1(this, "id", 0); _defineProperty$1(this, "url", ''); _defineProperty$1(this, "default", false); _defineProperty$1(this, "autoSelect", false); _defineProperty$1(this, "forced", false); _defineProperty$1(this, "group", ''); _defineProperty$1(this, "name", ''); _defineProperty$1(this, "lang", ''); _defineProperty$1(this, "segments", []); _defineProperty$1(this, "endSN", 0); } } class AudioStream extends MediaStream$1 { constructor() { super(...arguments); _defineProperty$1(this, "mediaType", MediaType.Audio); _defineProperty$1(this, "channels", 0); } } class SubTitleStream extends MediaStream$1 { constructor() { super(...arguments); _defineProperty$1(this, "mediaType", MediaType.SubTitle); } } class MasterStream { constructor() { _defineProperty$1(this, "id", 0); _defineProperty$1(this, "bitrate", 0); _defineProperty$1(this, "width", 0); _defineProperty$1(this, "height", 0); _defineProperty$1(this, "name", ''); _defineProperty$1(this, "url", ''); _defineProperty$1(this, "audioCodec", ''); _defineProperty$1(this, "videoCodec", ''); _defineProperty$1(this, "textCodec", ''); _defineProperty$1(this, "audioGroup", ''); _defineProperty$1(this, "audioStreams", []); _defineProperty$1(this, "subtitleStreams", []); _defineProperty$1(this, "closedCaptionsStream", []); } } class MediaPlaylist { constructor() { _defineProperty$1(this, "version", 0); _defineProperty$1(this, "url", ''); _defineProperty$1(this, "type", ''); _defineProperty$1(this, "startCC", 0); _defineProperty$1(this, "endCC", 0); _defineProperty$1(this, "startSN", 0); _defineProperty$1(this, "endSN", 0); _defineProperty$1(this, "totalDuration", 0); _defineProperty$1(this, "targetDuration", 0); _defineProperty$1(this, "live", true); _defineProperty$1(this, "segments", []); } } 
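/* parseTag / parseAttr / getAbsoluteUrl above are the small helpers shared by both
   playlist parsers: parseTag splits '#EXT-X-NAME:payload' into ['NAME', 'payload']
   (the EXT-X- prefix is stripped), parseAttr turns a comma-separated attribute list
   into a plain object, and getAbsoluteUrl resolves a URI against the playlist URL.
   Expected behaviour on a typical master-playlist line (illustrative):

   parseTag('#EXT-X-STREAM-INF:BANDWIDTH=1280000,RESOLUTION=1280x720,CODECS="avc1.64001f,mp4a.40.2"');
   //  -> ['STREAM-INF', 'BANDWIDTH=1280000,RESOLUTION=1280x720,CODECS="avc1.64001f,mp4a.40.2"']
   parseAttr('BANDWIDTH=1280000,RESOLUTION=1280x720,CODECS="avc1.64001f,mp4a.40.2"');
   //  -> { BANDWIDTH: '1280000', RESOLUTION: '1280x720', CODECS: 'avc1.64001f,mp4a.40.2' }
   getAbsoluteUrl('seg-1.ts', 'https://example.com/live/index.m3u8');
   //  -> 'https://example.com/live/seg-1.ts'
*/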
class MediaSegment { constructor() { _defineProperty$1(this, "sn", 0); _defineProperty$1(this, "cc", 0); _defineProperty$1(this, "url", ''); _defineProperty$1(this, "title", ''); _defineProperty$1(this, "start", 0); _defineProperty$1(this, "duration", 0); _defineProperty$1(this, "key", null); _defineProperty$1(this, "byteRange", null); _defineProperty$1(this, "isInitSegment", false); _defineProperty$1(this, "initSegment", null); _defineProperty$1(this, "isLast", false); _defineProperty$1(this, "hasAudio", false); _defineProperty$1(this, "hasVideo", false); } get end() { return this.start + this.duration; } setTrackExist(v, a) { this.hasVideo = v; this.hasAudio = a; } setByteRange(data, prevSegment) { this.byteRange = [0]; const bytes = data.split('@'); if (bytes.length === 1 && prevSegment && prevSegment.byteRange) { this.byteRange[0] = prevSegment.byteRange[1] || 0; if (this.byteRange[0]) this.byteRange[0] += 1; } else { this.byteRange[0] = parseInt(bytes[1]); } this.byteRange[1] = this.byteRange[0] + parseInt(bytes[0]) - 1; } } class MediaSegmentKey { /** @type {?Uint8Array} */ constructor(segKey) { _defineProperty$1(this, "method", ''); _defineProperty$1(this, "url", ''); _defineProperty$1(this, "iv", null); _defineProperty$1(this, "keyFormat", ''); _defineProperty$1(this, "keyFormatVersions", ''); if (segKey instanceof MediaSegmentKey) { this.method = segKey.method; this.url = segKey.url; this.keyFormat = segKey.keyFormat; this.keyFormatVersions = segKey.keyFormatVersions; if (segKey.iv) this.iv = new Uint8Array(segKey.iv); } } clone(sn) { const key = new MediaSegmentKey(this); if (sn !== null && sn !== undefined) key.setIVFromSN(sn); return key; } setIVFromSN(sn) { if (!this.iv && this.method === 'AES-128' && typeof sn === 'number' && this.url) { this.iv = new Uint8Array(16); for (let i = 12; i < 16; i++) { this.iv[i] = sn >> 8 * (15 - i) & 0xff; } } } } // parse media play list function parseMediaPlaylist(lines, parentUrl) { const media = new MediaPlaylist(); media.url = parentUrl; let curSegment = new MediaSegment(); let curInitSegment = null; let curKey = null; let totalDuration = 0; let curSN = 0; let curCC = 0; let index = 0; let line; let endOfList = false; // eslint-disable-next-line no-cond-assign while (line = lines[index++]) { if (endOfList) { break; } if (line[0] !== '#') { // url curSegment.sn = curSN; curSegment.cc = curCC; curSegment.url = getAbsoluteUrl(line, parentUrl); if (curKey) curSegment.key = curKey.clone(curSN); if (curInitSegment) curSegment.initSegment = curInitSegment; media.segments.push(curSegment); curSegment = new MediaSegment(); curSN++; continue; } const tag = parseTag(line); if (!tag) continue; const [name, data] = tag; switch (name) { case 'VERSION': media.version = parseInt(data); break; case 'PLAYLIST-TYPE': media.type = data === null || data === void 0 ? 
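/* MediaSegmentKey.setIVFromSN above implements the HLS rule for AES-128 keys without
   an explicit IV attribute: the IV is the segment's media sequence number written as
   a 16-byte big-endian integer, so only the last four bytes are ever non-zero here.
   Equivalent sketch (illustrative):

   function ivFromSequenceNumber(sn) {
     const iv = new Uint8Array(16);             // bytes 0..11 stay zero
     for (let i = 12; i < 16; i++) {
       iv[i] = (sn >> (8 * (15 - i))) & 0xff;   // big-endian low 32 bits
     }
     return iv;
   }
   // ivFromSequenceNumber(7)   -> last four bytes 0, 0, 0, 7
   // ivFromSequenceNumber(258) -> last four bytes 0, 0, 1, 2   (258 = 0x00000102)
*/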
void 0 : data.toUpperCase(); break; case 'TARGETDURATION': media.targetDuration = parseFloat(data); break; case 'ENDLIST': { const lastSegment = media.segments[media.segments.length - 1]; if (lastSegment) { lastSegment.isLast = true; } media.live = false; endOfList = true; } break; case 'MEDIA-SEQUENCE': curSN = media.startSN = parseInt(data); break; case 'DISCONTINUITY-SEQUENCE': curCC = media.startCC = parseInt(data); break; case 'DISCONTINUITY': curCC++; break; case 'BYTERANGE': curSegment.setByteRange(data, media.segments[media.segments.length - 1]); break; case 'EXTINF': { const [duration, title] = data.split(','); curSegment.start = totalDuration; curSegment.duration = parseFloat(duration); totalDuration += curSegment.duration; curSegment.title = title; } break; case 'KEY': { const attr = parseAttr(data); if (attr.METHOD === 'NONE') { curKey = null; break; } if (attr.METHOD !== 'AES-128') throw new Error(`encrypt ${attr.METHOD}/${attr.KEYFORMAT} is not supported`); curKey = new MediaSegmentKey(); curKey.method = attr.METHOD; curKey.url = /^blob:/.test(attr.URI) ? attr.URI : getAbsoluteUrl(attr.URI, parentUrl); curKey.keyFormat = attr.KEYFORMAT || 'identity'; curKey.keyFormatVersions = attr.KEYFORMATVERSIONS; if (attr.IV) { let str = attr.IV.slice(2); str = (str.length & 1 ? '0' : '') + str; curKey.iv = new Uint8Array(str.length / 2); for (let i = 0, l = str.length / 2; i < l; i++) { curKey.iv[i] = parseInt(str.slice(i * 2, i * 2 + 2), 16); } } } break; case 'MAP': { const attr = parseAttr(data); curSegment.url = getAbsoluteUrl(attr.URI, parentUrl); if (attr.BYTERANGE) curSegment.setByteRange(attr.BYTERANGE); curSegment.isInitSegment = true; curSegment.sn = 0; if (curKey) { curSegment.key = curKey.clone(0); } curInitSegment = curSegment; curSegment = new MediaSegment(); } break; } } const lastSegment = media.segments[media.segments.length - 1]; if (lastSegment) media.endSN = lastSegment.sn; media.totalDuration = totalDuration; media.endCC = curCC; return media; } /** * parse master play list * @param {Array<string>} lines * @param {string} parentUrl * @returns {MasterPlaylist} */ function parseMasterPlaylist(lines, parentUrl) { const master = new MasterPlaylist(); let index = 0; let line; const audioStreams = []; const subtitleStreams = []; // eslint-disable-next-line no-cond-assign while (line = lines[index++]) { const tag = parseTag(line); if (!tag) continue; const [name, data] = tag; if (name === 'VERSION') { master.version = parseInt(data); } else if (name === 'MEDIA' && data) { const attr = parseAttr(data); let stream; switch (attr.TYPE) { case 'AUDIO': stream = new AudioStream(); break; case 'SUBTITLES': stream = new SubTitleStream(); break; default: stream = new MediaStream$1(); } stream.url = getAbsoluteUrl(attr.URI, parentUrl); stream.default = attr.DEFAULT === 'YES'; stream.autoSelect = attr.AUTOSELECT === 'YES'; stream.group = attr['GROUP-ID']; stream.name = attr.NAME; stream.lang = attr.LANGUAGE; if (attr.CHANNELS) { stream.channels = Number(attr.CHANNELS.split('/')[0]); if (Number.isNaN(stream.channels)) stream.channels = 0; } if (attr.TYPE === 'AUDIO' && attr.URI) { audioStreams.push(stream); } if (attr.TYPE === 'SUBTITLES') { subtitleStreams.push(stream); } } else if (name === 'STREAM-INF' && data) { const stream = new MasterStream(); const attr = parseAttr(data); stream.bitrate = parseInt(attr['AVERAGE-BANDWIDTH'] || attr.BANDWIDTH); stream.name = attr.NAME; stream.url = getAbsoluteUrl(lines[index++], parentUrl); if (attr.RESOLUTION) { const [w, h] = 
attr.RESOLUTION.split('x'); stream.width = parseInt(w); stream.height = parseInt(h); } if (attr.CODECS) { const codecs = attr.CODECS.split(/[ ,]+/).filter(Boolean); stream.videoCodec = getCodecs('video', codecs); stream.audioCodec = getCodecs('audio', codecs); stream.textCodec = getCodecs('text', codecs); } stream.audioGroup = attr.AUDIO; stream.subtitleGroup = attr.SUBTITLES; master.streams.push(stream); } } master.streams.forEach((s, i) => { s.id = i; }); if (audioStreams.length) { audioStreams.forEach((s, i) => { s.id = i; }); master.streams.forEach(stream => { if (stream.audioGroup) { stream.audioStreams = audioStreams.filter(x => x.group === stream.audioGroup); } }); } if (subtitleStreams.length) { subtitleStreams.forEach((s, i) => { s.id = i; }); master.streams.forEach(stream => { if (stream.subtitleGroup) { stream.subtitleStreams = subtitleStreams.filter(x => x.group === stream.subtitleGroup); } }); } return master; } class M3U8Parser { static parse() { let text = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ''; let parentUrl = arguments.length > 1 ? arguments[1] : undefined; if (!text.includes('#EXTM3U')) throw new Error('Invalid m3u8 file'); const lines = getLines(text); if (M3U8Parser.isMediaPlaylist(text)) { return parseMediaPlaylist(lines, parentUrl); } return parseMasterPlaylist(lines, parentUrl); } static isMediaPlaylist(text) { return text.includes('#EXTINF:') || text.includes('#EXT-X-TARGETDURATION:'); } } class ManifestLoader { constructor(hls) { _defineProperty$1(this, "_onLoaderRetry", (error, retryTime) => { // 请求发生重试时触发。参数如下。 this.hls.emit(HLS_EVENTS.LOAD_RETRY, { error: StreamingError.network(error), retryTime }); }); this.hls = hls; this.player = hls.player; this.TAG_NAME = 'HlsManifestLoader'; this._timer = null; const { retryCount, retryDelay, loadTimeout, fetchOptions } = this.hls.config; this._loader = new NetLoader({ ...fetchOptions, responseType: 'text', retry: retryCount, retryDelay: retryDelay, timeout: loadTimeout, onRetryError: this._onLoaderRetry }, this.player); this._audioLoader = new NetLoader({ ...fetchOptions, responseType: 'text', retry: retryCount, retryDelay: retryDelay, timeout: loadTimeout, onRetryError: this._onLoaderRetry }, this.player); this._subtitleLoader = new NetLoader({ ...fetchOptions, responseType: 'text', retry: retryCount, retryDelay: retryDelay, timeout: loadTimeout, onRetryError: this._onLoaderRetry }, this.player); } async destroy() { await this.stopPoll(); if (this._audioLoader) { this._audioLoader.destroy(); this._audioLoader = null; } if (this._subtitleLoader) { this._subtitleLoader.destroy(); this._subtitleLoader = null; } if (this._loader) { this._loader.destroy(); this._loader = null; } } // load async load(url, audioUrl, subtitleUrl) { this.player.debug.log(this.TAG_NAME, 'load()', url, audioUrl, subtitleUrl); const toLoad = [this._loader.load(url)]; if (audioUrl) { toLoad.push(this._audioLoader.load(audioUrl)); } if (subtitleUrl) { toLoad.push(this._subtitleLoader.load(subtitleUrl)); } let videoText; let audioText; let subtitleText; try { const [video, audio, subtitle] = await Promise.all(toLoad); if (!video) return []; videoText = video.data; if (audioUrl) { audioText = audio === null || audio === void 0 ? void 0 : audio.data; subtitleText = subtitle === null || subtitle === void 0 ? void 0 : subtitle.data; } else { subtitleText = audio === null || audio === void 0 ? 
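/* M3U8Parser.parse above dispatches on content: text containing #EXTINF or
   #EXT-X-TARGETDURATION is handled as a media playlist, everything else as a master
   playlist. Rough usage sketch with a minimal live media playlist (illustrative):

   const text = [
     '#EXTM3U',
     '#EXT-X-VERSION:3',
     '#EXT-X-TARGETDURATION:4',
     '#EXT-X-MEDIA-SEQUENCE:10',
     '#EXTINF:4.0,',
     'seg-10.ts',
     '#EXTINF:4.0,',
     'seg-11.ts'
   ].join('\n');
   const media = M3U8Parser.parse(text, 'https://example.com/live/index.m3u8');
   // media.live === true (no #EXT-X-ENDLIST), media.startSN === 10,
   // media.segments.length === 2, media.totalDuration === 8,
   // media.segments[1].url === 'https://example.com/live/seg-11.ts'
*/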
void 0 : audio.data; } } catch (error) { throw StreamingError.network(error); } let playlist; let audioPlaylist; let subtitlePlaylist; try { var _playlist; // playlist 对象 playlist = M3U8Parser.parse(videoText, url); if (((_playlist = playlist) === null || _playlist === void 0 ? void 0 : _playlist.live) === false && playlist.segments && !playlist.segments.length) { throw new Error('empty segments list'); } if (audioText) { audioPlaylist = M3U8Parser.parse(audioText, audioUrl); } if (subtitleText) { subtitlePlaylist = M3U8Parser.parse(subtitleText, subtitleUrl); } } catch (error) { throw new StreamingError(ERR.MANIFEST, ERR.HLS, error); } // playlist if (playlist) { if (playlist.isMaster) { // hls manifest loaded // 主从m3u8格式时,master m3u8文件加载并解析完成后, 抛出master m3u8解析后的结构 this.hls.emit(HLS_EVENTS.HLS_MANIFEST_LOADED, { playlist }); } else { // hls level loaded // 二级m3u8加载并解析完成后,抛出解析后的结构, this.hls.emit(HLS_EVENTS.HLS_LEVEL_LOADED, { playlist }); } } // playlist ,audioPlaylist, subtitlePlaylist return [playlist, audioPlaylist, subtitlePlaylist]; } // poll poll(url, audioUrl, subtitleUrl, cb, errorCb, time) { clearTimeout(this._timer); time = time || 3000; let retryCount = this.hls.config.pollRetryCount; const fn = async () => { clearTimeout(this._timer); try { const res = await this.load(url, audioUrl, subtitleUrl); if (!res[0]) return; retryCount = this.hls.config.pollRetryCount; cb(res[0], res[1], res[2]); } catch (e) { retryCount--; if (retryCount <= 0) { errorCb(e); } } this._timer = setTimeout(fn, time); }; this._timer = setTimeout(fn, time); } stopPoll() { clearTimeout(this._timer); return this.cancel(); } cancel() { return Promise.all([this._loader.cancel(), this._audioLoader.cancel()]); } } class BandwidthService { constructor() { _defineProperty$1(this, "_chunkSpeeds", []); _defineProperty$1(this, "_speeds", []); } // bps addRecord(totalByte, ms) { if (!totalByte || !ms) return; this._speeds.push(8000 * totalByte / ms); this._speeds = this._speeds.slice(-3); } addChunkRecord(totalByte, ms) { if (!totalByte || !ms) return; this._chunkSpeeds.push(8000 * totalByte / ms); this._chunkSpeeds = this._chunkSpeeds.slice(-100); } getAvgSpeed() { if (!this._chunkSpeeds.length && !this._speeds.length) return 0; if (this._speeds.length) { return this._speeds.reduce((a, c) => a += c) / this._speeds.length; } return this._chunkSpeeds.reduce((a, c) => a += c) / this._chunkSpeeds.length; } getLatestSpeed() { if (!this._chunkSpeeds.length && !this._speeds.length) return 0; if (this._speeds.length) { return this._speeds[this._speeds.length - 1]; } return this._chunkSpeeds[this._chunkSpeeds.length - 1]; } reset() { this._chunkSpeeds = []; this._speeds = []; } } class SegmentLoader { constructor(hls) { _defineProperty$1(this, "_emitOnLoaded", (res, url) => { const { data, response, option } = res; const { firstByteTime, startTime, endTime, contentLength } = option || {}; const time = endTime - startTime; this._bandwidthService.addRecord(contentLength || data.byteLength, time); // emit speed 事件 // 当收集到网络速度统计时触发,参数如下 this.hls.emit(HLS_EVENTS.SPEED, { time, byteLength: contentLength, url }); // 在请求完成后触发,参数为 { url: string },url 为请求 url 。 this.hls.emit(HLS_EVENTS.LOAD_COMPLETE, { url, elapsed: time || 0 }); this.hls.emit(HLS_EVENTS.TTFB, { url, responseUrl: response.url, elapsed: firstByteTime - startTime }); // 接收到请求响应头时触发,参数为 { headers: Headers | Recored<string, string>} 。 // 如果当前环境支持 fetch 则 headers 为 Response.headers,否则是普通对象。 this.hls.emit(HLS_EVENTS.LOAD_RESPONSE_HEADERS, { headers: response.headers }); }); 
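/* BandwidthService above keeps speeds in bits per second: 8000 * bytes / ms equals
   (bytes * 8) / (ms / 1000). For example 1,000,000 bytes received in 2000 ms gives
   8000 * 1000000 / 2000 = 4,000,000 bps (4 Mbps). getAvgSpeed() averages the last
   three whole-response samples when any exist, otherwise the retained chunk samples
   (at most the last 100). */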
_defineProperty$1(this, "_onLoaderRetry", (error, retryTime) => { this.hls.emit(HLS_EVENTS.LOAD_RETRY, { error: StreamingError.network(error), retryTime }); }); this.hls = hls; this.player = hls.player; this._bandwidthService = new BandwidthService(); const { retryCount, retryDelay, loadTimeout, fetchOptions } = this.hls.config; this._segmentLoader = new NetLoader({ ...fetchOptions, responseType: 'arraybuffer', retry: retryCount, retryDelay: retryDelay, timeout: loadTimeout, onRetryError: this._onLoaderRetry }, this.player); this._audioSegmentLoader = new NetLoader({ ...fetchOptions, responseType: 'arraybuffer', retry: retryCount, retryDelay: retryDelay, timeout: loadTimeout, onRetryError: this._onLoaderRetry }, this.player); this._keyLoader = new NetLoader({ ...fetchOptions, responseType: 'arraybuffer', retry: retryCount, retryDelay: retryDelay, timeout: loadTimeout, onRetryError: this._onLoaderRetry }, this.player); } destroy() { this.reset(); if (this._keyLoader) { this._keyLoader.destroy(); this._keyLoader = null; } if (this._audioSegmentLoader) { this._audioSegmentLoader.destroy(); this._audioSegmentLoader = null; } if (this._segmentLoader) { this._segmentLoader.destroy(); this._segmentLoader = null; } } speedInfo() { return { speed: this._bandwidthService.getLatestSpeed(), avgSpeed: this._bandwidthService.getAvgSpeed() }; } resetBandwidth() { this._bandwidthService.reset(); } /** * @param {MediaSegment} seg * @param {MediaSegment} audioSeg * @param {boolean} loadInit * @param {boolean} loadAudioInit */ load(seg, audioSeg, loadInit) { let loadAudioInit = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : loadInit; const toLoad = []; if (seg) toLoad[0] = this.loadVideoSegment(seg, loadInit); if (audioSeg) toLoad[1] = this.loadAudioSegment(audioSeg, loadAudioInit); return Promise.all(toLoad); } /** * @param {MediaSegment} seg * @param {boolean} loadInit */ loadVideoSegment(seg, loadInit) { return this._loadSegment(this._segmentLoader, seg, loadInit); } /** * @param {MediaSegment} seg * @param {boolean} loadInit */ loadAudioSegment(seg, loadInit) { return this._loadSegment(this._audioSegmentLoader, seg, loadInit); } /** * @param {NetLoader} segLoader * @param {MediaSegment} seg * @param {boolean} loadInit */ async _loadSegment(segLoader, seg, loadInit) { var _seg$key; let map; let key; let keyIv; let mapKey; let mapKeyIv; const toLoad = []; // load start // 开始拉流或者后续播放阶段时获取 // 分片在发送请求之前触发,参数为 { url: string },url 为请求 url 。 this.hls.emit(HLS_EVENTS.LOAD_START, { url: seg.url }); toLoad[0] = segLoader.load(seg.url); if (loadInit && seg.initSegment) { var _seg$initSegment$key; const mapUrl = seg.initSegment.url; map = this._mapCache[mapUrl]; if (!map) { // 开始拉流或者后续播放阶段时获取 // 分片在发送请求之前触发,参数为 { url: string },url 为请求 url 。 this.hls.emit(HLS_EVENTS.LOAD_START, { url: mapUrl }); toLoad[1] = segLoader.load(mapUrl).then(r => { if (r) { const l = Object.keys(this._mapCache); if (l > 30) this._mapCache = {}; map = this._mapCache[mapUrl] = r.data; this._emitOnLoaded(r, mapUrl); } }); } const keyUrl = (_seg$initSegment$key = seg.initSegment.key) === null || _seg$initSegment$key === void 0 ? 
void 0 : _seg$initSegment$key.url; if (keyUrl) { mapKeyIv = seg.initSegment.key.iv; mapKey = this._keyCache[keyUrl]; if (!mapKey) { // 开始拉流或者后续播放阶段时获取 // 分片在发送请求之前触发,参数为 { url: string },url 为请求 url 。 this.hls.emit(HLS_EVENTS.LOAD_START, { url: keyUrl }); toLoad[2] = this._keyLoader.load(keyUrl).then(r => { if (r) { mapKey = this._keyCache[keyUrl] = r.data; this._emitOnLoaded(r, keyUrl); } }); } } } const keyUrl = (_seg$key = seg.key) === null || _seg$key === void 0 ? void 0 : _seg$key.url; if (keyUrl) { keyIv = seg.key.iv; key = this._keyCache[keyUrl]; if (!key) { // 开始拉流或者后续播放阶段时获取 // 分片在发送请求之前触发,参数为 { url: string },url 为请求 url 。 this.hls.emit(HLS_EVENTS.LOAD_START, { url: keyUrl }); toLoad[3] = this._keyLoader.load(keyUrl).then(r => { if (r) { key = this._keyCache[keyUrl] = r.data; this._emitOnLoaded(r, keyUrl); } }); } } const [s] = await Promise.all(toLoad); if (!s) return; const data = s.data; this._emitOnLoaded(s, seg.url); return { data, map, key, mapKey, keyIv, mapKeyIv }; } reset() { this.error = null; this._mapCache = {}; this._keyCache = {}; this._bandwidthService.reset(); } async cancel() { await Promise.all([this._keyLoader.cancel(), this._segmentLoader.cancel(), this._audioSegmentLoader.cancel()]); } } /** * */ class Stream { constructor(playlist, audioPlaylist, subtitlePlaylist) { this.live = undefined; this.id = 0; this.bitrate = 0; this.width = 0; this.height = 0; this.name = ''; this.url = ''; this.audioCodec = ''; this.videoCodec = ''; this.textCodec = ''; this.startCC = 0; this.endCC = 0; this.startSN = 0; this.endSN = -1; this.totalDuration = 0; this.targetDuration = 0; this.snDiff = null; // number this.segments = []; /** @type {import('../../parser').AudioStream[]} */ this.audioStreams = []; /** @type {import('../../parser').SubTitleStream[]} */ this.subtitleStreams = []; /** @type {import('../../parser/model').ClosedCaptionsStream[]} */ this.closedCaptions = []; /** @type {import('../../parser/model').AudioStream | null} */ this.currentAudioStream = null; /** @type {import('../../parser/model').subtitleStreams | null} */ this.currentSubtitleStream = null; this.TAG_NAME = 'HlsStream'; this.update(playlist, audioPlaylist, subtitlePlaylist); } /** * asdasd {@link AudioStream} */ get lastSegment() { if (this.segments.length) { return this.segments[this.segments.length - 1]; } return null; } get segmentDuration() { var _this$segments$; return this.targetDuration || ((_this$segments$ = this.segments[0]) === null || _this$segments$ === void 0 ? void 0 : _this$segments$.duration) || 0; } get liveEdge() { return this.endTime; } get endTime() { var _this$lastSegment; return ((_this$lastSegment = this.lastSegment) === null || _this$lastSegment === void 0 ? void 0 : _this$lastSegment.end) || 0; } get currentSubtitleEndSn() { var _this$currentSubtitle; return ((_this$currentSubtitle = this.currentSubtitleStream) === null || _this$currentSubtitle === void 0 ? 
void 0 : _this$currentSubtitle.endSN) || 0; } clearOldSegment(startTime, pointer) { // if (this.currentAudioStream) { // this._clearSegments(startTime, pointer) // } return this._clearSegments(startTime, pointer); } getAudioSegment(seg) { if (!seg || !this.currentAudioStream) return; const sn = seg.sn - this.snDiff; return this.currentAudioStream.segments.find(x => x.sn === sn); } update(playlist, audioPlaylist) { this.url = playlist.url; if (Array.isArray(playlist.segments)) { // media if (this.live === null || this.live === undefined) { this.live = playlist.live; } this._updateSegments(playlist, this); this.startCC = playlist.startCC; this.endCC = playlist.endCC; this.startSN = playlist.startSN; this.endSN = playlist.endSN || -1; this.totalDuration = playlist.totalDuration; this.targetDuration = playlist.targetDuration; this.live = playlist.live; if (audioPlaylist && this.currentAudioStream && Array.isArray(audioPlaylist.segments)) { this._updateSegments(audioPlaylist, this.currentAudioStream); if ((this.snDiff === null || this.snDiff === undefined) && playlist.segments.length && audioPlaylist.segments.length) { this.snDiff = playlist.segments[0].sn - audioPlaylist.segments[0].sn; } } } else { // master stream this.id = playlist.id; this.bitrate = playlist.bitrate; this.width = playlist.width; this.height = playlist.height; this.name = playlist.name; this.audioCodec = playlist.audioCodec; this.videoCodec = playlist.videoCodec; this.textCodec = playlist.textCodec; this.audioStreams = playlist.audioStreams; this.subtitleStreams = playlist.subtitleStreams; if (!this.currentAudioStream && this.audioStreams.length) { this.currentAudioStream = this.audioStreams.find(x => x.default) || this.audioStreams[0]; } if (!this.currentSubtitleStream && this.subtitleStreams.length) { this.currentSubtitleStream = this.subtitleStreams.find(x => x.default) || this.subtitleStreams[0]; } } } updateSubtitle(subtitlePlaylist) { if (!(subtitlePlaylist && this.currentSubtitleStream && Array.isArray(subtitlePlaylist.segments))) return; const newSegs = this._updateSegments(subtitlePlaylist, this.currentSubtitleStream); const segs = this.currentSubtitleStream.segments; if (segs.length > 100) { this.currentSubtitleStream.segments = segs.slice(100); } if (!newSegs) return; return newSegs.map(x => { return { sn: x.sn, url: x.url, duration: x.duration, start: x.start, end: x.end, lang: this.currentSubtitleStream.lang }; }); } switchSubtitle(lang) { const toSwitch = this.subtitleStreams.find(x => x.lang === lang); const origin = this.currentSubtitleStream; if (toSwitch) { this.currentSubtitleStream = toSwitch; origin.segments = []; } } _clearSegments(startTime, pointer) { let sliceStart = 0; const segments = this.segments; for (let i = 0, l = segments.length; i < l; i++) { if (segments[i].end >= startTime) { sliceStart = i; break; } } if (sliceStart > pointer) { sliceStart = pointer; } if (sliceStart) { this.segments = this.segments.slice(sliceStart); if (this.currentAudioStream) { this.currentAudioStream.segments = this.currentAudioStream.segments.slice(sliceStart); } } return pointer - sliceStart; } _updateSegments(playlist, segObj) { // 本地 segments const segments = segObj.segments; if (this.live) { // 取最后一个 const endSeg = segments[segments.length - 1]; // 取 endSN const endSN = (endSeg === null || endSeg === void 0 ? void 0 : endSeg.sn) || -1; if (endSN < playlist.endSN && playlist.segments.length) { const index = playlist.segments.findIndex(x => x.sn === endSN); const toAppend = index < 0 ? 
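/* For live streams this _updateSegments stitches each playlist refresh onto the
   local list: it locates the last locally known sequence number (endSN) in the new
   playlist, appends only the segments after it, and rewrites their start times so
   the timeline stays continuous. Simplified sketch of that append step (illustrative,
   no discontinuity handling):

   function appendNewSegments(localSegs, refreshedSegs) {
     const last = localSegs[localSegs.length - 1];
     const idx = last ? refreshedSegs.findIndex(s => s.sn === last.sn) : -1;
     const toAppend = idx < 0 ? refreshedSegs : refreshedSegs.slice(idx + 1);
     let start = last ? last.start + last.duration : 0;
     for (const seg of toAppend) {
       seg.start = start;                  // continue from the previous segment's end
       start = seg.start + seg.duration;
     }
     return localSegs.concat(toAppend);
   }
*/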
playlist.segments : playlist.segments.slice(index + 1); if (segments.length && toAppend.length) { let endTime = endSeg.end; toAppend.forEach(seg => { seg.start = endTime; endTime = seg.end; }); const lastCC = (endSeg === null || endSeg === void 0 ? void 0 : endSeg.cc) || -1; if (lastCC > toAppend[0].cc) { toAppend.forEach(seg => seg.cc += lastCC); } } segObj.endSN = playlist.endSN; segObj.segments = segments.concat(toAppend); return toAppend; } } else { segObj.segments = playlist.segments; } } } /** * playlist: m3u8 */ class Playlist { constructor(hls) { this.hls = hls; this.player = hls.player; /** @type {import('./stream').Stream[]} */ this.streams = []; /** @type {import('./stream').Stream} */ this.currentStream = null; // dvr window this.dvrWindow = 0; this._segmentPointer = -1; this.TAG_NAME = 'HlsPlaylist'; } destroy() { this.reset(); } get lastSegment() { var _this$currentStream; return (_this$currentStream = this.currentStream) === null || _this$currentStream === void 0 ? void 0 : _this$currentStream.lastSegment; } get currentSegment() { var _this$currentSegments; return (_this$currentSegments = this.currentSegments) === null || _this$currentSegments === void 0 ? void 0 : _this$currentSegments[this._segmentPointer]; } get nextSegment() { var _this$currentSegments2; return (_this$currentSegments2 = this.currentSegments) === null || _this$currentSegments2 === void 0 ? void 0 : _this$currentSegments2[this._segmentPointer + 1]; } get currentSegments() { var _this$currentStream2; return (_this$currentStream2 = this.currentStream) === null || _this$currentStream2 === void 0 ? void 0 : _this$currentStream2.segments; } get currentSubtitleEndSn() { var _this$currentStream3; return (_this$currentStream3 = this.currentStream) === null || _this$currentStream3 === void 0 ? void 0 : _this$currentStream3.currentSubtitleEndSn; } get liveEdge() { var _this$currentStream4; return (_this$currentStream4 = this.currentStream) === null || _this$currentStream4 === void 0 ? void 0 : _this$currentStream4.liveEdge; } get totalDuration() { var _this$currentStream5; return ((_this$currentStream5 = this.currentStream) === null || _this$currentStream5 === void 0 ? void 0 : _this$currentStream5.totalDuration) || 0; } get seekRange() { const segments = this.currentSegments; if (!segments || !segments.length) return; return [segments[0].start, segments[segments.length - 1].end]; } get isEmpty() { var _this$currentSegments3; return !((_this$currentSegments3 = this.currentSegments) !== null && _this$currentSegments3 !== void 0 && _this$currentSegments3.length); } get isLive() { var _this$currentStream6; return (_this$currentStream6 = this.currentStream) === null || _this$currentStream6 === void 0 ? void 0 : _this$currentStream6.live; } get hasSubtitle() { var _this$currentStream7; return !!((_this$currentStream7 = this.currentStream) !== null && _this$currentStream7 !== void 0 && _this$currentStream7.currentSubtitleStream); } getAudioSegment(seg) { var _this$currentStream8; return (_this$currentStream8 = this.currentStream) === null || _this$currentStream8 === void 0 ? void 0 : _this$currentStream8.getAudioSegment(seg); } moveSegmentPointer(pos) { var _this$currentSegments4; if (pos === null || pos === undefined) pos = this._segmentPointer + 1; this._segmentPointer = clamp(pos, -1, (_this$currentSegments4 = this.currentSegments) === null || _this$currentSegments4 === void 0 ? 
void 0 : _this$currentSegments4.length); this.player.debug.log(this.TAG_NAME, `moveSegmentPointer() and param pos is ${pos} and clamp result is ${this._segmentPointer}`); } reset() { this.streams = []; this.currentStream = null; this.dvrWindow = 0; this._segmentPointer = -1; } getSegmentByIndex(index) { var _this$currentSegments5; return (_this$currentSegments5 = this.currentSegments) === null || _this$currentSegments5 === void 0 ? void 0 : _this$currentSegments5[index]; } setNextSegmentByIndex() { let index = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; this._segmentPointer = index - 1; this.player.debug.log(this.TAG_NAME, 'setNextSegmentByIndex()', index, this._segmentPointer); } // fxzind segment index by time findSegmentIndexByTime(time) { const segments = this.currentSegments; if (segments) { for (let i = 0, l = segments.length, seg; i < l; i++) { seg = segments[i]; if (time >= seg.start && time < seg.end) { return i; } } const lastSegment = segments[segments.length - 1]; if (Math.abs(time - (lastSegment === null || lastSegment === void 0 ? void 0 : lastSegment.end)) < 0.2) { return segments.length - 1; } } } // upsert play list upsertPlaylist(playlist, audioPlaylist, subtitlePlaylist) { // this.player.debug.log(this.TAG_NAME, 'upsertPlaylist()', playlist, audioPlaylist, subtitlePlaylist); if (!playlist) { this.player.debug.warn(this.TAG_NAME, 'upsertPlaylist() playlist is null'); return; } if (playlist.isMaster) { // streams this.streams.length = playlist.streams.length; playlist.streams.filter(x => x.url).forEach((stream, i) => { if (this.streams[i]) { this.streams[i].update(stream); } else { this.streams[i] = new Stream(stream); } }); this.currentStream = this.streams[0]; // update media } else if (Array.isArray(playlist.segments)) { // current stream const stream = this.currentStream; if (stream) { stream.update(playlist, audioPlaylist, subtitlePlaylist); const newSubtitleSegs = stream.updateSubtitle(subtitlePlaylist); if (newSubtitleSegs) { this.hls.emit(HLS_EVENTS.SUBTITLE_SEGMENTS, { list: newSubtitleSegs }); } } else { this.reset(); this.currentStream = this.streams[0] = new Stream(playlist, audioPlaylist, subtitlePlaylist); } } const currentStream = this.currentStream; if (currentStream) { // if (this.hls.isLive && !this.dvrWindow) { this.dvrWindow = this.currentSegments.reduce((a, c) => { a += c.duration; return a; }, 0); } } } switchSubtitle(lang) { var _this$currentStream9; (_this$currentStream9 = this.currentStream) === null || _this$currentStream9 === void 0 ? void 0 : _this$currentStream9.switchSubtitle(lang); } // clear old segment clearOldSegment() { let maxPlaylistSize = arguments.length > 0 && arguments[0] !== undefined ? 
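/* findSegmentIndexByTime above is a linear scan for the segment whose [start, end)
   window contains the requested time, with a 0.2 s tolerance at the very end of the
   list. Illustrative expectation with 4-second segments starting at 0:
   segments [0,4) [4,8) [8,12)  ->  findSegmentIndexByTime(5.5) === 1, and
   findSegmentIndexByTime(12.1) === 2 because 12.1 is within 0.2 s of the last end. */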
arguments[0] : 50; const stream = this.currentStream; if (!this.dvrWindow || !stream) return; const startTime = stream.endTime - this.dvrWindow; if (startTime <= 0) { this.player.debug.log(this.TAG_NAME, `clearOldSegment() stream.endTime:${stream.endTime}, this.dvrWindow:${this.dvrWindow} startTime <= 0`); return; } const segments = stream.segments; if (segments.length <= maxPlaylistSize) { this.player.debug.log(this.TAG_NAME, `clearOldSegment() segments.length:${segments.length} <= maxPlaylistSize:${maxPlaylistSize}`); return; } const _oldSegmentPointer = this._segmentPointer; this._segmentPointer = stream.clearOldSegment(startTime, _oldSegmentPointer); this.player.debug.log(this.TAG_NAME, 'clearOldSegment() update _segmentPointer:', _oldSegmentPointer, this._segmentPointer); this.player.debug.log(this.TAG_NAME, 'currentSegments', this.currentSegments); } checkSegmentTrackChange(cTime, nbSb) { const index = this.findSegmentIndexByTime(cTime); const seg = this.getSegmentByIndex(index); if (!seg) return; if (!seg.hasAudio && !seg.hasVideo) return; // when seek if (nbSb !== 2 && seg.hasAudio && seg.hasVideo) return seg; // continuous play if (seg.end - cTime > 0.3) return; const next = this.getSegmentByIndex(index + 1); if (!next) return; if (!next.hasAudio && !next.hasVideo) return; if (next.hasAudio !== seg.hasAudio || next.hasVideo !== seg.hasVideo) return next; } } // crypto 模块 class Decryptor { constructor(hls, player) { this.hls = hls; this.player = player; const crypto = window.crypto || window.msCrypto; this.subtle = crypto && (crypto.subtle || crypto.webkitSubtle); /** * @type {IExternalDecryptor} */ this.externalDecryptor = null; } decrypt(video, audio) { if (!video && !audio) return; const ret = []; if (video) { ret[0] = this._decryptSegment(video); } if (audio) { ret[1] = this._decryptSegment(audio); } return Promise.all(ret); } async _decryptSegment(seg) { let data = seg.data; if (seg.key) { data = await this._decryptData(seg.data, seg.key, seg.keyIv); } if (!seg.map) return data; return concatUint8Array(seg.map, data); } async _decryptData(data, key, iv) { if (this.externalDecryptor) { return await this.externalDecryptor.decrypt(data, key, iv); } else { if (!this.subtle) throw new Error('crypto is not defined'); const aesKey = await this.subtle.importKey('raw', key, { name: 'AES-CBC' }, false, ['encrypt', 'decrypt']); return new Uint8Array(await this.subtle.decrypt({ name: 'AES-CBC', iv }, aesKey, data)); } } } function readBig16(data) { let i = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; return (data[i] << 8) + (data[i + 1] || 0); } function readBig24(data) { let i = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; return (data[i] << 16) + (data[i + 1] << 8) + (data[i + 2] || 0); } function readBig32(data) { let i = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; return (data[i] << 24 >>> 0) + (data[i + 1] << 16) + (data[i + 2] << 8) + (data[i + 3] || 0); } function readBig64(data) { let i = arguments.length > 1 && arguments[1] !== undefined ? 
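/* Decryptor._decryptData above calls the Web Crypto API directly: the 16-byte key
   from EXT-X-KEY is imported as a raw AES-CBC key and each segment is decrypted with
   its per-segment IV. Standalone sketch of the same two calls (illustrative):

   async function decryptAes128Cbc(cipherBytes, keyBytes, ivBytes) {
     const subtle = (window.crypto || window.msCrypto).subtle;
     const key = await subtle.importKey('raw', keyBytes, { name: 'AES-CBC' }, false, ['decrypt']);
     const plain = await subtle.decrypt({ name: 'AES-CBC', iv: ivBytes }, key, cipherBytes);
     return new Uint8Array(plain);
   }
*/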
arguments[1] : 0; const MAX_SIZE = Math.pow(2, 32); return readBig32(data, i) * MAX_SIZE + readBig32(data, i + 4); } const AudioCodecType = { AAC: 'aac', G711PCMA: 'g7110a', G711PCMU: 'g7110m' }; const VideoCodecType = { AVC: 'avc', HEVC: 'hevc' }; function getAvcCodec(codecs) { let codec = 'avc1.'; let h; for (let i = 0; i < 3; i++) { h = codecs[i].toString(16); if (h.length < 2) h = `0${h}`; codec += h; } return codec; } class AAC { static getFrameDuration(rate) { let timescale = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 90000; return 1024 * timescale / rate; } static getRateIndexByRate(rate) { return AAC.FREQ.indexOf(rate); } } _defineProperty$1(AAC, "FREQ", [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350]); function getSamples(stts, stsc, stsz, stco, ctts, stss) { const samples = []; const cttsEntries = ctts === null || ctts === void 0 ? void 0 : ctts.entries; const stscEntries = stsc.entries; const stcoEntries = stco.entries; const stszEntrySizes = stsz.entrySizes; const stssEntries = stss === null || stss === void 0 ? void 0 : stss.entries; let keyframeMap; if (stssEntries) { keyframeMap = {}; stssEntries.forEach(x => { keyframeMap[x - 1] = true; }); } let cttsArr; if (cttsEntries) { cttsArr = []; cttsEntries.forEach(_ref => { let { count, offset } = _ref; for (let i = 0; i < count; i++) { cttsArr.push(offset); } }); } let sample; let gopId = -1; let dts = 0; let pos = 0; let chunkIndex = 0; let chunkRunIndex = 0; let offsetInChunk = 0; let lastSampleInChunk = stscEntries[0].samplesPerChunk; let lastChunkInRun = stscEntries[1] ? stscEntries[1].firstChunk - 1 : Infinity; stts.entries.forEach(_ref2 => { let { count, delta } = _ref2; for (let i = 0; i < count; i++) { sample = { dts, duration: delta, size: stszEntrySizes[pos] || stsz.sampleSize, offset: stcoEntries[chunkIndex] + offsetInChunk, index: pos }; if (stssEntries) { sample.keyframe = keyframeMap[pos]; if (sample.keyframe) { gopId++; } sample.gopId = gopId; } // sample.timeOffset = 0 if (cttsArr && pos < cttsArr.length) { sample.pts = sample.dts + cttsArr[pos]; // sample.timeOffset = cttsArr[pos] // if (pos === 0) { // sample.pts = sample.dts // sample.timeOffset = 0 // } } samples.push(sample); dts += delta; pos++; if (pos < lastSampleInChunk) { offsetInChunk += sample.size; } else { chunkIndex++; offsetInChunk = 0; if (chunkIndex >= lastChunkInRun) { chunkRunIndex++; lastChunkInRun = stscEntries[chunkRunIndex + 1] ? 
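/* Each AAC frame carries 1024 PCM samples, so AAC.getFrameDuration above returns
   1024 * timescale / rate: getFrameDuration(48000) === 1920 ticks (about 21.3 ms in
   the default 90 kHz timescale) and getFrameDuration(44100) is roughly 2089.8 ticks
   (about 23.2 ms). getRateIndexByRate maps a sample rate to its index in the FREQ
   table, e.g. getRateIndexByRate(44100) === 4. */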
stscEntries[chunkRunIndex + 1].firstChunk - 1 : Infinity; } lastSampleInChunk += stscEntries[chunkRunIndex].samplesPerChunk; } } }); return samples; } function parseVisualSampleEntry(ret, data) { ret.dataReferenceIndex = readBig16(data, 6); ret.width = readBig16(data, 24); ret.height = readBig16(data, 26); ret.horizresolution = readBig32(data, 28); ret.vertresolution = readBig32(data, 32); ret.frameCount = readBig16(data, 40); ret.depth = readBig16(data, 74); return 78; } function parseAudioSampleEntry(ret, data) { ret.dataReferenceIndex = readBig16(data, 6); ret.channelCount = readBig16(data, 16); ret.sampleSize = readBig16(data, 18); ret.sampleRate = readBig32(data, 24) / (1 << 16); return 28; } function parseBox(box, isFullBox, parse) { if (!box) return; if (box.size !== box.data.length) throw new Error(`box ${box.type} size !== data.length`); const ret = { start: box.start, size: box.size, headerSize: box.headerSize, type: box.type }; if (isFullBox) { ret.version = box.data[box.headerSize]; ret.flags = readBig24(box.data, box.headerSize + 1); ret.headerSize += 4; } parse(ret, box.data.subarray(ret.headerSize), ret.start + ret.headerSize); return ret; } const padStart = function (str, length, pad) { const charstr = String(pad); const len = length >> 0; let maxlen = Math.ceil(len / charstr.length); const chars = []; const r = String(str); while (maxlen--) { chars.push(charstr); } return chars.join('').substring(0, len - r.length) + r; }; const toHex = function () { const hex = []; for (var _len = arguments.length, value = new Array(_len), _key = 0; _key < _len; _key++) { value[_key] = arguments[_key]; } value.forEach(item => { hex.push(padStart(Number(item).toString(16), 2, 0)); }); return hex[0]; }; class MP4Parser { static probe(data) { return !!MP4Parser.findBox(data, ['ftyp']); } static findBox(data, names) { let start = arguments.length > 2 && arguments[2] !== undefined ? 
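/* MP4Parser.findBox below walks the ISO-BMFF box layout: the first 4 bytes of a box
   are its big-endian size, the next 4 its ASCII type; size === 1 means a 64-bit
   "largesize" follows (the header grows to 16 bytes) and size === 0 means the box
   extends to the end of the data. Sketch of reading a single header with the readBig
   helpers above (illustrative):

   function readBoxHeader(data) {
     let size = readBig32(data, 0);
     const type = String.fromCharCode(data[4], data[5], data[6], data[7]);
     let headerSize = 8;
     if (size === 1) { size = readBig64(data, 8); headerSize = 16; }
     else if (size === 0) { size = data.length; }
     return { size, type, headerSize };
   }
   // readBoxHeader(Uint8Array.of(0, 0, 0, 16, 102, 116, 121, 112, 0, 0, 0, 0, 0, 0, 0, 0))
   //   -> { size: 16, type: 'ftyp', headerSize: 8 }
*/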
arguments[2] : 0; const ret = []; if (!data) return ret; let size = 0; let type = ''; let headerSize = 0; while (data.length > 7) { size = readBig32(data); type = String.fromCharCode.apply(null, data.subarray(4, 8)); headerSize = 8; if (size === 1) { size = readBig64(data, 8); headerSize += 8; } else if (!size) { size = data.length; } if (!names[0] || type === names[0]) { const subData = data.subarray(0, size); if (names.length < 2) { ret.push({ start, size, headerSize, type, data: subData }); } else { return MP4Parser.findBox(subData.subarray(headerSize), names.slice(1), start + headerSize); } } start += size; data = data.subarray(size); } return ret; } static tfhd(box) { return parseBox(box, true, (ret, data) => { ret.trackId = readBig32(data); let start = 4; const baseDataOffsetPresent = ret.flags & 0xff & 0x01; const sampleDescriptionIndexPresent = ret.flags & 0xff & 0x02; const defaultSampleDurationPresent = ret.flags & 0xff & 0x08; const defaultSampleSizePresent = ret.flags & 0xff & 0x10; const defaultSampleFlagsPresent = ret.flags & 0xff & 0x20; if (baseDataOffsetPresent) { start += 4; // truncate top 4 bytes ret.baseDataOffset = readBig32(data, start); start += 4; } if (sampleDescriptionIndexPresent) { ret.sampleDescriptionIndex = readBig32(data, start); start += 4; } if (defaultSampleDurationPresent) { ret.defaultSampleDuration = readBig32(data, start); start += 4; } if (defaultSampleSizePresent) { ret.defaultSampleSize = readBig32(data, start); start += 4; } if (defaultSampleFlagsPresent) { ret.defaultSampleFlags = readBig32(data, start); } }); } static sidx(box) { return parseBox(box, true, (ret, data) => { let start = 0; ret.reference_ID = readBig32(data, start); // stream.readUint32(); start += 4; ret.timescale = readBig32(data, start); start += 4; if (ret.version === 0) { ret.earliest_presentation_time = readBig32(data, start); start += 4; ret.first_offset = readBig32(data, start); start += 4; } else { ret.earliest_presentation_time = readBig64(data, start); start += 8; ret.first_offset = readBig64(data, start); start += 8; } start += 2; ret.references = []; const count = readBig16(data, start); start += 2; for (let i = 0; i < count; i++) { const ref = {}; ret.references.push(ref); let tmp32 = readBig32(data, start); start += 4; ref.reference_type = tmp32 >> 31 & 0x1; ref.referenced_size = tmp32 & 0x7FFFFFFF; ref.subsegment_duration = readBig32(data, start); start += 4; tmp32 = readBig32(data, start); start += 4; ref.starts_with_SAP = tmp32 >> 31 & 0x1; ref.SAP_type = tmp32 >> 28 & 0x7; ref.SAP_delta_time = tmp32 & 0xFFFFFFF; } }); } static moov(box) { return parseBox(box, false, (ret, data, start) => { ret.mvhd = MP4Parser.mvhd(MP4Parser.findBox(data, ['mvhd'], start)[0]); ret.trak = MP4Parser.findBox(data, ['trak'], start).map(trak => MP4Parser.trak(trak)); ret.pssh = MP4Parser.pssh(MP4Parser.findBox(data, ['pssh'], start)[0]); }); } static mvhd(box) { return parseBox(box, true, (ret, data) => { let start = 0; if (ret.version === 1) { ret.timescale = readBig32(data, 16); ret.duration = readBig64(data, 20); start += 28; } else { ret.timescale = readBig32(data, 8); ret.duration = readBig32(data, 12); start += 16; } ret.nextTrackId = readBig32(data, start + 76); }); } static trak(box) { return parseBox(box, false, (ret, data, start) => { ret.tkhd = MP4Parser.tkhd(MP4Parser.findBox(data, ['tkhd'], start)[0]); ret.mdia = MP4Parser.mdia(MP4Parser.findBox(data, ['mdia'], start)[0]); }); } static tkhd(box) { return parseBox(box, true, (ret, data) => { let start = 0; if (ret.version 
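/* findBox above takes a path of box types and recurses one level per name, so callers
   can address nested boxes directly, e.g. (illustrative):
   MP4Parser.findBox(bytes, ['moov'])         -> array with the top-level moov box
   MP4Parser.findBox(bytes, ['moov', 'trak']) -> every trak box found inside moov
   The static helpers that follow (tfhd, sidx, moov, trak, ...) each parse one box type
   on top of parseBox, which strips the header plus the version/flags of full boxes. */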
=== 1) { ret.trackId = readBig32(data, 16); ret.duration = readBig64(data, 24); start += 32; } else { ret.trackId = readBig32(data, 8); ret.duration = readBig32(data, 16); start += 20; } ret.width = readBig32(data, start + 52); ret.height = readBig32(data, start + 56); }); } static mdia(box) { return parseBox(box, false, (ret, data, start) => { ret.mdhd = MP4Parser.mdhd(MP4Parser.findBox(data, ['mdhd'], start)[0]); ret.hdlr = MP4Parser.hdlr(MP4Parser.findBox(data, ['hdlr'], start)[0]); ret.minf = MP4Parser.minf(MP4Parser.findBox(data, ['minf'], start)[0]); }); } static mdhd(box) { return parseBox(box, true, (ret, data) => { let start = 0; if (ret.version === 1) { ret.timescale = readBig32(data, 16); ret.duration = readBig64(data, 20); start += 28; } else { ret.timescale = readBig32(data, 8); ret.duration = readBig32(data, 12); start += 16; } const lang = readBig16(data, start); ret.language = String.fromCharCode((lang >> 10 & 0x1F) + 0x60, (lang >> 5 & 0x1F) + 0x60, (lang & 0x1F) + 0x60); }); } static hdlr(box) { return parseBox(box, true, (ret, data) => { if (ret.version === 0) { ret.handlerType = String.fromCharCode.apply(null, data.subarray(4, 8)); } }); } static minf(box) { return parseBox(box, false, (ret, data, start) => { ret.vmhd = MP4Parser.vmhd(MP4Parser.findBox(data, ['vmhd'], start)[0]); ret.smhd = MP4Parser.smhd(MP4Parser.findBox(data, ['smhd'], start)[0]); ret.stbl = MP4Parser.stbl(MP4Parser.findBox(data, ['stbl'], start)[0]); }); } static vmhd(box) { return parseBox(box, true, (ret, data) => { ret.graphicsmode = readBig16(data); ret.opcolor = [readBig16(data, 2), readBig16(data, 4), readBig16(data, 6)]; }); } static smhd(box) { return parseBox(box, true, (ret, data) => { ret.balance = readBig16(data); }); } static stbl(box) { return parseBox(box, false, (ret, data, start) => { var _ret$stsd$entries$, _ret$stsd$entries$$si, _ret$stsd$entries$$si2; ret.stsd = MP4Parser.stsd(MP4Parser.findBox(data, ['stsd'], start)[0]); ret.stts = MP4Parser.stts(MP4Parser.findBox(data, ['stts'], start)[0]); ret.ctts = MP4Parser.ctts(MP4Parser.findBox(data, ['ctts'], start)[0]); ret.stsc = MP4Parser.stsc(MP4Parser.findBox(data, ['stsc'], start)[0]); ret.stsz = MP4Parser.stsz(MP4Parser.findBox(data, ['stsz'], start)[0]); ret.stco = MP4Parser.stco(MP4Parser.findBox(data, ['stco'], start)[0]); if (!ret.stco) { ret.co64 = MP4Parser.co64(MP4Parser.findBox(data, ['co64'], start)[0]); ret.stco = ret.co64; } const default_IV_size = (_ret$stsd$entries$ = ret.stsd.entries[0]) === null || _ret$stsd$entries$ === void 0 ? void 0 : (_ret$stsd$entries$$si = _ret$stsd$entries$.sinf) === null || _ret$stsd$entries$$si === void 0 ? void 0 : (_ret$stsd$entries$$si2 = _ret$stsd$entries$$si.schi) === null || _ret$stsd$entries$$si2 === void 0 ? void 0 : _ret$stsd$entries$$si2.tenc.default_IV_size; ret.stss = MP4Parser.stss(MP4Parser.findBox(data, ['stss'], start)[0]); ret.senc = MP4Parser.senc(MP4Parser.findBox(data, ['senc'], start)[0], default_IV_size); }); } static senc(box) { let iv = arguments.length > 1 && arguments[1] !== undefined ? 
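/* The mdhd language field above packs three lowercase ISO-639-2 letters into 16 bits,
   5 bits per letter, with 0x60 added back on decode. For example 0x15C7 decodes to
   'eng' ((0x15C7 >> 10) + 0x60 = 0x65 'e', ((0x15C7 >> 5) & 0x1F) + 0x60 = 0x6E 'n',
   (0x15C7 & 0x1F) + 0x60 = 0x67 'g'), and 0x55C4 decodes to 'und'. */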
arguments[1] : 8; return parseBox(box, true, (ret, data) => { let start = 0; const sampleCount = readBig32(data, start); start += 4; ret.samples = []; for (let i = 0; i < sampleCount; i++) { const sample = {}; sample.InitializationVector = []; for (let j = 0; j < iv; j++) { sample.InitializationVector[j] = data[start + j]; } start += iv; if (ret.flags & 0x2) { sample.subsamples = []; const subsampleCount = readBig16(data, start); start += 2; for (let j = 0; j < subsampleCount; j++) { const subsample = {}; subsample.BytesOfClearData = readBig16(data, start); start += 2; subsample.BytesOfProtectedData = readBig32(data, start); start += 4; sample.subsamples.push(subsample); } } ret.samples.push(sample); } }); } static pssh(box) { return parseBox(box, true, (ret, data) => { const keyIds = []; const systemId = []; let start = 0; for (let i = 0; i < 16; i++) { systemId.push(toHex(data[start + i])); } start += 16; if (ret.version > 0) { const numKeyIds = readBig32(data, start); start += 4; for (let i = 0; i < ('' + numKeyIds).length; i++) { for (let j = 0; j < 16; j++) { const keyId = data[start]; start += 1; keyIds.push(toHex(keyId)); } } } const dataSize = readBig32(data, start); ret.data_size = dataSize; start += 4; ret.kid = keyIds; ret.system_id = systemId; ret.buffer = data; }); } static stsd(box) { return parseBox(box, true, (ret, data, start) => { ret.entryCount = readBig32(data); ret.entries = MP4Parser.findBox(data.subarray(4), [], start + 4).map(b => { switch (b.type) { case 'avc1': case 'avc2': case 'avc3': case 'avc4': return MP4Parser.avc1(b); case 'hvc1': case 'hev1': return MP4Parser.hvc1(b); case 'mp4a': return MP4Parser.mp4a(b); case 'alaw': case 'ulaw': return MP4Parser.alaw(b); case 'enca': // sinf->schi->tenc return parseBox(b, false, (ret, data, start) => { ret.channelCount = readBig16(data, 16); ret.samplesize = readBig16(data, 18); ret.sampleRate = readBig32(data, 24) / (1 << 16); data = data.subarray(28); ret.sinf = MP4Parser.sinf(MP4Parser.findBox(data, ['sinf'], start)[0]); ret.esds = MP4Parser.esds(MP4Parser.findBox(data, ['esds'], start)[0]); }); case 'encv': // sinf->schi->tenc return parseBox(b, false, (ret, data, start) => { ret.width = readBig16(data, 24); ret.height = readBig16(data, 26); ret.horizresolution = readBig32(data, 28); ret.vertresolution = readBig32(data, 32); data = data.subarray(78); ret.sinf = MP4Parser.sinf(MP4Parser.findBox(data, ['sinf'], start)[0]); ret.avcC = MP4Parser.avcC(MP4Parser.findBox(data, ['avcC'], start)[0]); ret.hvcC = MP4Parser.hvcC(MP4Parser.findBox(data, ['hvcC'], start)[0]); ret.pasp = MP4Parser.pasp(MP4Parser.findBox(data, ['pasp'], start)[0]); }); } }).filter(Boolean); }); } static tenc(box) { return parseBox(box, false, (ret, data) => { let start = 6; ret.default_IsEncrypted = data[start]; start += 1; ret.default_IV_size = data[start]; start += 1; ret.default_KID = []; for (let i = 0; i < 16; i++) { ret.default_KID.push(toHex(data[start])); start += 1; } }); } static schi(box) { return parseBox(box, false, (ret, data, start) => { ret.tenc = MP4Parser.tenc(MP4Parser.findBox(data, ['tenc'], start)[0]); }); } static sinf(box) { return parseBox(box, false, (ret, data, start) => { ret.schi = MP4Parser.schi(MP4Parser.findBox(data, ['schi'], start)[0]); ret.frma = MP4Parser.frma(MP4Parser.findBox(data, ['frma'], start)[0]); }); } static frma(box) { return parseBox(box, false, (ret, data) => { ret.data_format = ''; for (let i = 0; i < 4; i++) { ret.data_format += String.fromCharCode(data[i]); } }); } static avc1(box) { return 
parseBox(box, false, (ret, data, start) => { const bodyStart = parseVisualSampleEntry(ret, data); const bodyData = data.subarray(bodyStart); start += bodyStart; ret.avcC = MP4Parser.avcC(MP4Parser.findBox(bodyData, ['avcC'], start)[0]); ret.pasp = MP4Parser.pasp(MP4Parser.findBox(bodyData, ['pasp'], start)[0]); }); } static avcC(box) { return parseBox(box, false, (ret, data) => { ret.configurationVersion = data[0]; ret.AVCProfileIndication = data[1]; ret.profileCompatibility = data[2]; ret.AVCLevelIndication = data[3]; ret.codec = getAvcCodec([data[1], data[2], data[3]]); ret.lengthSizeMinusOne = data[4] & 0x3; ret.spsLength = data[5] & 0x1F; ret.sps = []; let start = 6; for (let i = 0; i < ret.spsLength; i++) { const size = readBig16(data, start); start += 2; ret.sps.push(data.subarray(start, start + size)); // ret.spsInfo = SpsParser.parseSPS(ret.sps[i]) // ret.pixelRatio = ret.spsInfo.par_ratio start += size; } ret.ppsLength = data[start]; start += 1; ret.pps = []; for (let i = 0; i < ret.ppsLength; i++) { const size = readBig16(data, start); start += 2; ret.pps.push(data.subarray(start, start += size)); start += size; } }); } static hvc1(box) { return parseBox(box, false, (ret, data, start) => { const bodyStart = parseVisualSampleEntry(ret, data); const bodyData = data.subarray(bodyStart); start += bodyStart; ret.hvcC = MP4Parser.hvcC(MP4Parser.findBox(bodyData, ['hvcC'], start)[0]); ret.pasp = MP4Parser.pasp(MP4Parser.findBox(bodyData, ['pasp'], start)[0]); }); } static hvcC(box) { return parseBox(box, false, (ret, data) => { ret.data = box.data; ret.codec = 'hev1.1.6.L93.B0'; ret.configurationVersion = data[0]; const tmp = data[1]; ret.generalProfileSpace = tmp >> 6; ret.generalTierFlag = (tmp & 0x20) >> 5; ret.generalProfileIdc = tmp & 0x1F; ret.generalProfileCompatibility = readBig32(data, 2); ret.generalConstraintIndicatorFlags = data.subarray(6, 12); ret.generalLevelIdc = data[12]; ret.avgFrameRate = readBig16(data, 19); ret.numOfArrays = data[22]; ret.vps = []; ret.sps = []; ret.pps = []; let start = 23; let type = 0; let numNalus = 0; let size = 0; for (let i = 0; i < ret.numOfArrays; i++) { type = data[start] & 0x3F; numNalus = readBig16(data, start + 1); start += 3; const nalus = []; for (let j = 0; j < numNalus; j++) { size = readBig16(data, start); start += 2; nalus.push(data.subarray(start, start + size)); start += size; } if (type === 32) { ret.vps.push(...nalus); } else if (type === 33) { ret.sps.push(...nalus); } else if (type === 34) { ret.pps.push(...nalus); } } }); } static pasp(box) { return parseBox(box, false, (ret, data) => { ret.hSpacing = readBig32(data); ret.vSpacing = readBig32(data, 4); }); } static mp4a(box) { return parseBox(box, false, (ret, data, start) => { const bodyStart = parseAudioSampleEntry(ret, data); ret.esds = MP4Parser.esds(MP4Parser.findBox(data.subarray(bodyStart), ['esds'], start + bodyStart)[0]); }); } static esds(box) { return parseBox(box, true, (ret, data) => { ret.codec = 'mp4a.'; let start = 0; let byteRead = 0; let size = 0; let tag = 0; while (data.length) { start = 0; tag = data[start]; byteRead = data[start + 1]; start += 2; while (byteRead & 0x80) { size = (byteRead & 0x7F) << 7; byteRead = data[start]; start += 1; } size += byteRead & 0x7F; if (tag === 3) { data = data.subarray(start + 3); } else if (tag === 4) { ret.codec += (data[start].toString(16) + '.').padStart(3, '0'); data = data.subarray(start + 13); } else if (tag === 5) { const config = ret.config = data.subarray(start, start + size); let objectType = (config[0] & 
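// esds descriptor walk: tag 3 (ES_Descriptor) is skipped, tag 4 (DecoderConfigDescriptor)
// contributes the objectTypeIndication to the codec string (e.g. 0x40 -> 'mp4a.40.'), and
// tag 5 (DecoderSpecificInfo) carries the AudioSpecificConfig. Its top 5 bits are the
// audioObjectType; the escape value 31 means the real type is 32 plus the following 6 bits,
// which is what the expression below reconstructs before appending it in hex
// (typically yielding something like 'mp4a.40.2' for AAC-LC).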
0xF8) >> 3; if (objectType === 31 && config.length >= 2) { objectType = 32 + ((config[0] & 0x7) << 3) + ((config[1] & 0xE0) >> 5); } ret.objectType = objectType; ret.codec += objectType.toString(16); if (ret.codec[ret.codec.length - 1] === '.') { ret.codec = ret.codec.substring(0, ret.codec.length - 1); } return; } else { if (ret.codec[ret.codec.length - 1] === '.') { ret.codec = ret.codec.substring(0, ret.codec.length - 1); } return; } } }); } static alaw(box) { return parseBox(box, false, (ret, data) => { parseAudioSampleEntry(ret, data); }); } static stts(box) { return parseBox(box, true, (ret, data) => { const entryCount = readBig32(data); const entries = []; let start = 4; for (let i = 0; i < entryCount; i++) { entries.push({ count: readBig32(data, start), delta: readBig32(data, start + 4) }); start += 8; } ret.entryCount = entryCount; ret.entries = entries; }); } static ctts(box) { return parseBox(box, true, (ret, data) => { const entryCount = readBig32(data); const entries = []; let start = 4; if (ret.version === 1) { for (let i = 0; i < entryCount; i++) { entries.push({ count: readBig32(data, start), offset: readBig32(data, start + 4) }); start += 8; } } else { for (let i = 0; i < entryCount; i++) { entries.push({ count: readBig32(data, start), offset: -(~readBig32(data, start + 4) + 1) }); start += 8; } } ret.entryCount = entryCount; ret.entries = entries; }); } static stsc(box) { return parseBox(box, true, (ret, data) => { const entryCount = readBig32(data); const entries = []; let start = 4; for (let i = 0; i < entryCount; i++) { entries.push({ firstChunk: readBig32(data, start), samplesPerChunk: readBig32(data, start + 4), sampleDescriptionIndex: readBig32(data, start + 8) }); start += 12; } ret.entryCount = entryCount; ret.entries = entries; }); } static stsz(box) { return parseBox(box, true, (ret, data) => { const sampleSize = readBig32(data); const sampleCount = readBig32(data, 4); const entrySizes = []; if (!sampleSize) { let start = 8; for (let i = 0; i < sampleCount; i++) { entrySizes.push(readBig32(data, start)); start += 4; } } ret.sampleSize = sampleSize; ret.sampleCount = sampleCount; ret.entrySizes = entrySizes; }); } static stco(box) { return parseBox(box, true, (ret, data) => { const entryCount = readBig32(data); const entries = []; let start = 4; for (let i = 0; i < entryCount; i++) { entries.push(readBig32(data, start)); start += 4; } ret.entryCount = entryCount; ret.entries = entries; }); } static co64(box) { return parseBox(box, true, (ret, data) => { const entryCount = readBig32(data); const entries = []; let start = 4; for (let i = 0; i < entryCount; i++) { entries.push(readBig64(data, start)); start += 8; } ret.entryCount = entryCount; ret.entries = entries; }); } static stss(box) { return parseBox(box, true, (ret, data) => { const entryCount = readBig32(data); const entries = []; let start = 4; for (let i = 0; i < entryCount; i++) { entries.push(readBig32(data, start)); start += 4; } ret.entryCount = entryCount; ret.entries = entries; }); } static moof(box) { return parseBox(box, false, (ret, data, start) => { ret.mfhd = MP4Parser.mfhd(MP4Parser.findBox(data, ['mfhd'], start)[0]); ret.traf = MP4Parser.findBox(data, ['traf'], start).map(t => MP4Parser.traf(t)); }); } static mfhd(box) { return parseBox(box, true, (ret, data) => { ret.sequenceNumber = readBig32(data); }); } static traf(box) { return parseBox(box, false, (ret, data, start) => { ret.tfhd = MP4Parser.tfhd(MP4Parser.findBox(data, ['tfhd'], start)[0]); ret.tfdt = 
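// 'trun' (TrackRunBox) below is driven by its flags: 0x1 = data_offset present,
// 0x4 = first_sample_flags present, then per sample 0x100 = duration, 0x200 = size,
// 0x400 = sample flags and 0x800 = composition time offset (read as signed when
// version === 1). Missing per-sample fields fall back to the tfhd defaults in moofToSamples().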
MP4Parser.tfdt(MP4Parser.findBox(data, ['tfdt'], start)[0]); ret.trun = MP4Parser.trun(MP4Parser.findBox(data, ['trun'], start)[0]); }); } static trun(box) { return parseBox(box, true, (ret, data) => { const { version, flags } = ret; const dataLen = data.length; const sampleCount = ret.sampleCount = readBig32(data); let offset = 4; if (dataLen > offset && flags & 1) { ret.dataOffset = -(~readBig32(data, offset) + 1); offset += 4; } if (dataLen > offset && flags & 4) { ret.firstSampleFlags = readBig32(data, offset); offset += 4; } ret.samples = []; if (dataLen > offset) { let sample; for (let i = 0; i < sampleCount; i++) { sample = {}; if (flags & 0x100) { sample.duration = readBig32(data, offset); offset += 4; } if (flags & 0x200) { sample.size = readBig32(data, offset); offset += 4; } if (flags & 0x400) { sample.flags = readBig32(data, offset); offset += 4; } if (flags & 0x800) { if (version) { sample.cts = -(~readBig32(data, offset + 4) + 1); } else { sample.cts = readBig32(data, offset); } offset += 4; } ret.samples.push(sample); } } }); } static tfdt(box) { return parseBox(box, true, (ret, data) => { if (ret.version === 1) { ret.baseMediaDecodeTime = readBig64(data); } else { ret.baseMediaDecodeTime = readBig32(data); } }); } static probe(data) { return !!MP4Parser.findBox(data, ['ftyp']); } static parseSampleFlags(flags) { return { isLeading: (flags[0] & 0x0c) >>> 2, dependsOn: flags[0] & 0x03, isDependedOn: (flags[1] & 0xc0) >>> 6, hasRedundancy: (flags[1] & 0x30) >>> 4, paddingValue: (flags[1] & 0x0e) >>> 1, isNonSyncSample: flags[1] & 0x01, degradationPriority: flags[2] << 8 | flags[3] }; } static moovToTrack(moov, videoTrack, audioTrack) { var _e1$esds, _e1$esds2; const tracks = moov.trak; if (!tracks || !tracks.length) return; const vTrack = tracks.find(t => { var _t$mdia, _t$mdia$hdlr; return ((_t$mdia = t.mdia) === null || _t$mdia === void 0 ? void 0 : (_t$mdia$hdlr = _t$mdia.hdlr) === null || _t$mdia$hdlr === void 0 ? void 0 : _t$mdia$hdlr.handlerType) === 'vide'; }); const aTrack = tracks.find(t => { var _t$mdia2, _t$mdia2$hdlr; return ((_t$mdia2 = t.mdia) === null || _t$mdia2 === void 0 ? void 0 : (_t$mdia2$hdlr = _t$mdia2.hdlr) === null || _t$mdia2$hdlr === void 0 ? void 0 : _t$mdia2$hdlr.handlerType) === 'soun'; }); if (vTrack && videoTrack) { var _vTrack$tkhd, _vTrack$mdia, _vTrack$mdia$minf, _vTrack$mdia$minf$stb, _vTrack$mdia2, _vTrack$mdia2$minf, _vTrack$mdia2$minf$st; const v = videoTrack; const _vTrackId = (_vTrack$tkhd = vTrack.tkhd) === null || _vTrack$tkhd === void 0 ? void 0 : _vTrack$tkhd.trackId; if (_vTrackId !== null && _vTrackId !== undefined) v.id = vTrack.tkhd.trackId; v.tkhdDuration = vTrack.tkhd.duration; v.mvhdDurtion = moov.mvhd.duration; v.mvhdTimecale = moov.mvhd.timescale; v.timescale = v.formatTimescale = vTrack.mdia.mdhd.timescale; v.duration = vTrack.mdia.mdhd.duration || v.mvhdDurtion / v.mvhdTimecale * v.timescale; const e1 = vTrack.mdia.minf.stbl.stsd.entries[0]; v.width = e1.width; v.height = e1.height; if (e1.pasp) { v.sarRatio = [e1.pasp.hSpacing, e1.pasp.vSpacing]; } if (e1.hvcC) { v.codecType = VideoCodecType.HEVC; v.codec = e1.hvcC.codec; v.vps = e1.hvcC.vps; v.sps = e1.hvcC.sps; v.pps = e1.hvcC.pps; v.hvcC = e1.hvcC.data; } else if (e1.avcC) { v.codecType = VideoCodecType.AVC; v.codec = e1.avcC.codec; v.sps = e1.avcC.sps; v.pps = e1.avcC.pps; } else { throw new Error('unknown video stsd entry'); } v.present = true; v.ext = {}; v.ext.stss = (_vTrack$mdia = vTrack.mdia) === null || _vTrack$mdia === void 0 ? 
void 0 : (_vTrack$mdia$minf = _vTrack$mdia.minf) === null || _vTrack$mdia$minf === void 0 ? void 0 : (_vTrack$mdia$minf$stb = _vTrack$mdia$minf.stbl) === null || _vTrack$mdia$minf$stb === void 0 ? void 0 : _vTrack$mdia$minf$stb.stss; v.ext.ctts = (_vTrack$mdia2 = vTrack.mdia) === null || _vTrack$mdia2 === void 0 ? void 0 : (_vTrack$mdia2$minf = _vTrack$mdia2.minf) === null || _vTrack$mdia2$minf === void 0 ? void 0 : (_vTrack$mdia2$minf$st = _vTrack$mdia2$minf.stbl) === null || _vTrack$mdia2$minf$st === void 0 ? void 0 : _vTrack$mdia2$minf$st.ctts; if (e1 && e1.type === 'encv') { var _e1$sinf, _e1$sinf$schi, _e1$sinf2, _e1$sinf2$schi, _e1$sinf3, _e1$sinf3$schi, _e1$sinf4, _e1$sinf4$frma; v.isVideoEncryption = true; e1.default_KID = (_e1$sinf = e1.sinf) === null || _e1$sinf === void 0 ? void 0 : (_e1$sinf$schi = _e1$sinf.schi) === null || _e1$sinf$schi === void 0 ? void 0 : _e1$sinf$schi.tenc.default_KID; e1.default_IsEncrypted = (_e1$sinf2 = e1.sinf) === null || _e1$sinf2 === void 0 ? void 0 : (_e1$sinf2$schi = _e1$sinf2.schi) === null || _e1$sinf2$schi === void 0 ? void 0 : _e1$sinf2$schi.tenc.default_IsEncrypted; e1.default_IV_size = (_e1$sinf3 = e1.sinf) === null || _e1$sinf3 === void 0 ? void 0 : (_e1$sinf3$schi = _e1$sinf3.schi) === null || _e1$sinf3$schi === void 0 ? void 0 : _e1$sinf3$schi.tenc.default_IV_size; v.videoSenc = vTrack.mdia.minf.stbl.senc && vTrack.mdia.minf.stbl.senc.samples; e1.data_format = (_e1$sinf4 = e1.sinf) === null || _e1$sinf4 === void 0 ? void 0 : (_e1$sinf4$frma = _e1$sinf4.frma) === null || _e1$sinf4$frma === void 0 ? void 0 : _e1$sinf4$frma.data_format; v.useEME = moov.useEME; v.kidValue = moov.kidValue; v.pssh = moov.pssh; v.encv = e1; } } if (aTrack && audioTrack) { var _aTrack$tkhd, _e1$esds3, _e1$esds4, _aTrack$mdia, _aTrack$mdia$minf, _aTrack$mdia$minf$stb, _aTrack$mdia2, _aTrack$mdia2$minf, _aTrack$mdia2$minf$st; const a = audioTrack; const _aTrackId = (_aTrack$tkhd = aTrack.tkhd) === null || _aTrack$tkhd === void 0 ? void 0 : _aTrack$tkhd.trackId; if (_aTrackId !== null && _aTrackId !== undefined) a.id = aTrack.tkhd.trackId; a.tkhdDuration = aTrack.tkhd.duration; a.mvhdDurtion = moov.mvhd.duration; a.mvhdTimecale = moov.mvhd.timescale; a.timescale = a.formatTimescale = aTrack.mdia.mdhd.timescale; a.duration = aTrack.mdia.mdhd.duration || a.mvhdDurtion / a.mvhdTimecale * a.timescale; const e1 = aTrack.mdia.minf.stbl.stsd.entries[0]; a.sampleSize = e1.sampleSize; a.sampleRate = e1.sampleRate; a.channelCount = e1.channelCount; a.present = true; switch (e1.type) { case 'alaw': a.codecType = a.codec = AudioCodecType.G711PCMA; a.sampleRate = 8000; break; case 'ulaw': a.codecType = a.codec = AudioCodecType.G711PCMU; a.sampleRate = 8000; break; default: a.codecType = AudioCodecType.AAC; a.sampleDuration = AAC.getFrameDuration(a.sampleRate, a.timescale); a.sampleRateIndex = AAC.getRateIndexByRate(a.sampleRate); a.objectType = ((_e1$esds = e1.esds) === null || _e1$esds === void 0 ? void 0 : _e1$esds.objectType) || 2; if (e1.esds) a.config = Array.from(e1.esds.config); a.codec = ((_e1$esds2 = e1.esds) === null || _e1$esds2 === void 0 ? void 0 : _e1$esds2.codec) || 'mp4a.40.2'; break; } a.sampleDuration = AAC.getFrameDuration(a.sampleRate, a.timescale); a.objectType = ((_e1$esds3 = e1.esds) === null || _e1$esds3 === void 0 ? void 0 : _e1$esds3.objectType) || 2; if (e1.esds) { if (e1.esds.config) { a.config = Array.from(e1.esds.config); } else { console.warn('esds config is null'); } } a.codec = ((_e1$esds4 = e1.esds) === null || _e1$esds4 === void 0 ? 
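// Audio sample entry mapping: 'alaw'/'ulaw' become G711PCMA/G711PCMU at a fixed 8000 Hz,
// everything else is treated as AAC — objectType falls back to 2 (AAC-LC) and codec to
// 'mp4a.40.2' when no usable esds config is present, sampleDuration comes from
// AAC.getFrameDuration(sampleRate, timescale) (1024 samples per AAC frame) and
// sampleRateIndex from AAC.getRateIndexByRate(sampleRate).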
void 0 : _e1$esds4.codec) || 'mp4a.40.2'; a.sampleRateIndex = AAC.getRateIndexByRate(a.sampleRate); a.ext = {}; a.ext.stss = (_aTrack$mdia = aTrack.mdia) === null || _aTrack$mdia === void 0 ? void 0 : (_aTrack$mdia$minf = _aTrack$mdia.minf) === null || _aTrack$mdia$minf === void 0 ? void 0 : (_aTrack$mdia$minf$stb = _aTrack$mdia$minf.stbl) === null || _aTrack$mdia$minf$stb === void 0 ? void 0 : _aTrack$mdia$minf$stb.stss; a.ext.ctts = (_aTrack$mdia2 = aTrack.mdia) === null || _aTrack$mdia2 === void 0 ? void 0 : (_aTrack$mdia2$minf = _aTrack$mdia2.minf) === null || _aTrack$mdia2$minf === void 0 ? void 0 : (_aTrack$mdia2$minf$st = _aTrack$mdia2$minf.stbl) === null || _aTrack$mdia2$minf$st === void 0 ? void 0 : _aTrack$mdia2$minf$st.ctts; a.present = true; if (e1 && e1.type === 'enca') { var _e1$sinf5, _e1$sinf5$frma, _e1$sinf6, _e1$sinf6$schi, _e1$sinf7, _e1$sinf7$schi, _e1$sinf8, _e1$sinf8$schi; a.isAudioEncryption = true; e1.data_format = (_e1$sinf5 = e1.sinf) === null || _e1$sinf5 === void 0 ? void 0 : (_e1$sinf5$frma = _e1$sinf5.frma) === null || _e1$sinf5$frma === void 0 ? void 0 : _e1$sinf5$frma.data_format; e1.default_KID = (_e1$sinf6 = e1.sinf) === null || _e1$sinf6 === void 0 ? void 0 : (_e1$sinf6$schi = _e1$sinf6.schi) === null || _e1$sinf6$schi === void 0 ? void 0 : _e1$sinf6$schi.tenc.default_KID; e1.default_IsEncrypted = (_e1$sinf7 = e1.sinf) === null || _e1$sinf7 === void 0 ? void 0 : (_e1$sinf7$schi = _e1$sinf7.schi) === null || _e1$sinf7$schi === void 0 ? void 0 : _e1$sinf7$schi.tenc.default_IsEncrypted; e1.default_IV_size = (_e1$sinf8 = e1.sinf) === null || _e1$sinf8 === void 0 ? void 0 : (_e1$sinf8$schi = _e1$sinf8.schi) === null || _e1$sinf8$schi === void 0 ? void 0 : _e1$sinf8$schi.tenc.default_IV_size; a.audioSenc = aTrack.mdia.minf.stbl.senc && aTrack.mdia.minf.stbl.senc.samples; a.useEME = moov.useEME; a.kidValue = moov.kidValue; a.enca = e1; } } audioTrack && (audioTrack.isVideoEncryption = videoTrack ? videoTrack.isVideoEncryption : false); videoTrack && (videoTrack.isAudioEncryption = audioTrack ? audioTrack.isAudioEncryption : false); if (videoTrack !== null && videoTrack !== void 0 && videoTrack.encv || audioTrack !== null && audioTrack !== void 0 && audioTrack.enca) { var _videoTrack$encv, _audioTrack$enca; const vkid = videoTrack === null || videoTrack === void 0 ? void 0 : (_videoTrack$encv = videoTrack.encv) === null || _videoTrack$encv === void 0 ? void 0 : _videoTrack$encv.default_KID; const akid = audioTrack === null || audioTrack === void 0 ? void 0 : (_audioTrack$enca = audioTrack.enca) === null || _audioTrack$enca === void 0 ? void 0 : _audioTrack$enca.default_KID; const kid = vkid || akid ? (vkid || akid).join('') : null; videoTrack && (videoTrack.kid = kid); audioTrack && (audioTrack.kid = kid); } videoTrack && (videoTrack.flags = 0xf01); audioTrack && (audioTrack.flags = 0x701); return { videoTrack, audioTrack }; } static evaluateDefaultDuration(videoTrack, audioTrack, videoSampleCount) { var _audioTrack$samples; const audioSampleCount = audioTrack === null || audioTrack === void 0 ? void 0 : (_audioTrack$samples = audioTrack.samples) === null || _audioTrack$samples === void 0 ? 
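// evaluateDefaultDuration(): when tfhd carries no default sample duration, the video sample
// duration is estimated from the audio side — each AAC frame is 1024 samples, so
// segmentDuration = 1024 * audioSampleCount / audioTimescale, and the per-video-sample
// duration is segmentDuration * videoTimescale / videoSampleCount (with a flat 1024 when
// there is no audio at all). Illustrative arithmetic (sketch only): 43 AAC frames at
// 44100 Hz ≈ 0.998 s; for 25 video samples on a 90000 timescale that is ≈ 3594 ticks each.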
void 0 : _audioTrack$samples.length; // audio if (!audioSampleCount) return 1024; const segmentDuration = 1024 * audioSampleCount / audioTrack.timescale; return segmentDuration * videoTrack.timescale / videoSampleCount; } static moofToSamples(moof, videoTrack, audioTrack) { const ret = {}; if (moof.mfhd) { if (videoTrack) videoTrack.sequenceNumber = moof.mfhd.sequenceNumber; if (audioTrack) audioTrack.sequenceNumber = moof.mfhd.sequenceNumber; } moof.traf.forEach(_ref3 => { let { tfhd, tfdt, trun } = _ref3; if (!tfhd || !trun) return; if (tfdt) { if (videoTrack && videoTrack.id === tfhd.trackId) videoTrack.baseMediaDecodeTime = tfdt.baseMediaDecodeTime; if (audioTrack && audioTrack.id === tfhd.trackId) audioTrack.baseMediaDecodeTime = tfdt.baseMediaDecodeTime; } const defaultSize = tfhd.defaultSampleSize || 0; const defaultDuration = tfhd.defaultSampleDuration || MP4Parser.evaluateDefaultDuration(videoTrack, audioTrack, trun.samples.length || trun.sampleCount); let offset = trun.dataOffset || 0; let dts = 0; let gopId = -1; if (!trun.samples.length && trun.sampleCount) { ret[tfhd.trackId] = []; for (let i = 0; i < trun.sampleCount; i++) { ret[tfhd.trackId].push({ offset, dts, duration: defaultDuration, size: defaultSize }); dts += defaultDuration; offset += defaultSize; } } else { ret[tfhd.trackId] = trun.samples.map((s, index) => { s = { offset, dts, pts: dts + (s.cts || 0), duration: s.duration || defaultDuration, size: s.size || defaultSize, gopId, keyframe: index === 0 || s.flags !== null && s.flags !== undefined && (s.flags & 65536) >>> 0 !== 65536 }; if (s.keyframe) { gopId++; s.gopId = gopId; } dts += s.duration; offset += s.size; return s; }); } }); return ret; } static moovToSamples(moov) { const tracks = moov.trak; if (!tracks || !tracks.length) return; const vTrack = tracks.find(t => { var _t$mdia3, _t$mdia3$hdlr; return ((_t$mdia3 = t.mdia) === null || _t$mdia3 === void 0 ? void 0 : (_t$mdia3$hdlr = _t$mdia3.hdlr) === null || _t$mdia3$hdlr === void 0 ? void 0 : _t$mdia3$hdlr.handlerType) === 'vide'; }); const aTrack = tracks.find(t => { var _t$mdia4, _t$mdia4$hdlr; return ((_t$mdia4 = t.mdia) === null || _t$mdia4 === void 0 ? void 0 : (_t$mdia4$hdlr = _t$mdia4.hdlr) === null || _t$mdia4$hdlr === void 0 ? void 0 : _t$mdia4$hdlr.handlerType) === 'soun'; }); if (!vTrack && !aTrack) return; let videoSamples; let audioSamples; if (vTrack) { var _vTrack$mdia3, _vTrack$mdia3$minf; const videoStbl = (_vTrack$mdia3 = vTrack.mdia) === null || _vTrack$mdia3 === void 0 ? void 0 : (_vTrack$mdia3$minf = _vTrack$mdia3.minf) === null || _vTrack$mdia3$minf === void 0 ? void 0 : _vTrack$mdia3$minf.stbl; if (!videoStbl) return; const { stts, stsc, stsz, stco, stss, ctts } = videoStbl; if (!stts || !stsc || !stsz || !stco || !stss) return; videoSamples = getSamples(stts, stsc, stsz, stco, ctts, stss); } if (aTrack) { var _aTrack$mdia3, _aTrack$mdia3$minf, _aTrack$mdia$mdhd; const audioStbl = (_aTrack$mdia3 = aTrack.mdia) === null || _aTrack$mdia3 === void 0 ? void 0 : (_aTrack$mdia3$minf = _aTrack$mdia3.minf) === null || _aTrack$mdia3$minf === void 0 ? void 0 : _aTrack$mdia3$minf.stbl; if (!audioStbl) return; const timescale = (_aTrack$mdia$mdhd = aTrack.mdia.mdhd) === null || _aTrack$mdia$mdhd === void 0 ? 
void 0 : _aTrack$mdia$mdhd.timescale; const { stts, stsc, stsz, stco } = audioStbl; if (!timescale || !stts || !stsc || !stsz || !stco) return; audioSamples = getSamples(stts, stsc, stsz, stco); } return { videoSamples, audioSamples }; } } /** * 将无符号Float32Array数组转化成有符号的Int16Array数组 * @param {Float32Array} input unsinged Float32Array * @return {Int16Array} singed int16 */ function floatTo16BitPCM(input) { let i = input.length; let output = new Int16Array(i); while (i--) { let s = Math.max(-1, Math.min(1, input[i])); output[i] = s < 0 ? s * 0x8000 : s * 0x7FFF; } return output; } /** * 将无符号Float32Array数组转化成有符号的Int8Array数组 * @param {Float32Array} input unsinged Float32Array * @return {Int8Array} singed int8 */ function floatTo8BitPCM(input) { let i = input.length; let output = new Int8Array(i); while (i--) { let s = Math.max(-1, Math.min(1, input[i])); const temp = s < 0 ? s * 0x8000 : s * 0x7FFF; output[i] = parseInt(255 / (65535 / (32768 + temp)), 10); } return output; } class HlsFmp4Loader extends CommonLoader { constructor(player) { super(player); this.player = player; this.TAG_NAME = 'HlsFmp4Loader'; this.tempSampleListInfo = {}; this.isInitVideo = false; this.isInitAudio = false; this.videoTrack = { id: 1, samples: [], sps: [], pps: [], vps: [], codec: '' }; this.audioTrack = { id: 2, samples: [], sampleRate: 0, channelCount: 0, codec: '', codecType: '' }; this.workerClearTimeout = null; this.workerUrl = null; this.loopWorker = null; this._hasCalcFps = false; this._basefps = 25; if (!this.player.isUseMSE()) { this._initLoopWorker(); } player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); if (this.workerUrl) { URL.revokeObjectURL(this.workerUrl); this.workerUrl = null; } if (this.workerClearTimeout) { clearTimeout(this.workerClearTimeout); this.workerClearTimeout = null; } if (this.loopWorker) { this.loopWorker.postMessage({ cmd: 'destroy' }); this.loopWorker.terminate(); this.loopWorker = null; } this._hasCalcFps = false; this.videoTrack = null; this.audioTrack = null; this.isInitVideo = false; this.isInitAudio = false; this._basefps = 25; this.player.debug.log(this.TAG_NAME, 'destroy'); } demux(videoData, audioData) { let audioTrack = this.audioTrack; let videoTrack = this.videoTrack; this.checkInitAudio(); this.checkInitVideo(); audioTrack.samples = []; videoTrack.samples = []; if (audioData) { this.player.updateStats({ abps: audioData.byteLength }); if (isFalse(this.isInitAudio)) { const moovBox = MP4Parser.findBox(audioData, ['moov'])[0]; if (!moovBox) { this.player.debug.error(this.TAG_NAME, 'cannot found moov box'); return; } MP4Parser.moovToTrack(MP4Parser.moov(moovBox), null, audioTrack); if (this.checkInitAudio()) { this.player.debug.log(this.TAG_NAME, 'audioData audio init success'); this._sendAccADTSHeader(audioTrack); } } const moofBox = MP4Parser.findBox(audioData, ['moof'])[0]; if (moofBox) { const samples = MP4Parser.moofToSamples(MP4Parser.moof(moofBox), null, audioTrack)[audioTrack.id]; const baseMediaDecodeTime = audioTrack.baseMediaDecodeTime; if (samples) { const baseOffset = moofBox.start; samples.map(x => { x.offset += baseOffset; const sampleData = audioData.subarray(x.offset, x.offset + x.size); const pts = x.dts + baseMediaDecodeTime; const arrayBuffer = new Uint8Array(sampleData.length + 2); arrayBuffer.set([0xAF, 0x01], 0); arrayBuffer.set(sampleData, 2); audioTrack.samples.push({ type: MEDIA_TYPE.audio, pts, dts: pts, payload: arrayBuffer, duration: x.duration, size: arrayBuffer.byteLength }); }); } } } if (videoData) { 
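// demux() for fMP4/HLS: the first 'moov' initializes the tracks via moovToTrack() (the video
// chunk may also carry the audio init), then every 'moof' is expanded into samples via
// moofToSamples(); sample offsets are rebased onto moofBox.start, timestamps get
// baseMediaDecodeTime added, and each AAC payload is prefixed with [0xAF, 0x01]
// (the FLV-style "AAC raw frame" tag header the downstream decode path expects).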
this.player.updateStats({ vbps: videoData.byteLength }); if (isFalse(this.isInitVideo) && isFalse(this.isInitAudio)) { const moovBox = MP4Parser.findBox(videoData, ['moov'])[0]; if (!moovBox) { throw new Error('cannot found moov box'); } MP4Parser.moovToTrack(MP4Parser.moov(moovBox), videoTrack, audioTrack); if (isFalse(this.isInitAudio) && this.checkInitAudio()) { this.player.debug.log(this.TAG_NAME, 'videoData audio init success', audioTrack); this._sendAccADTSHeader(audioTrack); } if (this.checkInitVideo()) { this.player.debug.log(this.TAG_NAME, 'video init success'); const isHevc = videoTrack.codecType === VIDEO_ENCODE_TYPE.h265; let seqHeader = null; if (isHevc) { // h265 if (videoTrack.sps.length && videoTrack.vps.length && videoTrack.pps.length) { seqHeader = hevcEncoderConfigurationRecord$2({ sps: videoTrack.sps[0], pps: videoTrack.pps[0], vps: videoTrack.vps[0] }); } } else { // h264 if (videoTrack.sps.length && videoTrack.pps.length) { seqHeader = avcEncoderConfigurationRecord$2({ sps: videoTrack.sps[0], pps: videoTrack.pps[0] }); } } if (seqHeader) { this.player.debug.log(this.TAG_NAME, 'seqHeader'); this._doDecodeByHls(seqHeader, MEDIA_TYPE.video, 0, true, 0); } } } const moofBox = MP4Parser.findBox(videoData, ['moof'])[0]; if (moofBox) { const tracks = MP4Parser.moofToSamples(MP4Parser.moof(moofBox), videoTrack, audioTrack); const videoBaseMediaDecodeTime = videoTrack.baseMediaDecodeTime; const audioBaseMediaDecodeTime = audioTrack.baseMediaDecodeTime; const baseOffset = moofBox.start; Object.keys(tracks).forEach(k => { if (videoTrack.id == k) { tracks[k].map(x => { x.offset += baseOffset; const sample = { type: MEDIA_TYPE.video, pts: (x.pts || x.dts) + videoBaseMediaDecodeTime, dts: x.dts + videoBaseMediaDecodeTime, units: [], payload: null, isIFrame: false }; sample.duration = x.duration; sample.gopId = x.gopId; if (x.keyframe) sample.isIFrame = true; const sampleData = videoData.subarray(x.offset, x.offset + x.size); sample.payload = sampleData; videoTrack.samples.push(sample); }); // eslint-disable-next-line eqeqeq } else if (audioTrack.id == k) { tracks[k].map(x => { x.offset += baseOffset; const sampleData = videoData.subarray(x.offset, x.offset + x.size); const pts = x.dts + audioBaseMediaDecodeTime; const arrayBuffer = new Uint8Array(sampleData.length + 2); arrayBuffer.set([0xAF, 0x01], 0); arrayBuffer.set(sampleData, 2); audioTrack.samples.push({ type: MEDIA_TYPE.audio, pts, dts: pts, payload: arrayBuffer, duration: x.duration, // ms size: arrayBuffer.byteLength }); }); } }); } } // 传入到worker 里面 const allSampleList = videoTrack.samples.concat(audioTrack.samples); allSampleList.sort((a, b) => { return a.dts - b.dts; }); allSampleList.forEach(sample => { const arrayBuffer = new Uint8Array(sample.payload); delete sample.payload; if (this.player.isUseMSE()) { if (sample.type === MEDIA_TYPE.video) { // 直接解码 this._doDecodeVideo({ ...sample, payload: arrayBuffer }); } else if (sample.type === MEDIA_TYPE.audio) { this._doDecodeAudio({ ...sample, payload: arrayBuffer }); } } else { this.loopWorker.postMessage({ ...sample, payload: arrayBuffer, cmd: 'sample' }, [arrayBuffer.buffer]); } }); if (isFalse(this._hasCalcFps)) { this._hasCalcFps = true; this._calcDecodeFps(allSampleList); } } checkInitAudio() { this.isInitAudio = !!(this.audioTrack.sampleRate && this.audioTrack.channelCount && this.audioTrack.codec && this.audioTrack.codecType === 'aac'); return this.isInitAudio; } checkInitVideo() { this.isInitVideo = !!(this.videoTrack.pps.length && this.videoTrack.sps.length && 
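// A track counts as initialized once checkInitVideo()/checkInitAudio() see codec plus
// SPS/PPS (video) or sampleRate/channelCount/codec with codecType 'aac' (audio). After
// demuxing, video and audio samples are merged, sorted by dts and either decoded right away
// on the MSE path or transferred (payload buffers included in the transfer list) to the
// loop worker, which paces them out at the measured stream fps.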
this.videoTrack.codec); return this.isInitVideo; } _sendAccADTSHeader(audioTrack) { // const accADTSHeader = aacEncoderConfigurationRecordV2({ profile: audioTrack.objectType, sampleRate: audioTrack.sampleRateIndex, channel: audioTrack.channelCount }); this._doDecodeByHls(accADTSHeader, MEDIA_TYPE.audio, 0, true, 0); } _calcDecodeFps(sampleList) { const _tempSampleTsList = sampleList.map(sample => { return { ts: sample.dts || sample.pts, type: sample.type }; }); const streamVideoFps = calcStreamFpsByBufferList(_tempSampleTsList, MEDIA_TYPE.video); if (streamVideoFps) { this.player.debug.log(this.TAG_NAME, `_calcDecodeFps() video fps is ${streamVideoFps}, update base fps is ${this._basefps}`); this._basefps = streamVideoFps; } this._postMessageToLoopWorker('updateBaseFps', { baseFps: this._basefps }); } _initLoopWorker() { this.player.debug.log(this.TAG_NAME, '_initLoopWorker()'); // worker fun // worker 里面跑interval function LoopWorkerFun() { const MEDIA_TYPE = { audio: 1, video: 2 }; class LoopWorker { constructor() { this.baseFps = 0; this.fpsInterval = null; this.preLoopTimestamp = null; this.startBpsTime = null; this.allSampleList = []; } destroy() { this._clearInterval(); this.baseFps = 0; this.allSampleList = []; this.preLoopTimestamp = null; this.startBpsTime = null; } updateBaseFps(baseFps) { this.baseFps = baseFps; this._clearInterval(); this._startInterval(); } pushSample(sample) { delete sample.cmd; this.allSampleList.push(sample); } _startInterval() { const fragDuration = Math.ceil(1000 / this.baseFps); this.fpsInterval = setInterval(() => { let nowTime = new Date().getTime(); if (!this.preLoopTimestamp) { this.preLoopTimestamp = nowTime; } if (!this.startBpsTime) { this.startBpsTime = nowTime; } const diffTime = nowTime - this.preLoopTimestamp; if (diffTime > fragDuration * 2) { console.warn(`JbPro:[HlsFmp4Loader LoopWorker] loop interval is ${diffTime}ms, more than ${fragDuration} * 2ms`); } this._loop(); this.preLoopTimestamp = new Date().getTime(); if (this.startBpsTime) { const timestamp = nowTime - this.startBpsTime; if (timestamp >= 1000) { this._calcSampleList(); this.startBpsTime = nowTime; } } }, fragDuration); } _clearInterval() { if (this.fpsInterval) { clearInterval(this.fpsInterval); this.fpsInterval = null; } } _calcSampleList() { const tempObj = { buferredDuration: 0, allListLength: this.allSampleList.length, audioListLength: 0, videoListLength: 0 }; this.allSampleList.forEach(sample => { if (sample.type === MEDIA_TYPE.video) { tempObj.videoListLength++; if (sample.duration) { tempObj.buferredDuration += sample.duration; } } else if (sample.type === MEDIA_TYPE.audio) { tempObj.audioListLength++; } }); postMessage({ cmd: 'sampleListInfo', ...tempObj }); } _loop() { let sample = null; if (this.allSampleList.length) { sample = this.allSampleList.shift(); if (sample.type === MEDIA_TYPE.video) { postMessage({ cmd: 'decodeVideo', ...sample }, [sample.payload.buffer]); // check next is audio let tempSample = this.allSampleList[0]; // 这边是检查了所有的audio while (tempSample && tempSample.type === MEDIA_TYPE.audio) { sample = this.allSampleList.shift(); postMessage({ cmd: 'decodeAudio', ...sample }, [sample.payload.buffer]); tempSample = this.allSampleList[0]; } } else if (sample.type === MEDIA_TYPE.audio) { postMessage({ cmd: 'decodeAudio', ...sample }, [sample.payload.buffer]); // check next is video // todo:这个就检查了一个。 if (this.allSampleList.length && this.allSampleList[0].type === MEDIA_TYPE.video) { sample = this.allSampleList.shift(); postMessage({ cmd: 'decodeVideo', 
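// LoopWorker pacing: the interval ticks roughly every 1000 / baseFps ms (warning when a tick
// is delayed past twice that). Each tick ships one video sample and then drains any audio
// samples queued directly behind it; when the head of the queue is audio it ships that plus
// at most one following video sample. Once per second it also posts 'sampleListInfo'
// (buffered duration and per-type queue lengths) back to the main thread.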
...sample }, [sample.payload.buffer]); } } } } } let loopWorker = new LoopWorker(); self.onmessage = e => { const msg = e.data; switch (msg.cmd) { case 'updateBaseFps': loopWorker.updateBaseFps(msg.baseFps); break; case 'sample': loopWorker.pushSample(msg); break; case 'destroy': loopWorker.destroy(); loopWorker = null; break; } }; } const loopWorkerString = function2String(LoopWorkerFun.toString()); const blob = new Blob([loopWorkerString], { type: "text/javascript" }); const workerUrl = URL.createObjectURL(blob); let loopWorker = new Worker(workerUrl); this.workerUrl = workerUrl; // 必须要释放,不然每次调用内存都明显泄露内存 // chrome 83 file协议下如果直接释放,将会使WebWorker无法启动 this.workerClearTimeout = setTimeout(() => { window.URL.revokeObjectURL(this.workerUrl); this.workerUrl = null; this.workerClearTimeout = null; }, URL_OBJECT_CLEAR_TIME); loopWorker.onmessage = event => { const msg = event.data; switch (msg.cmd) { case 'decodeVideo': this._doDecodeVideo(msg); break; case 'decodeAudio': this._doDecodeAudio(msg); break; case 'sampleListInfo': this.tempSampleListInfo = msg; break; } }; this.loopWorker = loopWorker; } _postMessageToLoopWorker(cmd, options) { if (this.player.isUseMSE()) { return; } if (this.loopWorker) { this.loopWorker.postMessage({ cmd, ...options }); } else { this.player.debug.warn(this.TAG_NAME, `loop worker is not init, can not post message`); } } _doDecodeAudio(sample) { const uint8Array = new Uint8Array(sample.payload); this._doDecodeByHls(uint8Array, MEDIA_TYPE.audio, sample.dts, false, 0); } _doDecodeVideo(sample) { const uint8Array = new Uint8Array(sample.payload); let packet = null; if (sample.isHevc) { // add 5 header packet = hevcEncoderNalePacketNotLength(uint8Array, sample.isIFrame); } else { packet = avcEncoderNalePacketNotLength(uint8Array, sample.isIFrame); } this.player.updateStats({ dts: sample.dts }); const cts = sample.pts - sample.dts; this._doDecodeByHls(packet, MEDIA_TYPE.video, sample.dts, sample.isIFrame, cts); } getBuferredDuration() { return this.tempSampleListInfo.buferredDuration || 0; } getSampleListLength() { return this.tempSampleListInfo.allListLength || 0; } getSampleAudioListLength() { return this.tempSampleListInfo.audioListLength || 0; } getSampleVideoListLength() { return this.tempSampleListInfo.videoListLength || 0; } } class Transmuxer { constructor(hls, isMP4) { this.hls = hls; this.player = this.hls.player; this.isMP4 = isMP4; this._initSegmentId = ''; this.TAG_NAME = 'HlsTransmuxer'; if (isMP4) { this._demuxer = new HlsFmp4Loader(this.hls.player); } else { this._demuxer = new HlsTsLoader(this.hls.player); } this.player.debug.log(this.TAG_NAME, `init and isMP4 is ${isMP4}`); } destroy() { if (this._demuxer) { this._demuxer.destroy(); this._demuxer = null; } } transmux(videoChunk, audioChunk, discontinuity, contiguous, startTime, needInit) { this.player.debug.log(this.TAG_NAME, `transmux videoChunk:${videoChunk && videoChunk.byteLength}, audioChunk:${audioChunk && audioChunk.byteLength}, discontinuity:${discontinuity}, contiguous:${contiguous}, startTime:${startTime}, needInit:${needInit}`); const demuxer = this._demuxer; try { if (this.isMP4) { demuxer.demux(videoChunk, audioChunk); } else { demuxer.demuxAndFix(concatUint8Array(videoChunk, audioChunk), discontinuity, contiguous, startTime); } } catch (error) { throw new StreamingError(ERR.DEMUX, ERR.HLS, error); } } } class BufferService { constructor(hls) { this.hls = hls; this.player = hls.player; this._decryptor = new Decryptor(this.hls, this.player); /** @type {Transmuxer} */ this._transmuxer = null; 
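// BufferService: createSource() sniffs the first chunk with HlsTsLoader.probe() /
// MP4Parser.probe() to pick the TS or fMP4 Transmuxer, decryptBuffer() runs the segment
// payloads through the Decryptor first, and appendBuffer() then simply forwards the
// (video, audio) chunks to Transmuxer.transmux() together with the discontinuity /
// contiguity flags and the segment start time.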
/** @type {MSE} */ this._mse = null; this._softVideo = null; this._sourceCreated = false; this._needInitSegment = true; this._directAppend = false; this.TAG_NAME = 'HlsBufferService'; } async destroy() { this._softVideo = null; if (this._transmuxer) { this._transmuxer.destroy(); this._transmuxer = null; } } get baseDts() { var _this$_transmuxer, _this$_transmuxer$_de; return (_this$_transmuxer = this._transmuxer) === null || _this$_transmuxer === void 0 ? void 0 : (_this$_transmuxer$_de = _this$_transmuxer._demuxer) === null || _this$_transmuxer$_de === void 0 ? void 0 : _this$_transmuxer$_de._baseDts; } get nbSb() { return 0; } async updateDuration(duration) { this.player.debug.log(this.TAG_NAME, 'updateDuration()', duration); } getBuferredDuration() { var _this$_transmuxer2, _this$_transmuxer2$_d; return (_this$_transmuxer2 = this._transmuxer) === null || _this$_transmuxer2 === void 0 ? void 0 : (_this$_transmuxer2$_d = _this$_transmuxer2._demuxer) === null || _this$_transmuxer2$_d === void 0 ? void 0 : _this$_transmuxer2$_d.getBuferredDuration(); } getBufferedSegments() { var _this$_transmuxer3, _this$_transmuxer3$_d; return (_this$_transmuxer3 = this._transmuxer) === null || _this$_transmuxer3 === void 0 ? void 0 : (_this$_transmuxer3$_d = _this$_transmuxer3._demuxer) === null || _this$_transmuxer3$_d === void 0 ? void 0 : _this$_transmuxer3$_d.getSampleListLength(); } getBufferedAudioSegments() { var _this$_transmuxer4, _this$_transmuxer4$_d; return (_this$_transmuxer4 = this._transmuxer) === null || _this$_transmuxer4 === void 0 ? void 0 : (_this$_transmuxer4$_d = _this$_transmuxer4._demuxer) === null || _this$_transmuxer4$_d === void 0 ? void 0 : _this$_transmuxer4$_d.getSampleAudioListLength(); } getBufferedVideoSegments() { var _this$_transmuxer5, _this$_transmuxer5$_d; return (_this$_transmuxer5 = this._transmuxer) === null || _this$_transmuxer5 === void 0 ? void 0 : (_this$_transmuxer5$_d = _this$_transmuxer5._demuxer) === null || _this$_transmuxer5$_d === void 0 ? void 0 : _this$_transmuxer5$_d.getSampleVideoListLength(); } // create source createSource(videoChunk, audioChunk, videoCodec, audioCodec) { if (this._sourceCreated) return; const chunk = videoChunk || audioChunk; if (!chunk) return; if (HlsTsLoader.probe(chunk)) { if (!this._transmuxer) { this._transmuxer = new Transmuxer(this.hls, false); } } else if (MP4Parser.probe(chunk)) { if (!this._transmuxer) { this._transmuxer = new Transmuxer(this.hls, true); } } else { this.player.debug.error(this.TAG_NAME, 'createSource error: chunk is not ts'); } } async appendBuffer(segment, audioSegment, videoChunk, audioChunk, discontinuity, contiguous, startTime) { if (!(videoChunk !== null && videoChunk !== void 0 && videoChunk.length) && !(audioChunk !== null && audioChunk !== void 0 && audioChunk.length)) return; this._needInitSegment || discontinuity; this._transmuxer.transmux(videoChunk, audioChunk, discontinuity, contiguous, startTime, this._needInitSegment || discontinuity); return true; } async clearAllBuffer() { this.player.debug.log(this.TAG_NAME, 'clearAllBuffer'); // if (this._mse) { // return this._mse.clearAllBuffer() // } } decryptBuffer(video, audio) { return this._decryptor.decrypt(video, audio); } async reset() { this._transmuxer = null; this._needInitSegment = true; this._directAppend = false; } async endOfStream() { if (this._softVideo) { this._softVideo.endOfStream(); } } async setLiveSeekableRange(start, end) {} /** * This makes it possible to change codecs or container type mid-stream. 
* @private */ seamlessSwitch() { this._needInitSegment = true; } } class SeiService { constructor(emitter) { this.emitter = emitter; this._seiSet = new Set(); emitter.on(HLS_EVENTS.SEI, sei => { if (sei) this._seiSet.add(sei); }); } throw(currentTime) { if (currentTime === null || currentTime === undefined || !this._seiSet.size) return; const min = currentTime - 0.2; const max = currentTime + 0.2; const toThrow = []; this._seiSet.forEach(sei => { if (sei.time >= min && sei.time <= max) { toThrow.push(sei); } }); toThrow.forEach(sei => { this._seiSet.delete(sei); // 根据当前视频播放时间抛出 sei,触发该事件表示该 sei 将在当前时间点展示。 this.emitter.emit(HLS_EVENTS.SEI_IN_TIME, sei); }); } reset() { this._seiSet.clear(); } } class Stats { constructor(timescale) { this._timescale = timescale; this.encodeType = ''; // hevc | avc this.audioCodec = ''; this.videoCodec = ''; this.domain = ''; // stream url domain this.fps = 0; this.bitrate = 0; // 最新1s下载数据的码率 this.width = 0; this.height = 0; this.samplerate = 0; this.channelCount = 0; this.gop = 0; // 第一个gop帧数 this._bitsAccumulateSize = 0; this._bitsAccumulateDuration = 0; } getStats() { return { encodeType: this.encodeType, audioCodec: this.audioCodec, videoCodec: this.videoCodec, domain: this.domain, fps: this.fps, bitrate: this.bitrate, width: this.width, height: this.height, samplerate: this.samplerate, channelCount: this.channelCount, gop: this.gop }; } setEncodeType(encode) { this.encodeType = encode; } setFpsFromScriptData(_ref) { var _data$onMetaData; let { data } = _ref; const fps = data === null || data === void 0 ? void 0 : (_data$onMetaData = data.onMetaData) === null || _data$onMetaData === void 0 ? void 0 : _data$onMetaData.framerate; if (fps && fps > 0 && fps < 100) { this.fps = fps; } } setVideoMeta(track) { this.width = track.width; this.height = track.height; this.videoCodec = track.codec; this.encodeType = track.codecType; if (track.fpsNum && track.fpsDen) { const fps = track.fpsNum / track.fpsDen; if (fps > 0 && fps < 100) { this.fps = fps; } } } setAudioMeta(track) { this.audioCodec = track.codec; this.samplerate = track.sampleRate; this.channelCount = track.channelCount; } setDomain(responseUrl) { this.domain = responseUrl.split('/').slice(2, 3)[0]; } updateBitrate(samples) { if (!this.fps || this.fps >= 100) { if (samples.length) { const duration = samples.reduce((a, b) => a += b.duration, 0) / samples.length; this.fps = Math.round(this._timescale / duration); } } samples.forEach(sample => { if (sample.gopId === 1) { this.gop++; } this._bitsAccumulateDuration += sample.duration / (this._timescale / 1000); this._bitsAccumulateSize += sample.units.reduce((a, c) => a += c.length, 0); if (this._bitsAccumulateDuration >= 1000) { this.bitrate = this._bitsAccumulateSize * 8; this._bitsAccumulateDuration = 0; this._bitsAccumulateSize = 0; } }); } } /** * @typedef {Object} StatsInfo * @property {number} downloadSpeed * @property {number} avgSpeed * @property {number} currentTime * @property {number} bufferEnd * @property {number} decodeFps * @property {string} encodeType * @property {string} audioCodec * @property {string} videoCodec * @property {string} domain * @property {number} fps * @property {number} bitrate * @property {number} width * @property {number} height * @property {number} samplerate * @property {number} channelCount * @property {number} gop */ class MediaStatsService { constructor(core) { let timescale = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : 1000; _defineProperty$1(this, "_core", null); _defineProperty$1(this, "_samples", []); this._core = core; this._timescale = timescale; this._stats = new Stats(timescale); this._bindEvents(); } /** @returns {StatsInfo} */ getStats() { var _this$_core, _this$_core2, _this$_core2$speedInf, _this$_core3, _this$_core3$speedInf, _this$_core4, _this$_core4$bufferIn; const { currentTime = 0, decodeFps = 0 } = ((_this$_core = this._core) === null || _this$_core === void 0 ? void 0 : _this$_core.media) || {}; return { ...this._stats.getStats(), downloadSpeed: ((_this$_core2 = this._core) === null || _this$_core2 === void 0 ? void 0 : (_this$_core2$speedInf = _this$_core2.speedInfo) === null || _this$_core2$speedInf === void 0 ? void 0 : _this$_core2$speedInf.call(_this$_core2).speed) || 0, avgSpeed: ((_this$_core3 = this._core) === null || _this$_core3 === void 0 ? void 0 : (_this$_core3$speedInf = _this$_core3.speedInfo) === null || _this$_core3$speedInf === void 0 ? void 0 : _this$_core3$speedInf.call(_this$_core3).avgSpeed) || 0, currentTime, bufferEnd: ((_this$_core4 = this._core) === null || _this$_core4 === void 0 ? void 0 : (_this$_core4$bufferIn = _this$_core4.bufferInfo()) === null || _this$_core4$bufferIn === void 0 ? void 0 : _this$_core4$bufferIn.remaining) || 0, decodeFps }; } _bindEvents() { this._core.on(HLS_EVENTS.DEMUXED_TRACK, track => this._stats.updateBitrate(track.samples)); this._core.on(HLS_EVENTS.FLV_SCRIPT_DATA, data => { this._stats.setFpsFromScriptData(data); }); this._core.on(HLS_EVENTS.METADATA_PARSED, e => { if (e.type === 'video') { this._stats.setVideoMeta(e.track); } else { this._stats.setAudioMeta(e.track); } }); this._core.on(HLS_EVENTS.TTFB, e => { this._stats.setDomain(e.responseUrl); }); } reset() { this._samples = []; this._stats = new Stats(this._timescale); } } class HlsLoader extends Emitter { constructor(player) { let cfg = arguments.length > 1 && arguments[1] !== undefined ? 
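// HlsLoader wiring: ManifestLoader fetches/polls the m3u8, Playlist keeps the segment
// bookkeeping, SegmentLoader downloads media segments, BufferService decrypts and
// transmuxes them, SeiService times SEI emission and MediaStatsService (90 kHz timescale
// here) aggregates playback stats. A short tick timer (_startTick/_tick) repeatedly calls
// _loadSegment() so the next segment is requested as soon as the previous one finishes.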
arguments[1] : {}; super(); _defineProperty$1(this, "_loadSegment", async () => { // this.player.debug.log(this.TAG_NAME, '_loadSegment()', '_segmentProcessing', this._segmentProcessing); if (this._segmentProcessing) { this.player.debug.warn('_loadSegment()', '_segmentProcessing is ture and return'); return; } if (!this._playlist) { this.player.debug.warn('_loadSegment()', 'this._playlist is null and return'); return; } const curSeg = this._playlist.currentSegment; const nextSeg = this._playlist.nextSegment; this.player.debug.log(this.TAG_NAME, '_loadSegment()', 'curSeg', curSeg && curSeg.url, 'nextSeg', nextSeg && nextSeg.url); if (!nextSeg) { this.player.debug.log(this.TAG_NAME, `nextSeg is null and return`); return; } return this._loadSegmentDirect(); }); this.player = player; /** @type {import('./config').HlsOption} */ this.config = null; /** @type {ManifestLoader} */ this._manifestLoader = null; /** @type {SegmentLoader} */ this._segmentLoader = null; /** @type {Playlist} */ this._playlist = null; /** @type {BufferService} */ this._bufferService = null; /** @type {SeiService} */ this._seiService = null; /** @type {MediaStatsService} */ this._stats = null; // this._prevSegSn = null; // this._prevSegCc = null; // this._tickTimer = null; // this._tickInterval = 500; // this._segmentProcessing = false; // this._reloadOnPlay = false; // this._switchUrlOpts = null; // this._disconnectTimer = null; // this.TAG_NAME = 'Hls256'; this.canVideoPlay = false; this.$videoElement = null; this.config = cfg = getConfig$1(cfg); this._manifestLoader = new ManifestLoader(this); this._segmentLoader = new SegmentLoader(this); this._playlist = new Playlist(this); this._bufferService = new BufferService(this); this._seiService = new SeiService(this); this._stats = new MediaStatsService(this, 90000); this.player.debug.log(this.TAG_NAME, 'init'); } async destroy() { this.player.debug.log(this.TAG_NAME, 'destroy()'); this._playlist.reset(); this._segmentLoader.reset(); this._seiService.reset(); await Promise.all([this._clear(), this._bufferService.destroy()]); if (this._manifestLoader) { await this._manifestLoader.destroy(); this._manifestLoader = null; } if (this._segmentLoader) { this._segmentLoader.destroy(); this._segmentLoader = null; } if (this._playlist) { this._playlist.destroy(); this._playlist = null; } this.player.debug.log(this.TAG_NAME, 'destroy end'); } _startTick() { this._stopTick(); this._tickTimer = setTimeout(() => { this._tick(); }, this._tickInterval); } _stopTick() { if (this._tickTimer) { clearTimeout(this._tickTimer); } this._tickTimer = null; } _tick() { if (this.player.isDestroyedOrClosed()) { this.player.debug.log(this.TAG_NAME, '_tick() player is destroyed'); return; } this._startTick(); this._loadSegment(); } get isLive() { return this._playlist.isLive; } get streams() { return this._playlist.streams; } get currentStream() { return this._playlist.currentStream; } get hasSubtitle() { return this._playlist.hasSubtitle; } get baseDts() { var _this$_bufferService; return (_this$_bufferService = this._bufferService) === null || _this$_bufferService === void 0 ? 
void 0 : _this$_bufferService.baseDts; } speedInfo() { return this._segmentLoader.speedInfo(); } resetBandwidth() { this._segmentLoader.resetBandwidth(); } /** * @returns {Stats} */ getStats() { return this._stats.getStats(); } // load source async loadSource(url) { // this.player.debug.log(this.TAG_NAME, `loadSource() ${url}`); await this._reset(); await this._loadData(url); this._startTick(); return true; } /** * @param {string} url * @private */ async _loadData(url) { // this.player.debug.log(this.TAG_NAME, `_loadData() ${url}`); try { if (url) url = url.trim(); } catch (e) {} if (!url) { throw this._emitError(new StreamingError(ERR.OTHER, ERR.OTHER, null, null, 'm3u8 url is missing')); } // 获取到 manifest 内容 const manifest = await this._loadM3U8(url); // current stream const { currentStream } = this._playlist; // url switching if (this._urlSwitching) { var _this$_switchUrlOpts; if (currentStream.bitrate === 0 && (_this$_switchUrlOpts = this._switchUrlOpts) !== null && _this$_switchUrlOpts !== void 0 && _this$_switchUrlOpts.bitrate) { var _this$_switchUrlOpts2; currentStream.bitrate = (_this$_switchUrlOpts2 = this._switchUrlOpts) === null || _this$_switchUrlOpts2 === void 0 ? void 0 : _this$_switchUrlOpts2.bitrate; } const switchTimePoint = this._getSeamlessSwitchPoint(); this.config.startTime = switchTimePoint; const segIdx = this._playlist.findSegmentIndexByTime(switchTimePoint); const nextSeg = this._playlist.getSegmentByIndex(segIdx + 1); if (nextSeg) { // move to next segment in case of media stall const bufferClearStartPoint = nextSeg.start; this.player.debug.warn(this.TAG_NAME, `clear buffer from ${bufferClearStartPoint}`); // await this._bufferService.removeBuffer(bufferClearStartPoint) } } // if (manifest) { // is live if (this.isLive) { this.player.debug.log(this.TAG_NAME, 'is live'); // 设置live seek able range // 0xffffffff 无限大 // 设置为无限大,可以让播放器一直缓存,不会清除缓存 this._bufferService.setLiveSeekableRange(0, 0xffffffff); // 配置的目标延迟小于首次获取分片总时长 if (this.config.targetLatency < this._playlist.totalDuration) { this.config.targetLatency = this._playlist.totalDuration; this.config.maxLatency = 1.5 * this.config.targetLatency; } // 如果不是master if (!manifest.isMaster) { this._pollM3U8(url); } } else { this.player.debug.log(this.TAG_NAME, `is vod and totalDuration is ${currentStream.totalDuration} s`); // update duration await this._bufferService.updateDuration(currentStream.totalDuration); } } await this._loadSegment(); } /** * @private */ async _loadM3U8(url) { // this.player.debug.log(this.TAG_NAME, `load m3u8: ${url}`); let playlist; try { [playlist] = await this._manifestLoader.load(url); } catch (error) { throw this._emitError(StreamingError.create(error)); } // this.player.debug.log(this.TAG_NAME, 'playlist is', playlist); if (!playlist) { this.player.debug.warn(this.TAG_NAME, '_loadM3U8() playlist is empty'); return; } this._playlist.upsertPlaylist(playlist); if (playlist.isMaster) { var _this$_playlist$curre; if ((_this$_playlist$curre = this._playlist.currentStream.subtitleStreams) !== null && _this$_playlist$curre !== void 0 && _this$_playlist$curre.length) { this.emit(HLS_EVENTS.SUBTITLE_PLAYLIST, { list: this._playlist.currentStream.subtitleStreams }); } // await this._refreshM3U8(); } else { this.player.debug.warn(this.TAG_NAME, '_loadM3U8() is not master playlist'); } this.emit(HLS_EVENTS.STREAM_PARSED); return playlist; } /** * @private 首次更新 master playlist 的 media level */ _refreshM3U8() { var _stream$currentAudioS, _stream$currentSubtit; this.player.debug.log(this.TAG_NAME, 
'_refreshM3U8()'); const stream = this._playlist.currentStream; if (!stream || !stream.url) { throw this._emitError(StreamingError.create(null, null, new Error('m3u8 url is not defined'))); } const url = stream.url; const audioUrl = (_stream$currentAudioS = stream.currentAudioStream) === null || _stream$currentAudioS === void 0 ? void 0 : _stream$currentAudioS.url; const subtitleUrl = (_stream$currentSubtit = stream.currentSubtitleStream) === null || _stream$currentSubtit === void 0 ? void 0 : _stream$currentSubtit.url; return this._manifestLoader.load(url, audioUrl, subtitleUrl).then(_ref => { let [mediaPlaylist, audioPlaylist, subtitlePlaylist] = _ref; if (!mediaPlaylist) { this.player.debug.warn(this.TAG_NAME, '_refreshM3U8() mediaPlaylist is empty'); return; } this._playlist.upsertPlaylist(mediaPlaylist, audioPlaylist, subtitlePlaylist); if (!this.isLive) { return; } this._pollM3U8(url, audioUrl, subtitleUrl); }).catch(err => { throw this._emitError(StreamingError.create(err)); }); } /** * @private */ _pollM3U8(url, audioUrl, subtitleUrl) { var _this$_playlist$lastS; // this.player.debug.log(this.TAG_NAME, '_pollM3U8()', url, audioUrl, subtitleUrl); let isEmpty = this._playlist.isEmpty; // poll manifest loader this._manifestLoader.poll(url, audioUrl, subtitleUrl, (p1, p2, p3) => { this._playlist.upsertPlaylist(p1, p2, p3); this._playlist.clearOldSegment(); if (p1 && isEmpty && !this._playlist.isEmpty) { this._loadSegment(); } if (isEmpty) { isEmpty = this._playlist.isEmpty; } }, err => { this._emitError(StreamingError.create(err)); }, // 刷新时间 (((_this$_playlist$lastS = this._playlist.lastSegment) === null || _this$_playlist$lastS === void 0 ? void 0 : _this$_playlist$lastS.duration) || 0) * 1000); } /** * @private */ /** * @private */ async _loadSegmentDirect() { // this.player.debug.log(this.TAG_NAME, '_loadSegmentDirect()'); const seg = this._playlist.nextSegment; if (!seg) { this.player.debug.log(this.TAG_NAME, '_loadSegmentDirect() !seg'); return; } let appended = false; let cachedError = null; try { this._segmentProcessing = true; appended = await this._reqAndBufferSegment(seg, this._playlist.getAudioSegment(seg)); } catch (error) { // If an exception is thrown here, other reference functions // need to handle the exception, so the error stops here cachedError = error; } finally { this._segmentProcessing = false; } if (cachedError) { return this._emitError(StreamingError.create(cachedError)); } if (appended) { if (this._urlSwitching) { this._urlSwitching = false; // switchURL 方法调用后,切换 url 成功后触发。 this.emit(HLS_EVENTS.SWITCH_URL_SUCCESS, { url: this.config.url }); } this._playlist.moveSegmentPointer(); this.player.debug.log(this.TAG_NAME, '_loadSegmentDirect()', 'seg.isLast', seg.isLast); if (seg.isLast) { this.player.debug.log(this.TAG_NAME, '_loadSegmentDirect()', 'seg.isLast'); this._end(); } else { this.player.debug.log(this.TAG_NAME, '_loadSegmentDirect()', 'and next _loadSegment()'); this._loadSegment(); } } else { this.player.debug.log(this.TAG_NAME, '_loadSegmentDirect() not appended'); } return appended; } /** * @param {MediaSegment} seg * @param {MediaSegment} audioSeg * @private */ async _reqAndBufferSegment(seg, audioSeg) { this.player.debug.log(this.TAG_NAME, `video seg`, seg && seg.url, 'audio seg', audioSeg && audioSeg.url); const cc = seg ? 
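// _reqAndBufferSegment(): a discontinuity is flagged whenever the segment's cc
// (discontinuity sequence) differs from the previous one; the downloaded payloads are
// decrypted via the BufferService and then appended with
// contiguous = (_prevSegSn === sn - 1) so the transmuxer knows whether timestamps continue
// from the last segment.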
seg.cc : audioSeg.cc; const discontinuity = this._prevSegCc !== cc; let responses = []; try { responses = await this._segmentLoader.load(seg, audioSeg, discontinuity); } catch (e) { e.fatal = false; this._segmentLoader.error = e; throw e; } if (!responses[0]) { return; } const data = await this._bufferService.decryptBuffer(...responses); if (!data) { this.player.debug.log(this.TAG_NAME, `decryptBuffer return null`); return; } const sn = seg ? seg.sn : audioSeg.sn; const start = seg ? seg.start : audioSeg.start; const stream = this._playlist.currentStream; // this._bufferService.createSource(data[0], data[1], stream === null || stream === void 0 ? void 0 : stream.videoCodec, stream === null || stream === void 0 ? void 0 : stream.audioCodec); // await this._bufferService.appendBuffer(seg, audioSeg, data[0], data[1], discontinuity, this._prevSegSn === sn - 1, start); this._prevSegCc = cc; this._prevSegSn = sn; return true; } /** * @private */ async _clear() { this.player.debug.log(this.TAG_NAME, '_clear()'); clearTimeout(this._disconnectTimer); this._stopTick(); await Promise.all([this._segmentLoader.cancel(), this._manifestLoader.stopPoll()]); this._segmentProcessing = false; } /** * @private */ async _reset() { let reuseMse = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; this.player.debug.log(this.TAG_NAME, '_reset()'); this._reloadOnPlay = false; this._prevSegSn = null; this._prevSegCc = null; this._switchUrlOpts = null; this._playlist.reset(); this._segmentLoader.reset(); this._seiService.reset(); this._stats.reset(); await this._clear(); return this._bufferService.reset(reuseMse); } /** * @private */ _end() { this.player.debug.log(this.TAG_NAME, '_end()'); this._clear(); // this._bufferService.endOfStream() } /** * @param {StreamingError} error * @param {boolean?} endOfStream * @private */ _emitError(error) { var _error$originError; let endOfStream = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; if (((_error$originError = error.originError) === null || _error$originError === void 0 ? void 0 : _error$originError.fatal) === false) { console.warn(error); } else { var _this$media; console.table(error); console.error(error); console.error((_this$media = this.media) === null || _this$media === void 0 ? void 0 : _this$media.error); this._stopTick(); if (this._urlSwitching) { this._urlSwitching = false; // switchURL 方法调用后,切换 url 失败后触发。 this.emit(HLS_EVENTS.SWITCH_URL_FAILED, error); } if (endOfStream) this._end(); this._seiService.reset(); this.emit(HLS_EVENTS.ERROR, error); } return error; } /** * @private */ _getSeamlessSwitchPoint() { const { media } = this; let nextLoadPoint = media.currentTime; if (!media.paused) { var _this$_stats; const segIdx = this._playlist.findSegmentIndexByTime(media.currentTime); const curSeg = this._playlist.getSegmentByIndex(segIdx); const latestKbps = (_this$_stats = this._stats) === null || _this$_stats === void 0 ? 
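// _getSeamlessSwitchPoint(): when switching URLs mid-play the target point is currentTime
// plus an estimate of how long the switch will take —
// curSeg.duration * currentStream.bitrate / latest measured download speed + 1, or a flat
// +5 seconds when no speed sample or current segment is available.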
void 0 : _this$_stats.getStats().downloadSpeed; // latest download speed if (latestKbps && curSeg) { const delay = curSeg.duration * this._playlist.currentStream.bitrate / latestKbps + 1; nextLoadPoint += delay; } else { nextLoadPoint += 5; } } return nextLoadPoint; } getDemuxBuferredDuration() { return this._bufferService.getBuferredDuration() || 0; } getDemuxBufferedListLength() { return this._bufferService.getBufferedSegments() || 0; } getDemuxAudioBufferedListLength() { return this._bufferService.getBufferedAudioSegments() || 0; } getDemuxVideoBufferedListLength() { return this._bufferService.getBufferedVideoSegments() || 0; } } class HlsDecoder extends Emitter { constructor(player) { super(); _defineProperty$1(this, "TAG_NAME", 'Hls256Decoder'); this.player = player; this.$videoElement = this.player.video.$videoElement; this.hls = null; this.eventsDestroy = []; this.bandwidthEstimateInterval = null; // this.hls = new HlsLoader(player); this._bindEvents(); } async destroy() { this._stopBandwidthEstimateInterval(); if (this.hls) { await this.hls.destroy(); this.hls = null; } if (this.eventsDestroy.length) { this.eventsDestroy.forEach(event => event()); this.eventsDestroy = []; } this.$videoElement = null; this.player.debug.log(this.TAG_NAME, 'destroy'); return true; } _bindEvents() { this.hls.on(HLS_EVENTS.ERROR, e => { this.player.emitError(EVENTS_ERROR.hlsError, e); }); this._startBandwidthEstimateInterval(); } _startBandwidthEstimateInterval() { this._stopBandwidthEstimateInterval(); this.bandwidthEstimateInterval = setInterval(() => { const speedInfo = this.hls.speedInfo(); // avgSpeed is bit/s 要转换成 kbps/s // 感觉计算的有问题 this.player.emit(EVENTS.kBps, (speedInfo.avgSpeed / 1024 / 8).toFixed(2)); this.hls.resetBandwidth(); }, 1 * 1000); } _stopBandwidthEstimateInterval() { if (this.bandwidthEstimateInterval) { clearInterval(this.bandwidthEstimateInterval); this.bandwidthEstimateInterval = null; } } async loadSource(url) { // this.player.debug.log(this.TAG_NAME, `loadSource() ${url}`); this.url = url; await this.hls.loadSource(url); return true; } checkHlsBufferedDelay() { let result = 0; if (this.hls) { result = this.hls.getDemuxBuferredDuration(); } return result; } getDemuxBufferedListLength() { let result = 0; if (this.hls) { result = this.hls.getDemuxBufferedListLength(); } return result; } getDemuxAudioBufferedListLength() { let result = 0; if (this.hls) { result = this.hls.getDemuxAudioBufferedListLength(); } return result; } getDemuxVideoBufferedListLength() { let result = 0; if (this.hls) { result = this.hls.getDemuxVideoBufferedListLength(); } return result; } } function getWebRtcRemoteSdp(url, data, options) { // return ajax({ // url, // type: 'POST', // data, // contentType: 'application/sdp', // processData: false // }) let headers = { 'Content-Type': 'application/sdp' }; if (options.username && options.password) { headers['Authorization'] = 'Basic ' + btoa(options.username + ':' + options.password); } return fetch(url, { method: 'POST', mode: 'cors', // no-cors, *cors, same-origin cache: 'no-cache', // *default, no-cache, reload, force-cache, only-if-cached credentials: 'include', // include, *same-origin, omit redirect: 'follow', // manual, *follow, error referrerPolicy: 'no-referrer', headers, body: data }); } function getWebRtcRemoteSdpForZLM(url, data) { return ajax({ url, type: 'POST', data, contentType: 'text/plain;charset=utf-8', processData: false, dataType: 'json' }); } function getWebRtcRemoteSdpForSRS(url, data) { return fetch(url, { method: 'POST', mode: 'cors', 
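// WebRTC signaling helpers: the local SDP offer is POSTed to the signaling endpoint with
// Content-Type 'application/sdp' (the generic variant optionally adds HTTP Basic auth,
// ZLM uses a text/plain ajax POST that returns JSON instead), and the remote SDP answer
// is expected back in the response.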
// no-cors, *cors, same-origin cache: 'no-cache', // *default, no-cache, reload, force-cache, only-if-cached credentials: 'include', // include, *same-origin, omit redirect: 'follow', // manual, *follow, error referrerPolicy: 'no-referrer', headers: { 'Content-Type': 'application/sdp' }, body: data }); } function getWebRtcRemoteSdpForOthers(url, data) { return fetch(url, { method: 'POST', mode: 'cors', // no-cors, *cors, same-origin cache: 'no-cache', // *default, no-cache, reload, force-cache, only-if-cached credentials: 'include', // include, *same-origin, omit redirect: 'follow', // manual, *follow, error referrerPolicy: 'no-referrer', headers: { 'Content-Type': 'application/sdp' }, body: data }); } class CommonWebrtc extends Emitter { constructor(player) { super(); this.player = player; this.TAG_NAME = 'CommonWebrtc'; this.rtcPeerConnection = null; this.videoStream = null; this.isDisconnected = false; this.isH264 = this.player.isWebrtcH264(); this.eventsDestroy = []; this.supportVideoFrameCallbackHandle = null; this.isInitInfo = false; this.$videoElement = this.player.video.$videoElement; this.bandwidthEstimateInterval = null; this.rtcPeerTrackVideoReceiver = null; this.rtcPeerTrackAudioReceiver = null; this.prevWebrtcVideoStats = {}; this.prevWebrtcAudioStats = {}; this.currentWebrtcStats = {}; if (this.player._opt.webrtcUseCanvasRender && this.isH264) { this.$videoElement = document.createElement('video'); if (isSafari()) { this.$videoElement.style.position = 'absolute'; } this._initVideoEvents(); } // default muted this.$videoElement.muted = true; this._initRtcPeerConnection(); } destroy() { this.isDisconnected = false; this.isInitInfo = false; this.prevWebrtcVideoStats = {}; this.currentWebrtcStats = {}; this.rtcPeerTrackVideoReceiver = null; this.rtcPeerTrackAudioReceiver = null; this._stopBandwidthEstimateInterval(); if (this.supportVideoFrameCallbackHandle && this.$videoElement) { this.$videoElement.cancelVideoFrameCallback(this.supportVideoFrameCallbackHandle); this.supportVideoFrameCallbackHandle = null; } if (this.eventsDestroy.length) { this.eventsDestroy.forEach(event => event()); this.eventsDestroy = []; } if (this.isH264) { if (this.videoStream) { this.videoStream.getTracks().forEach(track => track.stop()); this.videoStream = null; } this.$videoElement.srcObject = null; } if (this.rtcPeerConnection) { this.rtcPeerConnection.onicecandidate = noop$3; this.rtcPeerConnection.ontrack = noop$3; this.rtcPeerConnection.onconnectionstatechange = noop$3; this.rtcPeerConnection.ondatachannel = noop$3; this.rtcPeerConnection.close(); this.rtcPeerConnection = null; } } _initVideoEvents() { const { proxy } = this.player.events; const canPlayDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.canplay, () => { this.player.debug.log(this.TAG_NAME, 'video canplay'); this.$videoElement.play().then(() => { this.player.debug.log(this.TAG_NAME, 'video play'); this._startCanvasRender(); this._initRenderSize(); }).catch(e => { this.player.debug.warn(this.TAG_NAME, 'video play error ', e); }); }); const waitingDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.waiting, () => { // this.player.emit(EVENTS.videoWaiting); this.player.debug.log('HlsDecoder', 'video waiting'); }); const timeUpdateDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.timeUpdate, event => { const timeStamp = parseInt(event.timeStamp, 10); this.player.handleRender(); this.player.updateStats({ ts: timeStamp }); // this.player.emit(EVENTS.videoTimeUpdate, timeStamp); // check video is playing if 
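/*
 * Illustrative sketch (not part of the bundle): the getWebRtcRemoteSdp* helpers above
 * implement plain HTTP offer/answer signalling for the WebRTC decoders further down.
 * The general flow is: create an offer on a recvonly RTCPeerConnection, POST offer.sdp
 * to the signalling URL, then apply the returned SDP as the remote answer. `signalUrl`
 * is an assumed example parameter; ZLM and SRS wrap the answer in JSON instead of
 * returning raw SDP text.
 *
 *   async function exchangeSdp(pc, signalUrl) {
 *     const offer = await pc.createOffer();
 *     await pc.setLocalDescription(offer);
 *     const response = await fetch(signalUrl, {
 *       method: 'POST',
 *       headers: { 'Content-Type': 'application/sdp' },
 *       body: offer.sdp
 *     });
 *     const answerSdp = await response.text();
 *     await pc.setRemoteDescription({ type: 'answer', sdp: answerSdp });
 *   }
 */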
(this.$videoElement.paused) { this.player.debug.warn('HlsDecoder', 'video is paused and next try to replay'); this.$videoElement.play().then(() => { this.player.debug.log('HlsDecoder', 'video is paused and replay success'); }).catch(e => { this.player.debug.warn('HlsDecoder', 'video is paused and replay error ', e); }); } }); const rateChangeDestroy = proxy(this.$videoElement, VIDEO_ELEMENT_EVENTS.ratechange, () => { this.player.debug.log('HlsDecoder', 'video playback Rate change', this.$videoElement && this.$videoElement.playbackRate); }); this.eventsDestroy.push(canPlayDestroy, waitingDestroy, timeUpdateDestroy, rateChangeDestroy); } _initRtcPeerConnection() { const rtcPeerConnection = new RTCPeerConnection(); const player = this.player; rtcPeerConnection.addTransceiver("video", { direction: "recvonly" }); rtcPeerConnection.addTransceiver("audio", { direction: "recvonly" }); rtcPeerConnection.onsignalingstatechange = e => { this.player.debug.log(this.TAG_NAME, 'onsignalingstatechange', e); }; // ICE代理的状态及其与ICE服务器(STUN、TURN)的连接 rtcPeerConnection.oniceconnectionstatechange = e => { this.player.debug.log(this.TAG_NAME, 'oniceconnectionstatechange', rtcPeerConnection.iceConnectionState); const iceConnectionState = rtcPeerConnection.iceConnectionState; this.player.emit(EVENTS.webrtcOnIceConnectionStateChange, iceConnectionState); this.isDisconnected = iceConnectionState === 'disconnected'; switch (rtcPeerConnection.iceConnectionState) { case "new": // 建立ICE连接 break; case "checking": // 收集候选 break; case "closed": // 断开ICE break; case "failed": player.emit(EVENTS.webrtcFailed); break; case "disconnected": // 网络波动或者串流断开 player.emit(EVENTS.webrtcDisconnect); break; case "connected": // 匹配到可用候选 break; case "completed": // 匹配完成,连接建立 break; case "closed": player.emit(EVENTS.webrtcClosed); break; } }; rtcPeerConnection.onicecandidate = event => { this.player.debug.log(this.TAG_NAME, 'onicecandidate', event); if (event.candidate) { this.player.debug.log(this.TAG_NAME, 'Remote ICE candidate: ', event.candidate.candidate); // Send the candidate to the remote peer } }; rtcPeerConnection.ontrack = event => { this.player.debug.log(this.TAG_NAME, 'ontrack', event); if (event.track.kind === 'video') { this.player.debug.log(this.TAG_NAME, 'ontrack video'); this.rtcPeerTrackVideoReceiver = rtcPeerConnection.getReceivers().find(function (receiver) { return receiver.track === event.track; }); if (this.rtcPeerTrackVideoReceiver) { this._startBandwidthEstimateInterval(); } let videoStream = event.streams[0]; this.$videoElement.autoplay = true; this.$videoElement.srcObject = videoStream; this.videoStream = videoStream; } else if (event.track.kind === 'audio') { this.player.debug.log(this.TAG_NAME, 'ontrack audio'); this.rtcPeerTrackAudioReceiver = rtcPeerConnection.getReceivers().find(function (receiver) { return receiver.track === event.track; }); if (this.rtcPeerTrackAudioReceiver) { this._startBandwidthEstimateInterval(); } } }; rtcPeerConnection.onicecandidateerror = event => { this.player.debug.log(this.TAG_NAME, 'onicecandidateerror', event); this.player.emitError(EVENTS_ERROR.webrtcIceCandidateError, event); }; // rtc ice所有传输组件(RTCIceTransport or RTCDtlsTransport(底层传输)类型)的聚合状态 rtcPeerConnection.onconnectionstatechange = event => { this.player.debug.log(this.TAG_NAME, 'onconnectionstatechange', event); this.player.emit(EVENTS.webrtcOnConnectionStateChange, rtcPeerConnection.connectionState); switch (rtcPeerConnection.connectionState) { case "new": // 至少有一个的ICE传输组件(RTICETransport或RTCDTLTransport对象)处于new状态, 
break; case "connecting": // 一个或多个ICE传输组件目前正在建立连接; break; case "connected": // 至少有一个ICE传输组件connected或completed状态 // 所有ICE连接要么在使用中(connected或completed),要么closed; break; case "disconnected": // 至少一个ICE传输组件处于断开状态, // 其他都不是failed、connecting或checking状态 break; case "failed": // ICE传输组件处于failed状态. if (this.isDisconnected) { player.emit(EVENTS.webrtcFailed); } break; } }; this.rtcPeerConnection = rtcPeerConnection; } _startBandwidthEstimateInterval() { this.player.debug.log(this.TAG_NAME, '_startBandwidthEstimateInterval'); this._stopBandwidthEstimateInterval(); this.bandwidthEstimateInterval = setInterval(() => { // video if (this.rtcPeerTrackVideoReceiver) { this.rtcPeerTrackVideoReceiver.getStats().then(stats => { let stateContent = {}; let rate = 0; stats.forEach(report => { if (report) { switch (report.type) { case WEBRTC_STATS_TYPE.CandidatePair: if (report.state === 'succeeded' || report.bytesReceived) { this.currentWebrtcStats.timestamp = report.timestamp; this.currentWebrtcStats.rtt = report.currentRoundTripTime || -1; this.currentWebrtcStats.bytesReceived = report.bytesReceived || 0; this.currentWebrtcStats.bytesSent = report.bytesSent || 0; } // break; case WEBRTC_STATS_TYPE.RemoteCandidate: this.currentWebrtcStats.remoteCandidate = report || {}; break; case WEBRTC_STATS_TYPE.LocalCandidate: this.currentWebrtcStats.localCandidate = report || {}; break; case WEBRTC_STATS_TYPE.InboundRtp: this.currentWebrtcStats.lastTimeStamp = report.timestamp; const timestampDiff = ((report.timestamp || 0) - (this.prevWebrtcVideoStats.timestamp || 0)) / 1000; const bytesReceivedDiff = Number(report.bytesReceived || 0) - Number(this.prevWebrtcVideoStats.bytesReceived || 0); const bytesReceivedPerSecond = Math.floor(bytesReceivedDiff / timestampDiff); // 视频码率 const vbps = bytesReceivedPerSecond; rate += vbps; stateContent.vbps = vbps; this.prevWebrtcVideoStats = report; break; case WEBRTC_STATS_TYPE.Track: if (report.frameWidth && report.frameHeight) { this.currentWebrtcStats.frameWidth = report.frameWidth || 0; this.currentWebrtcStats.frameHeight = report.frameHeight || 0; } break; } } }); if (this.rtcPeerTrackAudioReceiver) { this.rtcPeerTrackAudioReceiver.getStats().then(stats => { stats.forEach(report => { if (report) { switch (report.type) { case WEBRTC_STATS_TYPE.InboundRtp: this.currentWebrtcStats.lastTimeStamp = report.timestamp; const timestampDiff = ((report.timestamp || 0) - (this.prevWebrtcAudioStats.timestamp || 0)) / 1000; const bytesReceivedDiff = Number(report.bytesReceived || 0) - Number(this.prevWebrtcAudioStats.bytesReceived || 0); const bytesReceivedPerSecond = Math.floor(bytesReceivedDiff / timestampDiff); // 音频码率 const abps = bytesReceivedPerSecond; rate += abps; stateContent.abps = abps; this.prevWebrtcAudioStats = report; break; } } }); this.player.updateStats(stateContent); this.player.emit(EVENTS.kBps, (rate / 1024).toFixed(2)); }); } else { this.player.updateStats(stateContent); this.player.emit(EVENTS.kBps, (rate / 1024).toFixed(2)); } }); } }, 1 * 1000); } _stopBandwidthEstimateInterval() { this.player.debug.log(this.TAG_NAME, '_stopBandwidthEstimateInterval'); if (this.bandwidthEstimateInterval) { clearInterval(this.bandwidthEstimateInterval); this.bandwidthEstimateInterval = null; } } _startCanvasRender() { if (supportVideoFrameCallback()) { this.supportVideoFrameCallbackHandle = this.$videoElement.requestVideoFrameCallback(this.videoFrameCallback.bind(this)); } else { this._stopCanvasRender(); this.canvasRenderInterval = setInterval(() => { this.player.video.render({ 
$video: this.$videoElement, ts: 0 }); }, 1000 / 25); } } _stopCanvasRender() { if (this.canvasRenderInterval) { clearInterval(this.canvasRenderInterval); this.canvasRenderInterval = null; } } videoFrameCallback(now) { let metaData = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; if (this.player.isDestroyedOrClosed()) { this.player.debug.log(this.TAG_NAME, 'videoFrameCallback() player is destroyed'); return; } this.player.video.render({ $video: this.$videoElement, ts: metaData.mediaTime || 0 }); this.player.updateStats({ dts: metaData.mediaTime || 0 }); this.supportVideoFrameCallbackHandle = this.$videoElement.requestVideoFrameCallback(this.videoFrameCallback.bind(this)); } _initRenderSize() { if (!this.isInitInfo) { this.player.video.updateVideoInfo({ width: this.$videoElement.videoWidth, height: this.$videoElement.videoHeight }); this.player.video.initCanvasViewSize(); this.isInitInfo = true; } } getVideoCurrentTime() { let result = 0; if (this.$videoElement) { result = this.$videoElement.currentTime; } return result; } } class WebrtcDecoder extends CommonWebrtc { constructor(player) { super(player); this.rtcPeerConnectionDataChannel = null; if (this.player.isWebrtcH265()) { // this.streamRate = calculationRate(rate => { player.emit(EVENTS.kBps, (rate / 1024).toFixed(2)); }); } this.TAG_NAME = 'WebrtcForM7SDecoder'; this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); this.stopStreamRateInterval(); if (this.rtcPeerConnectionDataChannel) { this.rtcPeerConnectionDataChannel.onopen = noop$3; this.rtcPeerConnectionDataChannel.onclose = noop$3; this.rtcPeerConnectionDataChannel.onmessage = noop$3; this.rtcPeerConnectionDataChannel.close(); this.rtcPeerConnectionDataChannel = null; } this.player.debug.log(this.TAG_NAME, 'destroy'); } _initRtcPeerConnection() { const rtcPeerConnection = new RTCPeerConnection(); const player = this.player; rtcPeerConnection.addTransceiver("video", { direction: "recvonly" }); rtcPeerConnection.addTransceiver("audio", { direction: "recvonly" }); rtcPeerConnection.onsignalingstatechange = e => { this.player.debug.log(this.TAG_NAME, "onsignalingstatechange", e); }; // ICE代理的状态及其与ICE服务器(STUN、TURN)的连接 rtcPeerConnection.oniceconnectionstatechange = e => { this.player.debug.log(this.TAG_NAME, "oniceconnectionstatechange", rtcPeerConnection.iceConnectionState); const iceConnectionState = rtcPeerConnection.iceConnectionState; this.player.emit(EVENTS.webrtcOnIceConnectionStateChange, iceConnectionState); this.isDisconnected = iceConnectionState === 'disconnected'; switch (rtcPeerConnection.iceConnectionState) { case "new": // 建立ICE连接 break; case "checking": // 收集候选 break; case "closed": // 断开ICE break; case "failed": // 没有合适候选 player.emit(EVENTS.webrtcFailed); break; case "disconnected": // 网络波动或者串流断开 player.emit(EVENTS.webrtcDisconnect); break; case "connected": // 匹配到可用候选 break; case "completed": // 匹配完成,连接建立 break; case 'closed': player.emit(EVENTS.webrtcClosed); break; } }; // ICE代理的状态及其与ICE服务器(STUN、TURN)的连接 rtcPeerConnection.onicecandidate = event => { this.player.debug.log(this.TAG_NAME, "onicecandidate", event); if (event.candidate) { this.player.debug.log(this.TAG_NAME, 'Remote ICE candidate: ', event.candidate.candidate); // Send the candidate to the remote peer } }; rtcPeerConnection.ontrack = event => { this.player.debug.log(this.TAG_NAME, "ontrack", event); const $video = player.video.$videoElement; if (player.isWebrtcH264()) { if (event.track.kind === 'video') { this.player.debug.log(this.TAG_NAME, 'ontrack 
video'); this.rtcPeerTrackVideoReceiver = rtcPeerConnection.getReceivers().find(function (receiver) { return receiver.track === event.track; }); if (this.rtcPeerTrackVideoReceiver) { this._startBandwidthEstimateInterval(); } let videoStream = event.streams[0]; $video.autoplay = true; $video.srcObject = videoStream; this.videoStream = videoStream; } else if (event.track.kind === 'audio') { this.player.debug.log(this.TAG_NAME, 'ontrack audio'); this.rtcPeerTrackAudioReceiver = rtcPeerConnection.getReceivers().find(function (receiver) { return receiver.track === event.track; }); if (this.rtcPeerTrackAudioReceiver) { this._startBandwidthEstimateInterval(); } } } }; rtcPeerConnection.onicecandidateerror = event => { this.player.debug.log(this.TAG_NAME, 'onicecandidateerror', event); this.player.emitError(EVENTS_ERROR.webrtcIceCandidateError, event); }; // rtc ice所有传输组件(RTCIceTransport or RTCDtlsTransport(底层传输)类型)的聚合状态 rtcPeerConnection.onconnectionstatechange = event => { player.debug.log(this.TAG_NAME, `sdp connect status ${rtcPeerConnection.connectionState}`); switch (rtcPeerConnection.connectionState) { case "new": // 至少有一个的ICE传输组件(RTICETransport或RTCDTLTransport对象)处于new状态, break; case "connecting": // 一个或多个ICE传输组件目前正在建立连接; break; case "connected": // 一个或多个ICE传输组件目前正在建立连接; // 所有ICE连接要么在使用中(connected或completed),要么closed; break; case "disconnected": // 至少一个ICE传输组件处于断开状态, // 其他都不是failed、connecting或checking状态 break; case "failed": // ICE传输组件处于failed状态. if (this.isDisconnected) { player.emit(EVENTS.webrtcFailed); } break; } }; // for h265 rtcPeerConnection.ondatachannel = event => { // RTCDataChannel const channel = event.channel; this.player.debug.log(this.TAG_NAME, 'ondatachannel'); channel.onopen = () => { this.player.debug.log(this.TAG_NAME, 'ondatachannel and onopen'); }; channel.onmessage = event => { const data = event.data; if (this.player.isWebrtcH264()) { this.player.debug.warn(this.TAG_NAME, 'ondatachannel is H265 but decode is h264 so emit webrtcStreamH265 '); this.player.emit(EVENTS.webrtcStreamH265); return; } // this.player.debug.log(this.TAG_NAME, 'ondatachannel,onmessage', data.byteLength); if (this.player.isDestroyedOrClosed()) { this.player.debug.warn(this.TAG_NAME, 'ondatachannel and player is destroyed'); return; } this.streamRate && this.streamRate(data.byteLength); if (this.player.demux) { this.player.demux.dispatch(data); } }; channel.onclose = () => { this.player.debug.warn(this.TAG_NAME, 'ondatachannel and onclose'); }; this.rtcPeerConnectionDataChannel = channel; }; const signalChannel = rtcPeerConnection.createDataChannel("signal"); signalChannel.onmessage = event => { this.player.debug.log(this.TAG_NAME, 'signalChannel,onmessage', event); const signal = JSON.parse(event.data); switch (signal.type) { } }; this.rtcPeerConnection = rtcPeerConnection; } startStreamRateInterval() { this.stopStreamRateInterval(); this.streamRateInterval = setInterval(() => { this.streamRate && this.streamRate(0); }, 1000); } stopStreamRateInterval() { if (this.streamRateInterval) { clearInterval(this.streamRateInterval); this.streamRateInterval = null; } } loadSource(url) { let options = arguments.length > 1 && arguments[1] !== undefined ? 
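/*
 * Illustrative sketch (not part of the bundle): in the H.265 path above the media does
 * not arrive as an RTP track but over an RTCDataChannel; every binary message is handed
 * to the demuxer (player.demux.dispatch) and its byteLength feeds the stream-rate
 * counter. A minimal stand-alone consumer of such a channel; `handleChunk` is an
 * assumed callback name.
 *
 *   function attachDataChannel(pc, handleChunk) {
 *     pc.ondatachannel = (event) => {
 *       event.channel.binaryType = 'arraybuffer';
 *       event.channel.onmessage = (msg) => handleChunk(msg.data); // raw stream bytes
 *     };
 *   }
 */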
arguments[1] : {}; return new Promise((resolve, reject) => { const rtcPeerConnection = this.rtcPeerConnection; rtcPeerConnection.createOffer().then(res => { rtcPeerConnection.setLocalDescription(res); this.player.debug.log(this.TAG_NAME, `getWebRtcRemoteSdp loadSource`); getWebRtcRemoteSdp(url, res.sdp, options).then(response => { response.text().then(sdp => { this.player.debug.log(this.TAG_NAME, `getWebRtcRemoteSdp response`); if (sdp) { rtcPeerConnection.setRemoteDescription(new RTCSessionDescription({ type: "answer", sdp })).then(() => { if (this.player.isWebrtcH265()) { this.startStreamRateInterval(); } resolve(); }).catch(e => { reject(e); }); } else { reject('sdp is null'); } }).catch(e => { this.player.debug.error(this.TAG_NAME, `loadSource response.text() error`, e); reject(e); }); }).catch(e => { this.player.debug.error(this.TAG_NAME, `loadSource getWebRtcRemoteSdp response error`, e); reject(e); }); }).catch(e => { this.player.debug.error(this.TAG_NAME, `loadSource rtcPeerConnection.createOffer() error`, e); reject(e); }); }); } } class WebrtcForZLMDecoder extends CommonWebrtc { constructor(player) { super(player); this.TAG_NAME = 'WebrtcForZLMDecoder'; this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); this.player.debug.log(this.TAG_NAME, 'destroy'); } loadSource(url) { return new Promise((resolve, reject) => { const rtcPeerConnection = this.rtcPeerConnection; rtcPeerConnection.createOffer().then(res => { rtcPeerConnection.setLocalDescription(res); this.player.debug.log(this.TAG_NAME, `getWebRtcRemoteSdp loadSource`); getWebRtcRemoteSdpForZLM(url, res.sdp).then(response => { this.player.debug.log(this.TAG_NAME, `getWebRtcRemoteSdp response and code is ${response.code}`); const ret = response; if (ret && ret.code !== 0) { return reject(ret.msg); } if (ret && ret.sdp) { rtcPeerConnection.setRemoteDescription(new RTCSessionDescription({ type: "answer", sdp: ret.sdp })).then(() => { resolve(); }).catch(e => { reject(e); }); } else { reject('sdp is null'); } }).catch(e => { this.player.debug.error(this.TAG_NAME, `loadSource getWebRtcRemoteSdp response error`, e); reject(e); }); }).catch(e => { this.player.debug.error(this.TAG_NAME, `loadSource rtcPeerConnection.createOffer() error`, e); reject(e); }); }); } } class Playback extends Emitter { constructor(player, config) { super(); this.player = player; this.player.$container.classList.add('jb-pro-container-playback'); this._showPrecision = null; this._startTime = null; this._playStartTime = null; this._playingTimestamp = null; this._fps = parseInt(config.fps, 10) || player._opt.playbackFps; this._isUseFpsRender = isTrue(config.isUseFpsRender) ? true : false; this._rate = 1; // 播放倍率 this._audioTimestamp = 0; this._videoTimestamp = 0; this.controlType = config.controlType || PLAYBACK_CONTROL_TYPE.normal; if (config.controlType && [PLAYBACK_CONTROL_TYPE.normal, PLAYBACK_CONTROL_TYPE.simple].indexOf(config.controlType) === -1) { this.player.debug.warn('Playback', 'constructor()', 'controlType is not in [normal,simple]', config.controlType); this.controlType = PLAYBACK_CONTROL_TYPE.normal; } this._currentLocalTimestamp = 0; this._localOneFrameTimestamp = config.localOneFrameTimestamp || 40; this._localCalculateTimeInterval = null; this._isUseLocalCalculateTime = isTrue(config.isUseLocalCalculateTime) ? true : false; this._isPlaybackPauseClearCache = isFalse(config.isPlaybackPauseClearCache) ? false : true; this._isCacheBeforeDecodeForFpsRender = isTrue(config.isCacheBeforeDecodeForFpsRender) ? 
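/*
 * Illustrative sketch (not part of the bundle): WebrtcForZLMDecoder.loadSource above
 * expects the ZLMediaKit webrtc API to answer with JSON shaped like
 * { code: 0, sdp: "v=0..." } and treats any non-zero code as a failure (rejecting with
 * ret.msg). A stand-alone check mirroring that contract; the sample object is invented.
 *
 *   function pickZlmAnswer(ret) {
 *     if (!ret || ret.code !== 0) {
 *       throw new Error((ret && ret.msg) || 'zlm webrtc answer failed');
 *     }
 *     return ret.sdp;
 *   }
 *   pickZlmAnswer({ code: 0, sdp: 'v=0...' }); // -> 'v=0...'
 */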
true : false; this._startfpsTime = null; this._startFpsTimestamp = null; this._checkStatsInterval = null; this._playbackTs = 0; this._renderFps = 0; // if (this._isUseLocalCalculateTime) { this._startLocalCalculateTime(); } else { this._listen(); } // this.playbackList = []; this._playbackListStartTimestamp = null; this._totalDuration = 0; if (config.controlType === PLAYBACK_CONTROL_TYPE.normal) { this.initPlaybackList(config.playList, config.showPrecision, config.startTime); } else if (config.controlType === PLAYBACK_CONTROL_TYPE.simple) { // 单位 s if (config.duration) { // 变成 ms this._totalDuration = config.duration * 1000; } let startTime = config.startTime || 0; if (startTime > this.totalDuration) { startTime = this.totalDuration; } this.setStartTime(startTime); } this.player.on(EVENTS.playbackPause, flag => { if (flag) { this.pause(); } else { this.resume(); } }); const initObj = { fps: this._fps, isUseFpsRender: this._isUseFpsRender, localOneFrameTimestamp: this._localOneFrameTimestamp, isUseLocalCalculateTime: this._isUseLocalCalculateTime, uiUsePlaybackPause: config.uiUsePlaybackPause, showControl: config.showControl }; player.debug.log('Playback', 'init', JSON.stringify(initObj)); } destroy() { this._startTime = null; this._showPrecision = null; this._playStartTime = null; this._playingTimestamp = null; this._totalDuration = 0; this._audioTimestamp = 0; this._videoTimestamp = 0; this._fps = null; this._isUseFpsRender = false; this._rate = 1; // 播放倍率 this.playbackList = []; this._playbackListStartTimestamp = null; this._localCalculateTimeInterval = null; this._currentLocalTimestamp = 0; this._startfpsTime = null; this._startFpsTimestamp = null; this._renderFps = 0; this._playbackTs = 0; this._stopLocalCalculateTime(); this.clearStatsInterval(); if (this.player.$container) { this.player.$container.classList.remove('jb-pro-container-playback'); } this.off(); this.player.debug.log('Playback', 'destroy'); } _listen() { // this.player.on(EVENTS.stats, stats => { const timestamp = stats.ts; if (!this._playStartTime) { this._playStartTime = timestamp - 1000; } // todo:这了会存在一个问题 let playingTimestamp = timestamp - this._playStartTime; this.setPlayingTimestamp(playingTimestamp); }); } pause() { this.clearStatsInterval(); } resume() { this.startCheckStatsInterval(); } updateStats() { let options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; if (!this._startFpsTimestamp) { this._startFpsTimestamp = now$2(); } if (isNotEmpty(options.ts)) { this.player.updateStats({ fps: true, ts: options.ts }); this._playbackTs = options.ts; if (!this._startfpsTime) { this._startfpsTime = options.ts; } this._renderFps += 1; } const _now = now$2(); const timestamp = _now - this._startFpsTimestamp; // update fps if (timestamp < 1 * 1000) { return; } let dataTimestamp = null; if (this._startfpsTime) { dataTimestamp = this._playbackTs - this._startfpsTime; } this.player.emit(EVENTS.playbackStats, { fps: this._renderFps, rate: this.rate, start: this._startfpsTime, end: this._playbackTs, timestamp: timestamp, dataTimestamp: dataTimestamp, audioBufferSize: this.player.audio ? this.player.audio.bufferSize : 0, videoBufferSize: this.player.video ? 
this.player.video.bufferSize : 0, ts: this._playbackTs }); this._renderFps = 0; this._startfpsTime = this._playbackTs; this._startFpsTimestamp = _now; } updateLocalOneFrameTimestamp(timestamp) { this._localOneFrameTimestamp = timestamp; } _startLocalCalculateTime() { this._stopLocalCalculateTime(); this._localCalculateTimeInterval = setInterval(() => { const timestamp = this._currentLocalTimestamp; if (!this._playStartTime) { this._playStartTime = timestamp - 1000; } let playingTimestamp = timestamp - this._playStartTime; this.setPlayingTimestamp(playingTimestamp); }, 1000); } startCheckStatsInterval() { this.clearStatsInterval(); this._checkStatsInterval = setInterval(() => { this.updateStats(); }, 1000); } _stopLocalCalculateTime() { if (this._localCalculateTimeInterval) { clearInterval(this._localCalculateTimeInterval); this._localCalculateTimeInterval = null; } } clearStatsInterval() { if (this._checkStatsInterval) { clearInterval(this._checkStatsInterval); this._checkStatsInterval = null; } } // increaseLocalTimestamp() { if (this._isUseLocalCalculateTime) { // todo: 如果变成I帧倍率播放的时候,就会有问题。 this._currentLocalTimestamp += this._localOneFrameTimestamp; } } // initPlaybackList(playList, showPrecision, startTime) { this.playbackList = playList || []; let totalDuration = 0; this.playbackList.forEach((playItem, index) => { if (getStrLength(playItem.start) === 10) { playItem.startTimestamp = playItem.start * 1000; playItem.startTime = parseTime(playItem.startTimestamp); } if (getStrLength(playItem.end) === 10) { playItem.endTimestamp = playItem.end * 1000; playItem.endTime = parseTime(playItem.endTimestamp); } playItem.duration = playItem.end - playItem.start; totalDuration += playItem.duration; }); this._totalDuration = totalDuration; this.player.debug.log('Playback', this.playbackList); if (this.playbackList.length > 0) { const _startTimestamp = this.playbackList[0].startTimestamp; this._playbackListStartTimestamp = _startTimestamp; let _startTime = _startTimestamp; if (startTime) { if (getStrLength(startTime) === 10) { startTime = startTime * 1000; } // 校验下startTime 是否在playList的范围内 if (this._isTimeInPlaybackList(startTime)) { _startTime = startTime; } } // 设置时分秒 this.setStartTime(_startTime); } const _showPrecision = showPrecision || PLAYBACK_CONTROL_TIME_PRECISION.oneHour; // init... 
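/*
 * Illustrative sketch (not part of the bundle): initPlaybackList above accepts entries
 * whose start/end are unix timestamps; 10-digit values are treated as seconds and
 * normalised into millisecond startTimestamp/endTimestamp fields, while per-item
 * duration stays end - start in the input unit. The object below is invented sample data.
 *
 *   const playItem = { start: 1700000000, end: 1700000600 };  // 10-digit seconds
 *   const startTimestamp = playItem.start * 1000;             // 1700000000000 ms
 *   const endTimestamp = playItem.end * 1000;                 // 1700000600000 ms
 *   const duration = playItem.end - playItem.start;           // 600
 */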
this.setShowPrecision(_showPrecision); } // 单位 s get totalDuration() { return (this._totalDuration || 0) / 1000; } get startTime() { return this._startTime || 0; } // normal:set start time 开始时间,默认是 某一天的00:00:00 // simple: ms打头 setStartTime(time) { this._startTime = time; this._playingTimestamp = time; this._playStartTime = null; } setRate(rate) { this._rate = rate; this.player.emit(EVENTS.playbackRateChange, rate); } get fps() { return this._fps; } get rate() { return this._rate; } get isUseFpsRender() { return this._isUseFpsRender; } get isUseLocalCalculateTime() { return this._isUseLocalCalculateTime; } get showPrecision() { return this._showPrecision; } get is60Min() { return this.showPrecision === PLAYBACK_CONTROL_TIME_PRECISION.oneHour; } get is30Min() { return this.showPrecision === PLAYBACK_CONTROL_TIME_PRECISION.halfHour; } get is10Min() { return this.showPrecision === PLAYBACK_CONTROL_TIME_PRECISION.tenMin; } get is5Min() { return this.showPrecision === PLAYBACK_CONTROL_TIME_PRECISION.fiveMin; } get is1Min() { return this.showPrecision === PLAYBACK_CONTROL_TIME_PRECISION.fiveMin; } get isPlaybackPauseClearCache() { return this._isPlaybackPauseClearCache; } get isCacheBeforeDecodeForFpsRender() { return this._isCacheBeforeDecodeForFpsRender; } setShowPrecision(type) { if (!PLAYBACK_CONTROL_TIME_PRECISION_ARRAY.includes(type)) { this.player.debug.warn('Playback', 'setShowPrecision()', 'type is not in PLAYBACK_CONTROL_TIME_PRECISION_ARRAY', type); type = PLAYBACK_CONTROL_TIME_PRECISION.oneHour; } if (this._showPrecision && this._showPrecision === type) { return; } this._showPrecision = type; this.player.emit(EVENTS.playbackPrecision, this._showPrecision, this.playbackList); this.player.emit(EVENTS.playbackShowPrecisionChange, this._showPrecision); } setPlayingTimestamp(ts) { let timestamp; if (this.controlType === PLAYBACK_CONTROL_TYPE.normal) { timestamp = this.startTime + ts; this._playingTimestamp = timestamp; this.player.emit(EVENTS.playbackTime, timestamp); const timeDay = new Date(timestamp); this.player.emit(EVENTS.playbackTimestamp, { ts: timestamp, hour: timeDay.getHours(), min: timeDay.getMinutes(), second: timeDay.getSeconds() }); } else if (this.controlType === PLAYBACK_CONTROL_TYPE.simple) { // 四舍五入到秒 timestamp = this.startTime + Math.round(ts / 1000); if (timestamp > this.totalDuration) { this.player.debug.log('Playback', 'setPlayingTimestamp()', `timestamp ${timestamp} > this.totalDuration ${this.totalDuration}`); timestamp = this.totalDuration; } this._playingTimestamp = timestamp; this.player.emit(EVENTS.playbackTime, timestamp); this.player.emit(EVENTS.playbackTimestamp, { ts: timestamp }); } } get playingTimestamp() { return this._playingTimestamp; } /** * */ narrowPrecision() { const index = PLAYBACK_CONTROL_TIME_PRECISION_ARRAY.indexOf(this.showPrecision); const prev = index - 1; if (prev >= 0) { const item = PLAYBACK_CONTROL_TIME_PRECISION_ARRAY[prev]; this.setShowPrecision(item); } } /** * */ expandPrecision() { const index = PLAYBACK_CONTROL_TIME_PRECISION_ARRAY.indexOf(this.showPrecision); const next = index + 1; if (next <= PLAYBACK_CONTROL_TIME_PRECISION_ARRAY.length - 1) { const item = PLAYBACK_CONTROL_TIME_PRECISION_ARRAY[next]; this.setShowPrecision(item); } } /** * * @param obj */ seek(obj) { this.player.debug.log('Playback', 'seek()', obj); if (this.controlType === PLAYBACK_CONTROL_TYPE.normal) { if (obj.hasRecord === 'true') { let seconds = obj.time; if (obj.type === 'min') { seconds = obj.time * 60; } let result = formatSecondTime(seconds); if 
(this._playbackListStartTimestamp) { const timestamp = new Date(this._playbackListStartTimestamp).setHours(result.hour, result.min, result.second, 0); result.timestamp = timestamp; const playbackObj = this._findMoreInfoByTimestamp(timestamp); if (result && playbackObj.more) { result.more = playbackObj.more; } } this.player.emit(EVENTS.playbackSeek, result); } } else if (this.controlType === PLAYBACK_CONTROL_TYPE.simple) { let ts = obj.time; this.player.emit(EVENTS.playbackSeek, { ts }); } } currentTimeScroll() { this.player.emit(EVENTS.playbackTimeScroll); } _findMoreInfoByTimestamp(timestamp) { let result = null; this.playbackList.forEach((item, index) => { if (item.startTimestamp <= timestamp && item.endTimestamp >= timestamp) { result = item; } }); return result; } _isTimeInPlaybackList(timestamp) { let result = false; this.playbackList.forEach((item, index) => { if (item.startTimestamp <= timestamp && item.endTimestamp >= timestamp) { result = true; } }); return result; } getControlType() { return this.controlType; } isControlTypeNormal() { return this.controlType === PLAYBACK_CONTROL_TYPE.normal; } isControlTypeSimple() { return this.controlType === PLAYBACK_CONTROL_TYPE.simple; } } class Zoom extends Emitter { constructor(player) { super(); this.player = player; this.TAG_NAME = 'zoom'; this.bindEvents = []; this.isDragging = false; this.currentZoom = 1; this.prevVideoElementStyleTransform = null; this.prevVideoElementStyleScale = null; this.maxScale = 5; this.tempPosition = { x: 0, y: 0 }; this.videoPosition = { left: 0, top: 0 }; const { events: { proxy }, debug } = this.player; this.player.on(EVENTS.zooming, isZooming => { if (isZooming) { this.player.$container.classList.add('jb-pro-zoom-control'); this._bindEvents(); const styleTransform = this.player.video.$videoElement.style.transform; let left = this.player.video.$videoElement.offsetLeft; let top = this.player.video.$videoElement.offsetTop; left = parseFloat(left); top = parseFloat(top); if (left) { this.videoPosition.left = left; } if (top) { this.videoPosition.top = top; } this.prevVideoElementStyleTransform = styleTransform; let scaleStyleMatch = styleTransform.match(/scale\([0-9., ]*\)/g); if (scaleStyleMatch && scaleStyleMatch[0]) { let scaleStyle = scaleStyleMatch[0].replace('scale(', '').replace(')', ''); this.prevVideoElementStyleScale = scaleStyle.split(','); } } else { this.player.$container.classList.remove('jb-pro-zoom-control'); this._unbindEvents(); this._resetVideoPosition(); this.player.$container.style.cursor = 'auto'; let prevVideoElementStyleTransform = this.prevVideoElementStyleTransform; this.player.video.$videoElement.style.transform = prevVideoElementStyleTransform; this.prevVideoElementStyleTransform = null; this.prevVideoElementStyleScale = null; if (isMobile() && this.player._opt.useWebFullScreen) { this.player.resize(); } } }); const mouseUpDestroy = proxy(window, isMobile() ? 'touchend' : 'mouseup', event => { this.handleMouseUp(event); }); this.bindEvents.push(mouseUpDestroy); player.debug.log('zoom', 'init'); } destroy() { this.bindEvents = []; this.isDragging = false; this.currentZoom = 1; this.prevVideoElementStyleTransform = null; this.prevVideoElementStyleScale = null; this.tempPosition = { x: 0, y: 0 }; this.videoPosition = { left: 0, top: 0 }; this.off(); this.player.debug.log('zoom', 'destroy'); } _bindEvents() { const { events: { proxy }, debug } = this.player; const mouseMoveDestroy = proxy(this.player.$container, isMobile() ? 
'touchmove' : 'mousemove', e => { this.handleMouseMove(e); }); this.bindEvents.push(mouseMoveDestroy); const mouseDownDestroy = proxy(this.player.$container, isMobile() ? 'touchstart' : 'mousedown', e => { this.handleMouseDown(e); }); this.bindEvents.push(mouseDownDestroy); const mouseUpDestroy = proxy(window, isMobile() ? 'touchend' : 'mouseup', event => { this.handleMouseUp(event); }); this.bindEvents.push(mouseUpDestroy); } _unbindEvents() { this.bindEvents.forEach(fn => { fn && fn(); }); } handleMouseMove(event) { event.stopPropagation(); if (this.isDragging && this.player.zooming) { // preventDefault to prevent the video from being dragged event.preventDefault(); const { posX, posY } = getMousePosition(event); const tempX = this.tempPosition.x - posX; const tempY = this.tempPosition.y - posY; this.videoPosition.left = this.videoPosition.left - tempX; this.videoPosition.top = this.videoPosition.top - tempY; this.tempPosition.x = posX; this.tempPosition.y = posY; this.updateVideoPosition(); } } handleMouseDown(event) { event.stopPropagation(); const target = getTarget(event); if (!this.player.zooming) { return; } if (target.matches('video') || target.matches('canvas')) { // preventDefault to prevent the video from being dragged event.preventDefault(); const { posX, posY } = getMousePosition(event); this.player.$container.style.cursor = 'grabbing'; this.tempPosition.x = posX; this.tempPosition.y = posY; this.isDragging = true; this.player.debug.log('zoom', 'handleMouseDown is dragging true'); } } handleMouseUp(event) { event.stopPropagation(); if (this.isDragging && this.player.zooming) { // preventDefault to prevent the video from being dragged event.preventDefault(); this.tempPosition = { x: 0, y: 0 }; this.isDragging = false; this.player.$container.style.cursor = 'grab'; this.player.debug.log('zoom', 'handleMouseUp is dragging false'); } } // updateVideoPosition() { const $videoElement = this.player.video.$videoElement; $videoElement.style.left = this.videoPosition.left + 'px'; $videoElement.style.top = this.videoPosition.top + 'px'; } // _resetVideoPosition() { // const $videoElement = this.player.video.$videoElement; // $videoElement.style.left = 0 + 'px'; // $videoElement.style.top = 0 + 'px'; this.player.resize(); this.tempPosition = { x: 0, y: 0 }; this.videoPosition = { left: 0, top: 0 }; this.currentZoom = 1; this.isDragging = false; } // narrow narrowPrecision() { if (this.currentZoom <= 1) { return; } this.currentZoom -= 1; this.updateVideoElementScale(); } // expand expandPrecision() { if (this.currentZoom >= this.maxScale) { return; } this.currentZoom += 1; this.updateVideoElementScale(); } updatePrevVideoElementStyleScale(value) { this.prevVideoElementStyleScale = value; } updateVideoElementScale() { const $videoElement = this.player.video.$videoElement; let styleTransform = $videoElement.style.transform; let scaleX = 1; let scaleY = 1; if (this.prevVideoElementStyleScale) { const x = this.prevVideoElementStyleScale[0]; if (x !== undefined) { scaleX = x; scaleY = x; } const y = this.prevVideoElementStyleScale[1]; if (y !== undefined) { scaleY = y; } } scaleY = toNumber(scaleY); scaleX = toNumber(scaleX); const endScaleX = 0.5 * scaleX * (this.currentZoom - 1) + scaleX; const endScaleY = 0.5 * scaleY * (this.currentZoom - 1) + scaleY; let endStyleTransform; if (styleTransform.indexOf('scale(') === -1) { endStyleTransform = styleTransform + ` scale(${endScaleX},${endScaleY})`; } else { endStyleTransform = styleTransform.replace(/scale\([0-9., ]*\)/, 
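/*
 * Illustrative sketch (not part of the bundle): updateVideoElementScale above grows the
 * existing CSS scale by half of the base scale per zoom level:
 * endScale = 0.5 * baseScale * (currentZoom - 1) + baseScale.
 *
 *   function endScale(baseScale, currentZoom) {
 *     return 0.5 * baseScale * (currentZoom - 1) + baseScale;
 *   }
 *   endScale(1, 1); // 1   (no zoom)
 *   endScale(1, 3); // 2   (two expand steps)
 *   endScale(1, 5); // 3   (currentZoom capped at maxScale = 5)
 */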
`scale(${endScaleX},${endScaleY})`); } this.player.debug.log('zoom', `updateVideoElementScale end is ${endScaleX}, ${endScaleY} style is ${endStyleTransform}`); $videoElement.style.transform = endStyleTransform; } } class AiLoader extends Emitter { constructor(player) { super(); this.player = player; this.faceDetector = null; this.objectDetector = null; this.imageDetector = null; this.occlusionDetector = null; this.initFaceDetector(); this.initObjectDetector(); this.initImageDetector(); this.initOcclusionDetector(); let tips = 'init'; if (this.faceDetector) { tips += ' and use faceDetector'; } if (this.objectDetector) { tips += ' and use objectDetector'; } if (this.imageDetector) { tips += ' and use imageDetector'; } if (this.occlusionDetector) { tips += ' and use occlusionDetector'; } this.player.debug.log('AiLoader', tips); } destroy() { this.off(); if (this.faceDetector) { this.faceDetector.destroy(); this.faceDetector = null; } if (this.objectDetector) { this.objectDetector.destroy(); this.objectDetector = null; } if (this.imageDetector) { this.imageDetector.destroy(); this.imageDetector = null; } if (this.occlusionDetector) { this.occlusionDetector.destroy(); this.occlusionDetector = null; } this.player.debug.log('AiLoader', 'destroy'); } initFaceDetector() { if (this.player._opt.useFaceDetector && window.JessibucaProFaceDetector) { const faceDetector = new JessibucaProFaceDetector({ detectWidth: this.player._opt.aiFaceDetectWidth, showRect: false, debug: this.player._opt.debug, debugLevel: this.player._opt.debugLevel, debugUuid: this.player._opt.debugUuid }); faceDetector.load().then(() => { this.player.debug.log('AiLoader', 'init face detector success'); this.faceDetector = faceDetector; this.faceDetector.on('jessibuca-pro-face-detector-info', info => { this.player.emit(EVENTS.aiFaceDetectorInfo, info); if (this.player._opt.aiFaceDetectShowRect) { const config = this.player._opt.aiFaceDetectRectConfig || {}; const list = (info.list || []).map(item => { item.type = 'rect'; item.color = config.borderColor || '#0000FF'; item.lineWidth = config.borderWidth || 2; return item; }); if (this.player.video) { this.player.video.addAiContentToCanvas(list); } } }); }); } } initObjectDetector() { if (this.player._opt.useObjectDetector && window.JessibucaProObjectDetector) { const objectDetector = new JessibucaProObjectDetector({ detectWidth: this.player._opt.aiObjectDetectWidth, showRect: false, debug: this.player._opt.debug, debugLevel: this.player._opt.debugLevel, debugUuid: this.player._opt.debugUuid }); objectDetector.load().then(() => { this.player.debug.log('AiLoader', 'init object detector success'); this.objectDetector = objectDetector; this.objectDetector.on('jessibuca-pro-object-detector-info', info => { this.player.emit(EVENTS.aiObjectDetectorInfo, info); if (this.player._opt.aiObjectDetectShowRect) { const list = []; const config = this.player._opt.aiObjectDetectRectConfig || {}; (info.list || []).forEach(item => { const boxItem = { type: 'rect', color: config.borderColor || '#0000FF', lineWidth: config.borderWidth || 2, x: item.rect.x, y: item.rect.y, width: item.rect.width, height: item.rect.height }; const textItem = { type: 'text', color: config.color || '#000', fontSize: config.fontSize || 14, text: item.zh, x: item.rect.x, y: item.rect.y - 25 }; list.push(boxItem, textItem); }); if (this.player.video) { this.player.video.addAiContentToCanvas(list); } } }); }); } } initImageDetector() { if (this.player._opt.useImageDetector && window.JessibucaProImageDetector) { const 
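/*
 * Illustrative sketch (not part of the bundle): the detector callbacks above convert AI
 * results into overlay primitives for video.addAiContentToCanvas(): rectangles for
 * bounding boxes plus text items for labels (drawn 25px above the box). The values
 * below are invented sample data.
 *
 *   const overlayItems = [
 *     { type: 'rect', color: '#0000FF', lineWidth: 2, x: 40, y: 60, width: 120, height: 80 },
 *     { type: 'text', color: '#000', fontSize: 14, text: 'person', x: 40, y: 35 }
 *   ];
 *   // player.video.addAiContentToCanvas(overlayItems);
 */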
imageDetector = new JessibucaProImageDetector({ debug: this.player._opt.debug, debugLevel: this.player._opt.debugLevel, debugUuid: this.player._opt.debugUuid }); imageDetector.load().then(() => { this.player.debug.log('AiLoader', 'init image detector success'); this.imageDetector = imageDetector; }); } } initOcclusionDetector() { if (this.player._opt.useOcclusionDetector && window.JessibucaProOcclusionDetector) { const occlusionDetector = new JessibucaProOcclusionDetector({ debug: this.player._opt.debug, debugLevel: this.player._opt.debugLevel, debugUuid: this.player._opt.debugUuid }); occlusionDetector.load().then(() => { this.player.debug.log('AiLoader', 'init occlusion detector success'); this.occlusionDetector = occlusionDetector; }); } } updateFaceDetectorConfig() { let config = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; if (this.faceDetector) { this.faceDetector.updateConfig(config); } } updateObjectDetectorConfig() { let config = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; if (this.objectDetector) { this.objectDetector.updateConfig(config); } } updateImageDetectorConfig() { let config = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; if (this.imageDetector) { this.imageDetector.updateConfig(config); } } updateOcclusionDetectorConfig() { let config = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; if (this.occlusionDetector) { this.occlusionDetector.updateConfig(config); } } } // 右键菜单 class Contextmenu extends Emitter { constructor(player) { super(); this.player = player; this.LOG_NAME = 'Contextmenu'; this.menuList = []; this.$contextmenus = player.control.$contextmenus; if (!isMobile()) { this.init(); } else { this.player.debug.warn(this.LOG_NAME, 'not support mobile'); } player.debug.log(this.LOG_NAME, 'init'); } destroy() { this.menuList = []; this.player.debug.log(this.LOG_NAME, 'destroy'); } get isShow() { return hasClass(this.player.$container, 'jb-pro-contextmenus-show'); } show() { addClass(this.player.$container, 'jb-pro-contextmenus-show'); } hide() { removeClass(this.player.$container, 'jb-pro-contextmenus-show'); } init() { const { events: { proxy }, debug } = this.player; if (this.player._opt.contextmenuBtns.length > 0) { this.player._opt.contextmenuBtns.forEach(btn => { this.addMenuItem(btn); }); } proxy(this.player.$container, 'contextmenu', e => { e.preventDefault(); this.show(); const mouseX = e.clientX; const mouseY = e.clientY; const { height: cHeight, width: cWidth, left: cLeft, top: cTop } = this.player.$container.getBoundingClientRect(); const { height: mHeight, width: mWidth } = this.$contextmenus.getBoundingClientRect(); let menuLeft = mouseX - cLeft; let menuTop = mouseY - cTop; if (mouseX + mWidth > cLeft + cWidth) { menuLeft = cWidth - mWidth; } if (mouseY + mHeight > cTop + cHeight) { menuTop = cHeight - mHeight; } setStyle$1(this.$contextmenus, { left: `${menuLeft}px`, top: `${menuTop}px` }); }); proxy(this.player.$container, 'click', e => { if (!includeFromEvent(e, this.$contextmenus)) { this.hide(); } }); this.player.on(EVENTS.blur, () => { this.hide(); }); } _validateMenuItem(options) { let result = true; if (!options.content) { this.player.debug.warn(this.LOG_NAME, 'content is required'); result = false; } return result; } addMenuItem() { let options = arguments.length > 0 && arguments[0] !== undefined ? 
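/*
 * Illustrative sketch (not part of the bundle): the contextmenu handler above places the
 * menu at the click point relative to the container and clamps it so it cannot overflow
 * the right or bottom edge. Names below are assumed for the example.
 *
 *   function placeMenu(mouse, container, menu) {
 *     let left = mouse.x - container.left;
 *     let top = mouse.y - container.top;
 *     if (mouse.x + menu.width > container.left + container.width) left = container.width - menu.width;
 *     if (mouse.y + menu.height > container.top + container.height) top = container.height - menu.height;
 *     return { left, top };
 *   }
 */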
arguments[0] : {}; const defaultOptions = getDefaultMenuOptions(); options = Object.assign({}, defaultOptions, options); if (!this._validateMenuItem(options)) { return; } const { events: { proxy }, debug } = this.player; const uuid = uuid16(); const $btn = ` <div class="jb-pro-contextmenu jb-pro-contextmenu-${uuid}"> ${options.content} </div> `; const $childList = Array.from(this.$contextmenus.children); const nextChild = $childList[options.index]; if (nextChild) { // insert before nextChild.insertAdjacentHTML('beforebegin', $btn); } else { append(this.$contextmenus, $btn); } const $menuItem = this.$contextmenus.querySelector(`.jb-pro-contextmenu-${uuid}`); if (options.click) { proxy($menuItem, 'click', e => { e.preventDefault(); options.click.call(this.player, this, e); this.hide(); }); } this.menuList.push({ uuid, $menuItem }); } } class WebrtcForSRSDecoder extends CommonWebrtc { constructor(player) { super(player); this.TAG_NAME = 'WebrtcForSRSDecoder'; this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); this.player.debug.log(this.TAG_NAME, 'destroy'); } loadSource(url) { return new Promise((resolve, reject) => { const rtcPeerConnection = this.rtcPeerConnection; rtcPeerConnection.createOffer().then(res => { rtcPeerConnection.setLocalDescription(res); this.player.debug.log(this.TAG_NAME, `getWebRtcRemoteSdp loadSource`); getWebRtcRemoteSdpForSRS(url, res.sdp).then(response => { this.player.debug.log(this.TAG_NAME, `getWebRtcRemoteSdp response and code is ${response.code}`); const ret = response; if (ret && ret.code !== 0) { return reject(ret.msg); } if (ret) { rtcPeerConnection.setRemoteDescription(new RTCSessionDescription({ type: "answer", sdp: ret })).then(() => { resolve(); }).catch(e => { reject(e); }); } else { reject('sdp is null'); } }).catch(e => { this.player.debug.error(this.TAG_NAME, `loadSource getWebRtcRemoteSdp response error`, e); reject(e); }); }).catch(e => { this.player.debug.error(this.TAG_NAME, `loadSource rtcPeerConnection.createOffer() error`, e); reject(e); }); }); } } class WebrtcForOthersDecoder extends CommonWebrtc { constructor(player) { super(player); this.TAG_NAME = 'WebrtcForOthersDecoder'; this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { super.destroy(); this.player.debug.log(this.TAG_NAME, 'destroy'); } loadSource(url) { return new Promise((resolve, reject) => { const rtcPeerConnection = this.rtcPeerConnection; rtcPeerConnection.createOffer().then(res => { rtcPeerConnection.setLocalDescription(res); this.player.debug.log(this.TAG_NAME, `getWebRtcRemoteSdp loadSource`); getWebRtcRemoteSdpForOthers(url, res.sdp).then(response => { this.player.debug.log(this.TAG_NAME, `getWebRtcRemoteSdp response and code is ${response.code}`); response.text().then(sdp => { this.player.debug.log(this.TAG_NAME, `getWebRtcRemoteSdp response`); try { /** 判断是否可以解析为json格式 */ let jsdp = JSON.parse(sdp); this.player.debug.log(this.TAG_NAME, `this is json sdp response`); if (jsdp.code != 0) { this.player.debug.log(this.TAG_NAME, `response json code ${jsdp.code}`); reject(new Error(`response sdp json code: ${jsdp.code}`)); } sdp = jsdp.sdp; } catch (error) { this.player.debug.log(this.TAG_NAME, `this is raw sdp response`); } if (sdp) { rtcPeerConnection.setRemoteDescription(new RTCSessionDescription({ type: "answer", sdp })).then(() => { resolve(); }).catch(e => { reject(e); }); } else { reject('sdp is null'); } }).catch(e => { this.player.debug.error(this.TAG_NAME, `loadSource response.text() error`, e); reject(e); }); }).catch(e => { 
this.player.debug.error(this.TAG_NAME, `loadSource getWebRtcRemoteSdp response error`, e); reject(e); }); }).catch(e => { this.player.debug.error(this.TAG_NAME, `loadSource rtcPeerConnection.createOffer() error`, e); reject(e); }); }); } } class AliyunRtc extends Emitter { constructor(player) { super(); this.TAG_NAME = 'AliyunRtc'; this.player = player; if (!window.AliRTS) { throw new Error('AliyunRtc is not defined'); } this.aliyunRtc = window.AliRTS.createClient(); this.aliyunRtcRemoteStream = null; // video element this.$videoElement = this.player.video.$videoElement; this.listenEvents(); this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { if (this.aliyunRtc) { if (this.aliyunRtcRemoteStream) { this.aliyunRtcRemoteStream = null; } this.aliyunRtc.unsubscribe(); this.aliyunRtc = null; } this.off(); this.player.debug.log(this.TAG_NAME, 'destroy'); } // 监听事件,详见 https://help.aliyun.com/document_detail/397570.html listenEvents() { /* * 在onError中获取到错误码10201时,此时网页的音频是静音的, * 需要用户在网页上手动触发事件(必须有用户交互,不能直接通过代码控制) * 调用remoteStream.muted = false取消静音 */ this.aliyunRtc.on('onError', err => { // console.log("错误事件: ", err); this.player.debug.log(this.TAG_NAME, `onError and code is ${err.errorCode} and message: ${err.message}`); // https://help.aliyun.com/zh/live/user-guide/error-codes?spm=a2c4g.11186623.0.0.67c86f9fsWhfY5 // 拉流重连中。 if (err.errorCode !== 10400) { this.player.debug.error(this.TAG_NAME, `onError and code is ${err.errorCode} and message: ${err.message}`); this.player.emitError(EVENTS_ERROR.aliyunRtcError, err); } }); // 监听重连事件: evt是重连原因 this.aliyunRtc.on('reconnect', evt => { // console.log("重连事件: ", evt); this.player.debug.log(this.TAG_NAME, 'reconnect', evt); }); const PLAY_EVENT = { CANPLAY: "canplay", // 播放准备完成 WAITING: "waiting", // 卡顿 PLAYING: "playing", // 卡顿恢复 MEDIA: "media" // 每秒上报媒体实时状态 }; this.aliyunRtc.on('onPlayEvent', evt => { /* evt 数据结构: { event: string, // PLAY_EVENT data: any, // 数据 } */ if (evt.event === PLAY_EVENT.CANPLAY) { // console.log("播放准备就绪"); this.player.debug.log(this.TAG_NAME, 'onPlayEvent and canplay'); } else if (evt.event === PLAY_EVENT.WAITING) { // console.log("发生卡顿"); this.player.debug.log(this.TAG_NAME, 'onPlayEvent and playing - > waiting'); } else if (evt.event === PLAY_EVENT.PLAYING) { // console.log("卡顿恢复,播放继续"); this.player.debug.log(this.TAG_NAME, 'onPlayEvent and waiting -> playing'); } else if (evt.event === PLAY_EVENT.MEDIA) { // console.log("每秒实时媒体数据: ", evt.data); const mediaData = evt.data; /* evt.data 数据结构: { url: string, // 播放地址 aMsid: stirng, // 音频id(默认值'rts audio') audio: { // (部分浏览器不支持) bytesReceivedPerSecond: number, // 音频码率 lossRate: number, // 音频丢包率 rtt: number, // RTT 音/视频共用 }, vMsid: string, // 视频id(默认值'rts video') video: { // (部分浏览器不支持) bytesReceivedPerSecond: number, // 视频码率 framesDecodedPerSecond: number, // 解码帧率 fps: number, // 渲染帧率 height: number, // 分辨率高度 width: number, // 分辨率宽度 lossRate: number, // 视频丢包率 rtt: number, // RTT 音/视频共用 }, networkQuality: number, // 网络状况评分 } // networkQuality 网络状况评分取值含义: // 0: 未知, 1: 极佳, 2: 较好, 3: 一般, 4: 较差, 5: 极差, 6: 无网络 */ let stateContent = {}; let rate = 0; if (mediaData.audio) { const bytesReceivedPerSecond = Math.floor(mediaData.audio.bytesReceivedPerSecond); rate += bytesReceivedPerSecond; stateContent.abps = bytesReceivedPerSecond; } if (mediaData.video) { const bytesReceivedPerSecond = Math.floor(mediaData.video.bytesReceivedPerSecond); rate += bytesReceivedPerSecond; stateContent.vbps = bytesReceivedPerSecond; } this.player.updateStats(stateContent); this.player.emit(EVENTS.kBps, 
(rate / 1024).toFixed(2)); } }); } loadSource(url) { return new Promise((resolve, reject) => { /** * isSupport检测是否可用 * @param {Object} supportInfo 检测信息 * @param {boolean} supportInfo.isReceiveVideo 是否拉视频流 * @return {Promise} */ this.aliyunRtc.isSupport({ isReceiveVideo: true }).then(() => { /** * rts开始拉流接口 * @param {string} pullStreamUrl 拉流地址,在地址后添加@subaudio=no或者@subvideo=no来表示不订阅音频流或视频流 * @param {Object} [config] (可选)自定义配置 * @param {string} [config.signalUrl] (可选)信令地址 * @param {number} [config.retryTimes] (可选)最大重连次数(默认5) * @param {number} [config.retryInterval] (可选)重连间隔(单位ms,默认2000) * @return {Promise} */ this.aliyunRtc.subscribe(url, {}).then(remoteStream => { this.aliyunRtcRemoteStream = remoteStream; // mediaElement是媒体标签audio或video remoteStream.play(this.$videoElement); resolve(); // 调用 remoteStream.play 会将媒体流绑定到媒体标签并尝试自动播放, // 如不希望自动播放,可以传入第二个参数 {autoplay:false},从2.2.4版本开始支持 // remoteStream.play(mediaElement, {autoplay:false}); }).catch(err => { this.player.debug.error(this.TAG_NAME, 'loadSource and subscribe is not success: ', err.message); reject(err.message); }); }).catch(err => { this.player.debug.error(this.TAG_NAME, 'loadSource and is not support: ', err.message); reject(err.message); // 不支持 }); }); } getVideoCurrentTime() { let result = 0; if (this.$videoElement) { result = this.$videoElement.currentTime; } return result; } } class PressureObserverCpu { constructor(player) { this.player = player; this.TAG_NAME = 'PressureObserverCpu'; this.observer = null; this.latestCpuInfo = null; this.currentLevel = -1; this._init(); this.player.debug.log(this.TAG_NAME, 'init'); } destroy() { if (this.observer) { // this.observer.unobserve('cpu') this.observer.disconnect(); this.observer = null; } this.latestCpuInfo = null; this.currentLevel = -1; this.player.debug.log(this.TAG_NAME, 'destroy'); } getCurrentCpuState() { return this.currentLevel; } _init() { if (isFalse(supportPressureObserver())) { this.player.debug.log(this.TAG_NAME, 'not support PressureObserver'); return; } this.observer = new PressureObserver(changes => { const latestCpuInfo = (changes || []).find(item => { return item.source === 'cpu'; }); // cpu if (latestCpuInfo) { this.latestCpuInfo = latestCpuInfo; switch (latestCpuInfo.state) { case "nominal": // CPU 的压力正常 this.currentLevel = 0; break; case "fair": // 表示 CPU 的压力正常,但是有一些任务正在运行。 this.currentLevel = 1; break; case "serious": // 表示 CPU 的压力严重,但是仍然可以正常工作。 this.currentLevel = 2; break; case "critical": // 表示 CPU 的压力非常严重,无法正常工作。 this.currentLevel = 3; break; default: this.currentLevel = -1; break; } this.player.emit(EVENTS.pressureObserverCpu, this.currentLevel); } }); if (this.observer) { this.observer.observe("cpu"); } } } /** * 注意: * 1. 水印移动范围为实际视频显示区域,如果视频自带黑边,播放器无法进行规避。 * 2. 在使用动态水印功能时,播放器对象的引用不能暴露到全局环境,否则动态水印可以轻易去除。 * 3. 动态水印不适合移动端场景,特别是劫持播放的场景。常见问题:https://cloud.tencent.com/document/product/881/20219 * 4. 可以通过屏蔽全屏按钮,规避部分全屏后被劫持导致水印失效的情况。 */ class DynamicWatermark extends SingleWatermark { constructor(player) { let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; super(player); this.TAG_NAME = "DynamicWatermark"; this.isPauseAnimation = false; this.isStopAnimation = false; this.rafID = null; this.speed = options.speed || 0.2; this.isDynamic = true; // 是否开启动态水印 this.shadowRootDynamicDom = null; this.isGhost = options.isGhost === true ? 
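/*
 * Illustrative sketch (not part of the bundle): PressureObserverCpu above maps the
 * Compute Pressure API states to numeric levels and re-emits them on the player:
 * nominal -> 0, fair -> 1, serious -> 2, critical -> 3, anything else -> -1.
 *
 *   function cpuPressureLevel(state) {
 *     const map = { nominal: 0, fair: 1, serious: 2, critical: 3 };
 *     return state in map ? map[state] : -1;
 *   }
 *   cpuPressureLevel('serious'); // 2
 */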
true : false; // 是否开启幽灵模式 this.ghostConfig = { on: options.on || 5, // 幽灵模式开启时间 off: options.off || 5 // 幽灵模式关闭时间 }; this.waterMarkWillRemove = false; this.waterMarkWillAdd = false; this.player.once(EVENTS.start, () => { const content = isFalse(isRelease) ? decodeURIComponent(PLAYER_NAME + '%20%E4%BD%93%E9%AA%8C') : options.content; let videoInfo = this.player.getVideoInfo(); if (!videoInfo) { videoInfo = { width: 200, height: 200 }; } this.update({ text: { content: content, fontSize: options.fontSize || 18, color: options.color || 'white' }, // 这里可以获取到播放器的宽和高 left: videoInfo.width * Math.random(), top: videoInfo.height * Math.random(), opacity: options.opacity || 0.15 }); this.startAnimation(); }); this.player.debug.log(this.TAG_NAME, 'int'); } destroy() { super.destroy(); if (this.rafID) { cancelAnimationFrame(this.rafID); this.rafID = null; } } startAnimation() { if (!this.rafID) { // 垂直方向 let columnArrow = 1; // 水平方向 let levelArrow = 1; let random = Math.random(); let markContentElementWidthAndHeight = { width: 0, height: 0 }; const animation = () => { try { if (isFalse(this.isPauseAnimation)) { if (this.shadowRootDynamicDom && this.shadowRootInnerDom) { const containerElement = this.shadowRootInnerDom; const markContentElement = this.shadowRootDynamicDom; const containerElementRect = containerElement.getBoundingClientRect(); const markContentElementRect = markContentElement.getBoundingClientRect(); if (markContentElementRect.width && markContentElementRect.height) { markContentElementWidthAndHeight.width = markContentElementRect.width; markContentElementWidthAndHeight.height = markContentElementRect.height; } if (!this.shadowRootInnerDom.contains(this.shadowRootDynamicDom)) { if (this.isGhost) { // 对于幽灵模式,如果幽灵模式开启时,水印被移除,需要重新添加 if (isFalse(this.waterMarkWillAdd)) { this.waterMarkWillAdd = true; setTimeout(() => { this._addDom(containerElementRect, markContentElementWidthAndHeight); this.waterMarkWillAdd = false; }, 1000 * this.ghostConfig.off); } } else { this._addDom(containerElementRect, markContentElementWidthAndHeight); } if (this.speed !== 0) { requestAnimationFrame(animation); } return; } const speed = Math.min(1, 0 === this.speed ? 0 : this.speed ? 
this.speed : .2); let offsetLeft = markContentElementRect.left - containerElementRect.left; let offsetTop = markContentElementRect.top - containerElementRect.top; offsetLeft += speed * levelArrow * random; offsetTop += speed * columnArrow * (1 - random); if (offsetLeft + markContentElementWidthAndHeight.width > containerElementRect.width) { levelArrow = -1; random = Math.random(); } else if (offsetLeft < 0) { levelArrow = 1; random = Math.random(); } if (offsetTop + markContentElementWidthAndHeight.height > containerElementRect.height) { columnArrow = -1; random = Math.random(); } else if (offsetTop < 0) { columnArrow = 1; random = Math.random(); } offsetLeft = Math.min(containerElementRect.width - markContentElementWidthAndHeight.width, offsetLeft); offsetTop = Math.min(containerElementRect.height - markContentElementWidthAndHeight.height, offsetTop); const contentLeft = offsetLeft / containerElementRect.width * 100; const contentTop = offsetTop / containerElementRect.height * 100; this.shadowRootDynamicDom.style.left = `${contentLeft}%`; this.shadowRootDynamicDom.style.top = `${contentTop}%`; if (isFalse(this.waterMarkWillRemove) && this.isGhost) { this.waterMarkWillRemove = true; setTimeout(() => { this._removeDom(); this.waterMarkWillRemove = false; }, 1000 * this.ghostConfig.on); } } } } catch (e) {} if (this.isStopAnimation) { this.isStopAnimation = false; cancelAnimationFrame(this.rafID); this.rafID = null; return; } if (this.speed !== 0) { requestAnimationFrame(animation); } }; this.rafID = requestAnimationFrame(animation); } } _addDom(containerElementRect, markContentElementWidthAndHeight) { if (this.shadowRootInnerDom && this.shadowRootDynamicDom) { this.shadowRootInnerDom.appendChild(this.shadowRootDynamicDom); // 需要更新下位置 let offsetLeft = containerElementRect.width * Math.random(); let offsetTop = containerElementRect.height * Math.random(); offsetLeft = Math.min(containerElementRect.width - markContentElementWidthAndHeight.width * 2, offsetLeft); offsetTop = Math.min(containerElementRect.height - markContentElementWidthAndHeight.height * 2, offsetTop); this.shadowRootDynamicDom.style.left = `${offsetLeft}px`; this.shadowRootDynamicDom.style.top = `${offsetTop}px`; } } resumeAnimation() { this.isPauseAnimation = false; } pauseAnimation() { this.isPauseAnimation = true; } stopAnimation() { this.isStopAnimation = true; } } class Player extends Emitter { constructor(container) { let options = arguments.length > 1 && arguments[1] !== undefined ? 
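/*
 * Illustrative sketch (not part of the bundle): the animation loop above nudges the
 * watermark each frame by a random fraction of `speed`, flips direction when it would
 * leave the video area, and finally writes the position back as percentages of the
 * container so it survives resizes. Sample numbers are invented.
 *
 *   const containerWidth = 800, containerHeight = 450;      // container rect
 *   const offsetLeft = 200, offsetTop = 90;                 // clamped pixel offsets
 *   const leftPercent = offsetLeft / containerWidth * 100;  // 25 -> style.left = "25%"
 *   const topPercent = offsetTop / containerHeight * 100;   // 20 -> style.top  = "20%"
 */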
arguments[1] : {}; super(); /**@type {import('../constant').DEFAULT_PLAYER_OPTIONS}*/ this._opt = {}; this.TAG_NAME = 'Player'; this.$container = container; const defaultOptions = getDefaultPlayerOptions(); // this._opt = Object.assign({}, defaultOptions, options); this.debug = new Debug(this); this.debug.log(this.TAG_NAME, 'init'); // disable offscreen this._opt.forceNoOffscreen = true; this._canPlayAppleMpegurl = false; if (isSafari() || isIOS()) { this._canPlayAppleMpegurl = canPlayAppleMpegurl(); this.debug.log(this.TAG_NAME, 'isIOS or isSafari and canPlayAppleMpegurl', this._canPlayAppleMpegurl); } // mobile not support auto hide control if (isMobile()) { this._opt.controlAutoHide = false; } // ios video object-fill style has bug so set false if (isIphone()) { this.debug.log(this.TAG_NAME, 'isIphone and set _opt.videoRenderSupportScale false'); this._opt.videoRenderSupportScale = false; // if in wechat and is hls so set supportHls265 true if (isWeChat() && isTrue(this._opt.isHls) && isFalse(this._opt.supportHls265)) { this.debug.log(this.TAG_NAME, 'isIphone and is in wechat and is hls so set supportHls265 true'); this._opt.supportHls265 = true; } } if (isFalse(this._opt.playFailedAndReplay)) { this.debug.log(this.TAG_NAME, '_opt.playFailedAndReplay is false and set others replay params false'); // todo:记得每次新增了异常事件之后,这里都需要添加。 this._opt.webglAlignmentErrorReplay = false; this._opt.webglContextLostErrorReplay = false; this._opt.autoWasm = false; this._opt.mseDecodeErrorReplay = false; this._opt.mediaSourceTsIsMaxDiffReplay = false; this._opt.wcsDecodeErrorReplay = false; this._opt.wasmDecodeErrorReplay = false; this._opt.simdDecodeErrorReplay = false; this._opt.videoElementPlayingFailedReplay = false; this._opt.networkDelayTimeoutReplay = false; this._opt.widthOrHeightChangeReplay = false; this._opt.simdH264DecodeVideoWidthIsTooLargeReplay = false; this._opt.mediaSourceUseCanvasRenderPlayFailedReplay = false; this._opt.heartTimeoutReplay = false; this._opt.loadingTimeoutReplay = false; this._opt.websocket1006ErrorReplay = false; } if (!this._opt.forceNoOffscreen) { if (!supportOffscreenV2()) { this._opt.forceNoOffscreen = true; this._opt.useOffscreen = false; } else { this._opt.useOffscreen = true; } } // mpeg4 only can use ffmpeg to decode if (this._opt.isMpeg4) { this.debug.log(this.TAG_NAME, 'isMpeg4 is true, so set _opt.useWasm true and others params false'); this._opt.useWCS = false; this._opt.useMSE = false; this._opt.isNakedFlow = false; this._opt.useSIMD = false; this._opt.isFmp4 = false; this._opt.useWasm = true; } if (this.isPlayback()) { this._opt.mseDecoderUseWorker = false; } if (this._opt.mseDecoderUseWorker) { this._opt.mseDecoderUseWorker = supportMSEForWorker(); if (isFalse(this._opt.mseDecoderUseWorker)) { this.debug.log(this.TAG_NAME, 'mseDecoderUseWorker is true but not support so set _opt.mseDecoderUseWorker = false'); } } // if old hls or old webrtc if (this.isOldHls() || this.isWebrtcH264() || this.isAliyunRtc()) { this.debug.log(this.TAG_NAME, 'isOldHls or isWebrtcH264 or isAliyunRtc is true, so set some params false and _opt.recordType is webm'); this._opt.useWCS = false; this._opt.useMSE = false; this._opt.isNakedFlow = false; this._opt.useSIMD = false; this._opt.isFmp4 = false; this._opt.useWasm = false; this._opt.recordType = FILE_SUFFIX.webm; } // if naked flow if (this._opt.isNakedFlow) { this.debug.log(this.TAG_NAME, 'isNakedFlow is true, so set _opt.mseDecodeAudio false'); this._opt.mseDecodeAudio = false; // this._opt.videoBufferDelay = 10 * 60 * 1000; } 
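/*
 * Reading aid for the option normalization above (illustrative only; the option
 * names are the ones checked in this constructor, and the internal Player is
 * normally constructed by the public wrapper, which is outside this excerpt):
 *
 *   // e.g. an MPEG4 stream forces the wasm decode path:
 *   // const player = new Player(container, { isMpeg4: true, useMSE: true, useWCS: true });
 *   // -> the isMpeg4 branch sets useWasm = true and clears useMSE / useWCS /
 *   //    useSIMD / isNakedFlow / isFmp4, because MPEG4 is only decoded via ffmpeg/wasm.
 *   // Likewise, playFailedAndReplay: false clears every *Replay flag in one place,
 *   // and old hls / webrtc(h264) / aliyunRtc streams force recordType to webm.
 */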
// 排除hls 和 webrtc(h264) 协议 if (!this.isOldHls() && !this.isWebrtcH264()) { if (this._opt.useWCS) { const isSupportWCS = supportWCS(); const isSupportWCSHevc = supportWCSDecodeHevc(); this._opt.useWCS = isSupportWCS; if (this._opt.useWCS && this._opt.isH265) { this._opt.useWCS = isSupportWCSHevc; } if (!this._opt.useWCS) { this.debug.warn(this.TAG_NAME, ` useWCS is true, and supportWCS is ${isSupportWCS}, supportHevcWCS is ${isSupportWCSHevc} , _opt.isH265 is ${this._opt.isH265} so set useWCS false`); } if (this._opt.useWCS) { if (this._opt.useOffscreen) { this._opt.wcsUseVideoRender = false; } else { if (this._opt.wcsUseVideoRender) { this._opt.wcsUseVideoRender = supportMediaStreamTrack() && supportMediaStream(); if (!this._opt.wcsUseVideoRender) { this.debug.warn(this.TAG_NAME, 'wcsUseVideoRender is true, but not support so set wcsUseVideoRender false'); } } } } } if (this._opt.useMSE) { const isSupportMSE = supportMSE() || supportIosMSE(); const isSupportMSEHevc = supportMSEDecodeHevc() || supportIosMSEDecodeHevc(); this._opt.useMSE = isSupportMSE; if (this._opt.useMSE && this._opt.isH265) { this._opt.useMSE = isSupportMSEHevc; } if (!this._opt.useMSE) { this.debug.warn(this.TAG_NAME, ` useMSE is true, and supportMSE is ${isSupportMSE}, supportHevcMSE is ${isSupportMSEHevc} , _opt.isH265 is ${this._opt.isH265} so set useMSE false`); } } } if (isFalse(this._opt.useMSE)) { this._opt.mseDecodeAudio = false; } // 如果使用mse则强制不允许 webcodecs if (this._opt.useMSE) { if (this._opt.useWCS) { this.debug.warn(this.TAG_NAME, 'useMSE is true and useWCS is true then useWCS set true->false'); } if (!this._opt.forceNoOffscreen) { this.debug.warn(this.TAG_NAME, 'useMSE is true and forceNoOffscreen is false then forceNoOffscreen set false->true'); } this._opt.useWCS = false; this._opt.forceNoOffscreen = true; } else if (this._opt.useWCS) ; if (this._opt.isWebrtc) { if (this._opt.demuxUseWorker) { this.debug.warn(this.TAG_NAME, 'isWebrtc is true and demuxUseWorker is true then demuxUseWorker set true->false'); this._opt.demuxUseWorker = false; } } if (this._opt.isHls) { if (this._opt.demuxUseWorker) { this.debug.warn(this.TAG_NAME, 'isHls is true and demuxUseWorker is true then demuxUseWorker set true->false'); this._opt.demuxUseWorker = false; } } if (this._opt.isAliyunRtc) { if (this._opt.demuxUseWorker) { this.debug.warn(this.TAG_NAME, 'isAliyunRtc is true and demuxUseWorker is true then demuxUseWorker set true->false'); this._opt.demuxUseWorker = false; } } if (this.isStreamWebTransport()) { if (this._opt.demuxUseWorker) { this.debug.warn(this.TAG_NAME, 'is stream use webTransport is true and demuxUseWorker is true then demuxUseWorker set true->false'); this._opt.demuxUseWorker = false; } } // if (this.isPlayback()) { // if (this._opt.demuxUseWorker) { // this.debug.warn(this.TAG_NAME, 'playback is true and demuxUseWorker is true then demuxUseWorker set true->false'); // this._opt.demuxUseWorker = false; // } // } if (isFalse(this._opt.demuxUseWorker)) { this._opt.mseDecoderUseWorker = false; } // playback not support mseDecoderUseWorker if (this.isPlayback()) { this._opt.mseDecoderUseWorker = false; } if (this._opt.useMThreading) { this._opt.useMThreading = isSupportSharedArrayBuffer(); if (!this._opt.useMThreading) { this.debug.warn(this.TAG_NAME, 'useMThreading is true, but not support so set useMThreading false'); } } // check simd if (this._opt.useSIMD || this._opt.decoder.indexOf('-simd') !== -1) { // iphone latest version ,check support simd ,but decode is crash screen,so kill iphone。 const 
isSupportSimd = isSupportSIMD(); const _isIphone = isIphone(); this._opt.useSIMD = isSupportSimd && isFalse(_isIphone); if (!this._opt.useSIMD) { this.debug.warn(this.TAG_NAME, `useSIMD is true, but not support(isSupportSimd is ${isSupportSimd} ,isIphone is ${_isIphone}) so set useSIMD false`); } } if (this._opt.useSIMD) { // wasm simd if (this._opt.decoder.indexOf('-simd') === -1) { if (this._opt.useMThreading) { this._opt.decoder = this._opt.decoder.replace('decoder-pro.js', 'decoder-pro-simd-mt.js'); } else { this._opt.decoder = this._opt.decoder.replace('decoder-pro.js', 'decoder-pro-simd.js'); } } else { if (this._opt.useMThreading) { this._opt.decoder = this._opt.decoder.replace('decoder-pro-simd.js', 'decoder-pro-simd-mt.js'); } } } else { // wasm if (this._opt.decoder.indexOf('-simd') !== -1) { if (this._opt.useMThreading) { this._opt.decoder = this._opt.decoder.replace('decoder-pro-simd.js', 'decoder-pro-mt.js'); } else { this._opt.decoder = this._opt.decoder.replace('decoder-pro-simd.js', 'decoder-pro.js'); } } else { if (this._opt.useMThreading) { this._opt.decoder = this._opt.decoder.replace('decoder-pro.js', 'decoder-pro-mt.js'); } } } // wasm simd if (this._opt.decoder.indexOf('-simd') !== -1) { if (this._opt.useMThreading) { this._opt.decoderAudio = this._opt.decoder.replace('decoder-pro-simd-mt.js', 'decoder-pro-audio.js'); this._opt.decoderHard = this._opt.decoder.replace('decoder-pro-simd-mt.js', 'decoder-pro-hard.js'); this._opt.decoderHardNotWasm = this._opt.decoder.replace('decoder-pro-simd-mt.js', 'decoder-pro-hard-not-wasm.js'); } else { this._opt.decoderAudio = this._opt.decoder.replace('decoder-pro-simd.js', 'decoder-pro-audio.js'); this._opt.decoderHard = this._opt.decoder.replace('decoder-pro-simd.js', 'decoder-pro-hard.js'); this._opt.decoderHardNotWasm = this._opt.decoder.replace('decoder-pro-simd.js', 'decoder-pro-hard-not-wasm.js'); } } else { // wasm if (this._opt.useMThreading) { this._opt.decoderAudio = this._opt.decoder.replace('decoder-pro-mt.js', 'decoder-pro-audio.js'); this._opt.decoderHard = this._opt.decoder.replace('decoder-pro-mt.js', 'decoder-pro-hard.js'); this._opt.decoderHardNotWasm = this._opt.decoder.replace('decoder-pro-mt.js', 'decoder-pro-hard-not-wasm.js'); } else { this._opt.decoderAudio = this._opt.decoder.replace('decoder-pro.js', 'decoder-pro-audio.js'); this._opt.decoderHard = this._opt.decoder.replace('decoder-pro.js', 'decoder-pro-hard.js'); this._opt.decoderHardNotWasm = this._opt.decoder.replace('decoder-pro.js', 'decoder-pro-hard-not-wasm.js'); } } if (isFalse(this._opt.hasAudio)) { this._opt.operateBtns.audio = false; } if (isFalse(this._opt.hasVideo)) { this._opt.operateBtns.fullscreen = false; this._opt.operateBtns.screenshot = false; this._opt.operateBtns.record = false; this._opt.operateBtns.ptz = false; this._opt.operateBtns.quality = false; this._opt.operateBtns.zoom = false; } // if (this._opt.qualityConfig && this._opt.qualityConfig.length === 0) { if (this._opt.operateBtns.quality) { this._opt.operateBtns.quality = false; this.debug.warn(this.TAG_NAME, '_opt.qualityConfig is empty, so set operateBtns.quality false'); } } if (isTrue(this._opt.useWebGPU)) { this._opt.useWebGPU = isWebGpuSupport(); if (isFalse(this._opt.useWebGPU)) { this.debug.warn(this.TAG_NAME, 'useWebGPU is true, but not support so set useWebGPU false'); } } this._opt.hasControl = this._hasControl(); // this._loading = false; this._playing = false; this._playbackPause = false; this._hasLoaded = false; this._zooming = false; this._destroyed = false; 
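/*
 * Summary of the decoder script selection performed a few statements above
 * (derived from the replace() calls on _opt.decoder; the file names are the
 * ones referenced by those calls):
 *
 *   useSIMD = false, useMThreading = false -> decoder-pro.js
 *   useSIMD = false, useMThreading = true  -> decoder-pro-mt.js
 *   useSIMD = true,  useMThreading = false -> decoder-pro-simd.js
 *   useSIMD = true,  useMThreading = true  -> decoder-pro-simd-mt.js
 *
 * _opt.decoderAudio, _opt.decoderHard and _opt.decoderHardNotWasm are then derived
 * from the selected file name (decoder-pro-audio.js, decoder-pro-hard.js and
 * decoder-pro-hard-not-wasm.js respectively).
 */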
this._closed = false; // this._checkHeartTimeout = null; this._checkLoadingTimeout = null; this._checkStatsInterval = null; this._checkVisibleHiddenTimeout = null; // this._startBpsTime = null; // this._isPlayingBeforePageHidden = false; this._stats = { buf: 0, /* current buffer duration, in ms */ netBuf: 0, /* network delay, in ms */ fps: 0, /* current video frame rate */ maxFps: 0, /* max video frame rate */ dfps: 0, /* current decoder frame rate */ abps: 0, /* current audio bitrate, in bits */ vbps: 0, /* current video bitrate, in bits */ ts: 0, /* current video frame pts, in ms */ mseTs: 0, /* current video frame pts (mse), in ms */ currentPts: 0, /* latest displayed timestamp (latest rendered frame), in ms */ pTs: 0, /* playing timestamp, in seconds */ dts: 0, /* latest dts */ mseVideoBufferDelayTime: 0, /* mse video buffer delay time */ isDropping: false }; this._allStatsData = {}; this._faceDetectActive = false; this._objectDetectActive = false; this._occlusionDetectActive = false; this._imageDetectActive = false; /* timing statistics for each startup step */ this._times = initPlayTimes(); // this._videoTimestamp = 0; this._audioTimestamp = 0; this._latestAudioTimestamp = 0; this._videoIframeIntervalTs = 0; /* I-frame interval time */ this._streamQuality = this._opt.defaultStreamQuality || ''; /* defaults to the first entry */ if (!this._streamQuality && this._opt.qualityConfig.length > 0) { this._streamQuality = this._opt.qualityConfig[0] || ''; } this._visibility = true; this._lastestVisibilityChangeTimestamp = null; this._tempWorkerStats = null; this._historyFpsList = []; this._historyVideoDiffList = []; this._tempStreamList = []; this._tempInnerPlayBgobj = null; this._flvMetaData = null; this._flvMetaDataFps = null; this._mseWorkerData = {}; if (isFalse(this._opt.useMSE) && isFalse(this._opt.useWCS) && !this.isWebrtcH264() && !this.isOldHls()) { this._opt.useWasm = true; } if (this.isOldHls() || this.isWebrtcH264()) { this._opt.hasVideo = true; this._opt.hasAudio = true; } if (!this._opt.hasVideo) { this._opt.useMSE = false; this._opt.useWCS = false; } if (this._opt.useWasm) { /* video-tag rendering is not supported in offscreen mode */ if (this._opt.useOffscreen) { this._opt.wasmUseVideoRender = false; } else { if (this._opt.wasmUseVideoRender) { this._opt.wasmUseVideoRender = supportWasmUseVideoRender() && supportMediaStreamTrack() && supportMediaStream(); if (!this._opt.wasmUseVideoRender) { this.debug.warn(this.TAG_NAME, 'use wasm video render, but not support so set wasmUseVideoRender false'); } } } if (this._opt.useSIMD) { this.debug.log(this.TAG_NAME, 'use simd wasm'); } else { this.debug.log(this.TAG_NAME, 'use wasm'); } } if (this._opt.useWasm && (this._opt.useFaceDetector && window.JessibucaProFaceDetector || this._opt.useObjectDetector && window.JessibucaProObjectDetector || this._opt.useOcclusionDetector && window.JessibucaProOcclusionDetector || this._opt.useImageDetector && window.JessibucaProImageDetector)) { this.ai = new AiLoader(this); if (!(this._opt.useFaceDetector && window.JessibucaProFaceDetector)) { this._opt.operateBtns.aiFace = false; } if (!(this._opt.useObjectDetector && window.JessibucaProObjectDetector)) { this._opt.operateBtns.aiObject = false; } if (!(this._opt.useOcclusionDetector && window.JessibucaProOcclusionDetector)) { this._opt.operateBtns.aiOcclusion = false; } if (this._opt.useImageDetector && this._opt.aiImageDetectActive && window.JessibucaProImageDetector) { this.imageDetectActive = true; } } else { this._opt.operateBtns.aiObject = false; this._opt.operateBtns.aiFace = false; this._opt.operateBtns.aiOcclusion = false; } if (this._opt.useFaceDetector) { if (!(this._opt.useWasm && window.JessibucaProFaceDetector)) { this.debug.warn(this.TAG_NAME, `use face detector, useWasm is ${this._opt.useWasm} and window.JessibucaProFaceDetector is null`); } } if (this._opt.useObjectDetector) {
if (!(this._opt.useWasm && window.JessibucaProObjectDetector)) { this.debug.warn(this.TAG_NAME, `use object detector, useWasm is ${this._opt.useWasm} and window.JbProObjectDetector is null`); } } if (this._opt.useOcclusionDetector) { if (!(this._opt.useWasm && window.JessibucaProOcclusionDetector)) { this.debug.warn(this.TAG_NAME, `use occlusion detector, useWasm is ${this._opt.useWasm} and window.JessibucaProOcclusionDetector is null`); } } if (this._opt.useImageDetector) { if (!(this._opt.useWasm && window.JessibucaProImageDetector)) { this.debug.warn(this.TAG_NAME, `use image detector, useWasm is ${this._opt.useWasm} and window.JessibucaProImageDetector is null`); } } // maybe useVideoRender and useCanvasRender all is true if (this._opt.useVideoRender) { if (this._opt.useWasm && !this._opt.useOffscreen) { this._opt.wasmUseVideoRender = supportWasmUseVideoRender() && supportMediaStreamTrack() && supportMediaStream(); if (!this._opt.wasmUseVideoRender) { this.debug.warn(this.TAG_NAME, 'use wasm video render, but not support so set wasmUseVideoRender false'); } } else if (this._opt.useWCS && !this._opt.useOffscreen) { this._opt.wcsUseVideoRender = supportMediaStreamTrack() && supportMediaStream(); if (!this._opt.wcsUseVideoRender) { this.debug.warn(this.TAG_NAME, 'use wcs video render, but not support so set wcsUseVideoRender false'); } } } if (this._opt.useCanvasRender) { if (this._opt.useMSE) { if (isFalse(this._opt.mseDecoderUseWorker)) { this._opt.mseUseCanvasRender = true; } } if (this._opt.useWasm) { this._opt.wasmUseVideoRender = false; } if (this._opt.useWCS) { this._opt.wcsUseVideoRender = false; } if (this.isOldHls() && !isSafari()) { this._opt.hlsUseCanvasRender = true; } if (this.isWebrtcH264()) { this._opt.webrtcUseCanvasRender = true; } } // 反推下 usexxx 为true 或者 false this._opt.useVideoRender = false; this._opt.useCanvasRender = false; if (this._opt.useWasm) { if (this._opt.wasmUseVideoRender) { this._opt.useVideoRender = true; } else { this._opt.useCanvasRender = true; } } else if (this._opt.useWCS) { if (this._opt.wcsUseVideoRender) { this._opt.useVideoRender = true; } else { this._opt.useCanvasRender = true; } } else if (this._opt.useMSE) { if (this._opt.mseUseCanvasRender) { this._opt.useCanvasRender = true; } else { this._opt.useVideoRender = true; } } else if (this.isOldHls()) { if (this._opt.hlsUseCanvasRender) { this._opt.useCanvasRender = true; } else { this._opt.useVideoRender = true; } } else if (this.isWebrtcH264()) { if (this._opt.webrtcUseCanvasRender) { this._opt.useCanvasRender = true; } else { this._opt.useVideoRender = true; } } property$1(this); this.events = new Events$1(this); if (this._opt.hasVideo) { this.video = new Video(this); this.recorder = new Recorder(this); } if (this.isOldHls()) { // todo: hls this.hlsDecoder = new HlsDecoder$1(this); this.loaded = true; } else if (this.isWebrtcH264()) { if (this._opt.isWebrtcForZLM) { this.webrtc = new WebrtcForZLMDecoder(this); } else if (this._opt.isWebrtcForSRS) { this.webrtc = new WebrtcForSRSDecoder(this); } else if (this._opt.isWebrtcForOthers) { this.webrtc = new WebrtcForOthersDecoder(this); } else { this.webrtc = new WebrtcDecoder(this); } this.loaded = true; } else if (this.isAliyunRtc()) { this.aliyunRtcDecoder = new AliyunRtc(this); this.loaded = true; } else { if (this.isUseHls265()) { this.hlsDecoder = new HlsDecoder(this); } if (this.isWebrtcH265()) { this.webrtc = new WebrtcDecoder(this); } if (isFalse(onlyMseOrWcsVideo(this._opt))) { this.decoderWorker = new DecoderWorker(this); } else { 
this.loaded = true; } } if (this._opt.hasAudio) { this.audio = new Audio(this); } this.stream = null; this.demux = null; this._lastVolume = null; this._isMute = null; this._isInZoom = false; this._playingStartTimestamp = null; this.isMSEVideoDecoderInitializationFailedNotSupportHevc = false; this.isMSEAudioDecoderError = false; this.isPlayFailedAndPaused = false; if (this._opt.useWCS) { this.webcodecsDecoder = new WebcodecsDecoder(this); if (!this._opt.hasAudio && isFalse(this._opt.demuxUseWorker)) { this.loaded = true; } } if (this._opt.useMSE && isFalse(this._opt.mseDecoderUseWorker)) { this.mseDecoder = new MediaSource$3(this); if (!this._opt.hasAudio && isFalse(this._opt.demuxUseWorker)) { this.loaded = true; } } // this.control = new Control(this); // 右键菜单,for pc if (this._opt.contextmenuBtns.length > 0 && isFalse(this._opt.disableContextmenu) && isPc()) { this.contextmenu = new Contextmenu(this); } else { if (isTrue(this._opt.disableContextmenu) && this._opt.contextmenuBtns.length > 0 && isPc()) { this.debug.warn(this.TAG_NAME, 'disableContextmenu is true, but contextmenuBtns is not empty, so Contextmenu can not be created,please check'); } } if (this.isPlayback()) { this.playback = new Playback(this, this._opt.playbackConfig); } if (this._opt.operateBtns.zoom) { this.zoom = new Zoom(this); } if (isNoSleepMobile() && isFalse(this._opt.supportLockScreenPlayAudio && isIOS())) { this.keepScreenOn = new NoSleep(this); } events$1(this); observer(this); this.singleWatermark = new SingleWatermark(this); if (this._opt.ghostWatermarkConfig.content) { const config = Object.assign({}, this._opt.ghostWatermarkConfig, { isGhost: true }); this.ghostWatermark = new DynamicWatermark(this, config); } if (this._opt.dynamicWatermarkConfig.content) { this.dynamicWatermark = new DynamicWatermark(this, this._opt.dynamicWatermarkConfig); } if (this._opt.watermarkConfig) { this.updateWatermark(this._opt.watermarkConfig); } if (supportPressureObserver()) { this.pressureObserverCpu = new PressureObserverCpu(this); } if (this._opt.useWCS) { this.debug.log(this.TAG_NAME, 'use WCS'); } if (this._opt.useMSE) { if (this._opt.mseDecoderUseWorker) { this.debug.log(this.TAG_NAME, 'use worker MSE'); } else { this.debug.log(this.TAG_NAME, 'use MSE'); } } if (this._opt.useOffscreen) { this.debug.log(this.TAG_NAME, 'use offscreen'); } if (this._opt.isHls) { if (this._opt.supportHls265) { this.debug.log(this.TAG_NAME, 'use hls 265'); } else { this.debug.log(this.TAG_NAME, 'use hls'); } } if (this._opt.isWebrtc) { if (this._opt.isWebrtcH265) { this.debug.log(this.TAG_NAME, 'use webrtc h265'); } else { if (this._opt.isWebrtcForZLM) { this.debug.log(this.TAG_NAME, 'use webrtc for ZLM'); } else if (this._opt.isWebrtcForSRS) { this.debug.log(this.TAG_NAME, 'use webrtc for SRS'); } else if (this._opt.isWebrtcForOthers) { this.debug.log(this.TAG_NAME, 'use webrtc for Others'); } else { this.debug.log(this.TAG_NAME, 'use webrtc for M7S'); } } } if (this.isAliyunRtc()) { this.debug.log(this.TAG_NAME, 'use aliyun rtc'); } if (this._opt.isFmp4) { this.debug.log(this.TAG_NAME, 'use fmp4'); } if (this._opt.isMpeg4) { this.debug.log(this.TAG_NAME, 'use mpeg4'); } if (this.isPlayback()) { this.debug.log(this.TAG_NAME, 'use playback'); } if (this._opt.hasVideo) { if (this.width === 0) { this.debug.warn(this.TAG_NAME, 'container width is 0, please check the container width'); } if (this.height === 0) { this.debug.warn(this.TAG_NAME, 'container height is 0, please check the container height'); } } this.debug.log('Player options', 
JSON.stringify(this._opt)); } async destroy() { this._destroyed = true; /* components that cannot be decoupled directly listen for the 'destroy' event */ this.emit('destroy'); /* remove all listeners bound on this emitter */ this.off(); /* tear down the Events-instance bindings */ if (this.events) { this.events.destroy(); this.events = null; } this.clearCheckLoadingTimeout(); this.clearStatsInterval(); this.clearVisibilityHiddenTimeout(); /* worker */ if (this.decoderWorker) { await this.decoderWorker.destroy(); this.decoderWorker = null; } await this._destroy(); } async _destroy() { this._loading = false; this._playing = false; this._playbackPause = false; this._hasLoaded = false; this._lastVolume = null; this._isMute = null; this._zooming = false; this._faceDetectActive = false; this._objectDetectActive = false; this._occlusionDetectActive = false; this._imageDetectActive = false; this._times = initPlayTimes(); /* remove watermarks */ if (this.singleWatermark) { this.singleWatermark.destroy(); this.singleWatermark = null; } if (this.ghostWatermark) { this.ghostWatermark.destroy(); this.ghostWatermark = null; } if (this.dynamicWatermark) { this.dynamicWatermark.destroy(); this.dynamicWatermark = null; } /* cpu pressure observer */ if (this.pressureObserverCpu) { this.pressureObserverCpu.destroy(); this.pressureObserverCpu = null; } if (this.stream) { await this.stream.destroy(); this.stream = null; } if (this.hlsDecoder) { await this.hlsDecoder.destroy(); this.hlsDecoder = null; } if (this.mseDecoder) { this.mseDecoder.destroy(); this.mseDecoder = null; } if (this.webrtc) { this.webrtc.destroy(); this.webrtc = null; } if (this.aliyunRtcDecoder) { this.aliyunRtcDecoder.destroy(); this.aliyunRtcDecoder = null; } if (this.video) { this.video.destroy(); this.video = null; } if (this.audio) { this.audio.destroy(); this.audio = null; } if (this.recorder) { this.recorder.destroy(); this.recorder = null; } if (this.control) { this.control.destroy(); this.control = null; } if (this.webcodecsDecoder) { this.webcodecsDecoder.destroy(); this.webcodecsDecoder = null; } if (this.demux) { this.demux.destroy(); this.demux = null; } if (this.playback) { this.playback.destroy(); this.playback = null; } if (this.zoom) { this.zoom.destroy(); this.zoom = null; } if (this.ai) { this.ai.destroy(); this.ai = null; } // if (this.keepScreenOn) { this.releaseWakeLock(); this.keepScreenOn.destroy(); this.keepScreenOn = null; } /* reset stats */ this.resetStats(); this._audioTimestamp = 0; this._latestAudioTimestamp = 0; this._videoTimestamp = 0; this._streamQuality = ''; this._visibility = true; this._isInZoom = false; this._playingStartTimestamp = null; this._lastestVisibilityChangeTimestamp = null; this._videoIframeIntervalTs = null; this._tempWorkerStats = null; this._tempStreamList = []; this._tempInnerPlayBgobj = {}; this._flvMetaData = null; this._flvMetaDataFps = null; this._mseWorkerData = {}; this.isMSEVideoDecoderInitializationFailedNotSupportHevc = false; this.isMSEAudioDecoderError = false; this.isPlayFailedAndPaused = false; this.debug.log('play', 'destroy end'); this._opt = getDefaultPlayerOptions(); this.$container = null; } set fullscreen(value) { if (isMobile() && this._opt.useWebFullScreen) { this.webFullscreen = value; } else { this.emit(EVENTS.fullscreen, value); } } get fullscreen() { return isFullScreen() || this.webFullscreen; } set webFullscreen(value) { this.emit(EVENTS.webFullscreen, value); setTimeout(() => { this.updateOption({ rotate: value ?
270 : 0 }); this.resize(); }, 10); } get webFullscreen() { return this.$container.classList.contains('jb-pro-fullscreen-web'); } set loaded(value) { this._hasLoaded = value; } // get loaded() { return this._hasLoaded || this.isOldHls() || this.isWebrtcH264() || this._opt.useMSE && isFalse(this._opt.hasAudio) && isFalse(this._opt.demuxUseWorker) || this._opt.useWCS && !this._opt.hasAudio && isFalse(this._opt.demuxUseWorker); } // set playing(value) { if (this.isClosed() && value) { this.debug.log(this.TAG_NAME, 'player is closed, so can not play'); return; } if (value && isTrue(this.loading)) { // 将loading 设置为 false this.loading = false; } if (this.playing !== value) { this._playing = value; this.emit(EVENTS.playing, value); this.emit(EVENTS.volumechange, this.volume); if (value) { this.emit(EVENTS.play); } else { this.emit(EVENTS.pause); } } } get playing() { return this._playing; } get volume() { return this.audio && this.audio.volume || 0; } set volume(value) { if (value !== this.volume) { if (this.audio) { this.audio.setVolume(value); this._lastVolume = this.volume; this._isMute = this.volume === 0; } else { this.debug.warn(this.TAG_NAME, 'set volume error, audio is null'); } } } get lastVolume() { return this._lastVolume; } set loading(value) { if (this.loading !== value) { this._loading = value; this.emit(EVENTS.loading, this._loading); } } get loading() { return this._loading; } set zooming(value) { if (this.zooming !== value) { if (!this.zoom) { this.zoom = new Zoom(this); } this._zooming = value; this.emit(EVENTS.zooming, this.zooming); } } get zooming() { return this._zooming; } set recording(value) { if (value) { if (this.playing && !this.recording) { this.recorder && this.recorder.startRecord(); if (this.isDemuxInWorker() && this.decoderWorker) { this.decoderWorker.updateWorkConfig({ key: 'isRecording', value: true }); } } } else { if (this.recording) { if (this.isDemuxInWorker() && this.decoderWorker) { this.decoderWorker.updateWorkConfig({ key: 'isRecording', value: false }); } this.recorder && this.recorder.stopRecordAndSave().then(() => {}).catch(e => {}); } } } get recording() { return this.recorder ? 
this.recorder.isRecording : false; } set audioTimestamp(value) { if (value === null) { return; } this._audioTimestamp = value; } // get audioTimestamp() { return this._audioTimestamp; } set latestAudioTimestamp(value) { if (value === null) { return; } this._latestAudioTimestamp = value; } get latestAudioTimestamp() { return this._latestAudioTimestamp; } // get videoTimestamp() { return this._stats.currentPts || this._stats.ts; } set streamQuality(value) { if (this.streamQuality !== value) { this._streamQuality = value; this.emit(EVENTS.streamQualityChange, value); } } get streamQuality() { return this._streamQuality; } get isDebug() { return isTrue(this._opt.debug); } get scaleType() { const opt = this._opt; const isResize = opt.isResize; const isFullResize = opt.isFullResize; let result = SCALE_MODE_TYPE.full; if (isFalse(isFullResize) && isFalse(isResize)) { result = SCALE_MODE_TYPE.full; } else if (isFalse(isFullResize) && isTrue(isResize)) { result = SCALE_MODE_TYPE.auto; } else if (isTrue(isFullResize) && isTrue(isResize)) { result = SCALE_MODE_TYPE.fullAuto; } return result; } // set visibility(value) { if (this._visibility !== value) { this._visibility = value; this.emit(EVENTS.visibilityChange, value); this._lastestVisibilityChangeTimestamp = now$2(); if (value) { this.clearVisibilityHiddenTimeout(); } else { this.startVisibilityHiddenTimeout(); } } } // get visibility() { return this._visibility; } set playbackPause(value) { if (this._playbackPause !== value) { this._playbackPause = value; // inner this.emit(EVENTS.playbackPause, value); // out this.emit(EVENTS.playbackPauseOrResume, value); } } get playbackPause() { return this.isPlayback() && this._playbackPause; } set videoIframeIntervalTs(ts) { this._videoIframeIntervalTs = ts; } get videoIframeIntervalTs() { return this._videoIframeIntervalTs; } set faceDetectActive(value) { if (this._faceDetectActive !== value) { this._faceDetectActive = value; this.emit(EVENTS.faceDetectActive, value); } } get faceDetectActive() { return this._faceDetectActive; } set objectDetectActive(value) { if (this._objectDetectActive !== value) { this._objectDetectActive = value; this.emit(EVENTS.objectDetectActive, value); } } get objectDetectActive() { return this._objectDetectActive; } set occlusionDetectActive(value) { if (this._occlusionDetectActive !== value) { this._occlusionDetectActive = value; this.emit(EVENTS.occlusionDetectActive, value); } } get occlusionDetectActive() { return this._occlusionDetectActive; } set imageDetectActive(value) { if (this._imageDetectActive !== value) { this._imageDetectActive = value; // this.emit(EVENTS.imageDetectActive, value) } } get imageDetectActive() { return this._imageDetectActive; } get isUseWorkerDemuxAndDecode() { return this.stream && this.stream.getStreamType() === PLAYER_STREAM_TYPE.worker; } isDestroyed() { return this._destroyed; } isClosed() { return this._closed; } isDestroyedOrClosed() { return this.isDestroyed() || this.isClosed(); } isPlaying() { let result = false; if (this._opt.playType === PLAY_TYPE.player) { result = this.playing; } else if (this._opt.playType === PLAY_TYPE.playbackTF) { result = isFalse(this.playbackPause) && this.playing; } return result; } /** * * @param options */ updateOption(options) { let needWorkerUpdate = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : false; this._opt = Object.assign({}, this._opt, options); if (isTrue(needWorkerUpdate) && this.decoderWorker) { Object.keys(options).forEach(key => { this.decoderWorker.updateWorkConfig({ key, value: options[key] }); }); } } /** * * @returns {Promise<unknown>} */ init() { return new Promise((resolve, reject) => { if (!this.video) { if (this._opt.hasVideo) { this.video = new Video(this); } } if (!this.audio) { if (this._opt.hasAudio) { this.audio = new Audio(this); } } if (!this.stream) { this.stream = new Stream$1(this); } if (this.isOldHls()) { if (!this.hlsDecoder) { // todo: hls this.hlsDecoder = new HlsDecoder$1(this); this.loaded = true; } resolve(); } else if (this.isWebrtcH264()) { if (!this.webrtc) { if (this._opt.isWebrtcForZLM) { this.webrtc = new WebrtcForZLMDecoder(this); } else if (this._opt.isWebrtcForSRS) { this.webrtc = new WebrtcForSRSDecoder(this); } else if (this._opt.isWebrtcForOthers) { this.webrtc = new WebrtcForOthersDecoder(this); } else { this.webrtc = new WebrtcDecoder(this); } this.loaded = true; } resolve(); } else if (this.isAliyunRtc()) { if (!this.aliyunRtcDecoder) { this.aliyunRtcDecoder = new AliyunRtc(this); this.loaded = true; } resolve(); } else { if (!this.demux) { if (this._opt.hasVideo && !this.isUseWorkerDemuxAndDecode) { this.demux = new Demux(this); } } if (this._opt.useWCS) { if (!this.webcodecsDecoder) { this.webcodecsDecoder = new WebcodecsDecoder(this); } } if (this._opt.useMSE && isFalse(this._opt.mseDecoderUseWorker)) { if (!this.mseDecoder) { this.mseDecoder = new MediaSource$3(this); } } if (this.isUseHls265()) { if (!this.hlsDecoder) { this.hlsDecoder = new HlsDecoder(this); } } if (this.isWebrtcH265()) { if (!this.webrtc) { this.webrtc = new WebrtcDecoder(this); } } if (this.decoderWorker) { if (this.loaded) { resolve(); } else { this.once(EVENTS.decoderWorkerInit, () => { if (this.isDestroyedOrClosed()) { this.debug.error(this.TAG_NAME, 'init() failed and player is destroyed'); reject('init() failed and player is destroyed'); } else { this.loaded = true; resolve(); } }); } } else { if (!onlyMseOrWcsVideo(this._opt)) { this.decoderWorker = new DecoderWorker(this); this.once(EVENTS.decoderWorkerInit, () => { if (this.isDestroyedOrClosed()) { this.debug.error(this.TAG_NAME, 'init() failed and player is destroyed'); reject('init() failed and player is destroyed'); } else { this.loaded = true; resolve(); } }); } else { resolve(); } } } }); } /** * * @param url * @returns {Promise<unknown>} */ play(url) { let options = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; return new Promise((resolve, reject) => { if (!url && !this._opt.url) { return reject("url is empty"); } this._closed = false; this.loading = true; this.playing = false; this._times.playInitStart = now$2(); if (!url) { url = this._opt.url; } this._opt.url = url; if (this.control && this._opt.loadingBackground) { this.control.initLoadingBackground(); } this.init().then(() => { this.debug.log(this.TAG_NAME, 'play() init and next fetch stream'); this._times.playStart = now$2(); // if (this._opt.isNotMute) { this.mute(false); } this.enableWakeLock(); // this.checkLoadingTimeout(); if (this.stream) { // fetch error this.stream.once(EVENTS_ERROR.fetchError, error => { this.emitError(EVENTS_ERROR.fetchError, error); //reject(error) }); // ws this.stream.once(EVENTS_ERROR.websocketError, error => { this.emitError(EVENTS_ERROR.websocketError, error); //reject(error) }); // stream end this.stream.once(EVENTS.streamEnd, msg => { this.emit(EVENTS.streamEnd, msg); // reject('stream end'); }); // hls this.stream.once(EVENTS_ERROR.hlsError, error => { this.emitError(EVENTS_ERROR.hlsError, error); // reject(error) }); // webrtc this.stream.once(EVENTS_ERROR.webrtcError, error => { this.emitError(EVENTS_ERROR.webrtcError, error); // reject(error); }); // success this.stream.once(EVENTS.streamSuccess, () => { resolve(); this._times.streamResponse = now$2(); this.video && this.video.play(); this.checkStatsInterval(); if (this.isPlayback() && this.playback) { this.playback.startCheckStatsInterval(); } }); this.stream.fetchStream(url, options); } else { this.debug.warn(this.TAG_NAME, `play() this.stream is null and is isDestroyedOrClosed is ${this.isDestroyedOrClosed()}`); reject('this.stream is null'); } }).catch(e => { reject(e); }); }); } playForControl() { return new Promise((resolve, reject) => { this.debug.log(this.TAG_NAME, `playForControl() and pauseAndNextPlayUseLastFrameShow is ${this._opt.pauseAndNextPlayUseLastFrameShow}`); if (this._opt.pauseAndNextPlayUseLastFrameShow) { if (this._tempInnerPlayBgobj && this._tempInnerPlayBgobj.loadingBackground) { this.updateOption({ loadingBackground: this._tempInnerPlayBgobj.loadingBackground, loadingBackgroundWidth: this._tempInnerPlayBgobj.loadingBackgroundWidth, loadingBackgroundHeight: this._tempInnerPlayBgobj.loadingBackgroundHeight }); } } this.play().then(res => { resolve(res); }).catch(e => { reject(e); }); }); } /** * _close() and clearView(); */ close() { return new Promise((resolve, reject) => { this._close().then(() => { this.video && this.video.clearView(); resolve(); }).catch(e => { reject(e); }); }); } resumeAudioAfterPause() { if (this.lastVolume && isFalse(this._isMute)) { this.volume = this.lastVolume; } } /** * * @returns {Promise<unknown>} * @private */ async _close() { this._closed = true; if (this.video) { this.video.resetInit(); this.video.pause(true); } this.loading = false; this.recording = false; this.zooming = false; this.playing = false; this.clearCheckLoadingTimeout(); this.clearStatsInterval(); if (this.isPlayback() && this.playback) { this.playback.clearStatsInterval(); } // release lock this.releaseWakeLock(); // reset stats this.resetStats(); // this._audioTimestamp = 0; this._videoTimestamp = 0; // this._times = initPlayTimes(); // if (this.decoderWorker) { await this.decoderWorker.destroy(); this.decoderWorker = null; } // if (this.stream) { await this.stream.destroy(); this.stream = null; } if (this.demux) { this.demux.destroy(); this.demux = null; } if (this.webcodecsDecoder) { 
this.webcodecsDecoder.destroy(); this.webcodecsDecoder = null; } // if (this.mseDecoder) { this.mseDecoder.destroy(); this.mseDecoder = null; } // if (this.hlsDecoder) { await this.hlsDecoder.destroy(); this.hlsDecoder = null; } if (this.webrtc) { this.webrtc.destroy(); this.webrtc = null; } if (this.aliyunRtcDecoder) { this.aliyunRtcDecoder.destroy(); this.aliyunRtcDecoder = null; } if (this.audio) { this.audio.destroy(); this.audio = null; } } /** * * @param flag {boolean} 是否清除画面 * @returns {Promise<unknown>} */ pause() { let flag = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; return new Promise((resolve, reject) => { if (flag) { this.close().then(() => { resolve(); }).catch(e => { reject(e); }); } else { this._close().then(() => { resolve(); }).catch(e => { reject(e); }); } }); } /** * for inner use * @param flag * @returns {Promise<unknown>} * @private */ pauseForControl() { return new Promise((resolve, reject) => { this.debug.log(this.TAG_NAME, `_pauseInner()`); if (this._opt.pauseAndNextPlayUseLastFrameShow && this.video) { const loadingBackground = this.video.screenshot('', 'png', 0.92, 'base64'); if (loadingBackground) { const videoInfo = this.getVideoInfo(); if (videoInfo) { this.debug.log(this.TAG_NAME, `pauseForControl() and loadingBackground width is ${videoInfo.width} and height is ${videoInfo.height}`); this._tempInnerPlayBgobj = { loadingBackground, loadingBackgroundWidth: videoInfo.width, loadingBackgroundHeight: videoInfo.height }; } else { this.debug.warn(this.TAG_NAME, `pauseForControl() and videoInfo is null`); } } else { this.debug.warn(this.TAG_NAME, `pauseForControl() and loadingBackground is null`); } } this.pause().then(res => { resolve(res); }).catch(e => { reject(e); }); }); } isAudioMute() { let result = true; if (this.audio) { result = this.audio.isMute; } return result; } isAudioNotMute() { return !this.isAudioMute(); } /** * * @param flag */ mute(flag) { this.audio && this.audio.mute(flag); } /** * */ resize() { this.video && this.video.resize(); } /** * * @param fileName * @param fileType */ startRecord(fileName, fileType) { if (this.recording) { return; } this.recorder.setFileName(fileName, fileType); this.recording = true; } /** * */ stopRecordAndSave(type, fileName) { // if (this.recording) { // this.recording = false; // } return new Promise((resolve, reject) => { if (!this.recorder) { reject('recorder is null'); } if (this.recording) { if (this._opt.useWasm && this.decoderWorker) { this.decoderWorker.updateWorkConfig({ key: 'isRecording', value: false }); } this.recorder.stopRecordAndSave(type, fileName).then(blob => { resolve(blob); }).catch(e => { reject(e); }); } else { reject('recorder is not recording'); } }); } _hasControl() { let result = false; let hasBtnShow = false; Object.keys(this._opt.operateBtns).forEach(key => { if (this._opt.operateBtns[key] && ('' + key).indexOf('Fn') === -1) { hasBtnShow = true; } }); if (this._opt.showBandwidth || hasBtnShow) { result = true; } if (this._opt.extendOperateBtns && this._opt.extendOperateBtns.length > 0) { result = true; } if (this.isPlayback() && this._opt.playbackConfig.showControl) { result = true; } return result; } // wasm 解码 useWasmDecode() { return isFalse(this._opt.useMSE) && isFalse(this._opt.useWCS); } // wasm 或者webcodecs解码 用video 标签渲染 canVideoTrackWritter() { const _opt = this._opt; return !this.isOldHls() && // hls 旧版不支持 !this.isWebrtcH264() && // webrtc h264 不支持 isFalse(_opt.useMSE) && // mse 不支持 !this.isAliyunRtc() && ( // 阿里云rtc 不支持 _opt.useWCS && 
isFalse(_opt.useOffscreen) && _opt.wcsUseVideoRender || this.useWasmDecode()); } // 心跳检查,如果渲染间隔暂停了多少时间之后,就会抛出异常 checkHeartTimeout$2() { // if playing is true and playback pause is false if (isFalse(this.playbackPause) && this.playing) { // check is destroyed if (this.isDestroyedOrClosed()) { this.debug && this.debug.warn(this.TAG_NAME, `checkHeartTimeout$2 but player is destroyed`); return; } // check again if (isFalse(this.isHistoryFpsListAllZero())) { this.debug && this.debug.warn(this.TAG_NAME, `checkHeartTimeout$2 but fps is not all zero`); return; } // check again if (this._stats.fps !== 0) { this.debug && this.debug.warn(this.TAG_NAME, `checkHeartTimeout$2 but fps is ${this._stats.fps}`); return; } // if is not visibility (setInterval emit time is 1s), use vbps to check if (isFalse(this.visibility)) { if (this._stats.vbps !== 0) { this.debug && this.debug.warn(this.TAG_NAME, `checkHeartTimeout$2 but page is not visibility and vbps is ${this._stats.vbps}`); return; } } const historyFpsListString = this._historyFpsList.join(','); this.debug.warn(this.TAG_NAME, `checkHeartTimeout$2 and pause and emit delayTimeout event and current vbps is ${this._stats.vbps} and current fps is ${this._stats.fps} and history FpsList is ${historyFpsListString} and current visibility is ${this.visibility} and`); this.emit(EVENTS.timeout, EVENTS.delayTimeout); this.emit(EVENTS.delayTimeout); } else { this.debug.log(this.TAG_NAME, `checkHeartTimeout$2 playbackPause is ${this.playbackPause}, playing is ${this.playing}`); } } checkStatsInterval() { this._checkStatsInterval = setInterval(() => { this.updateStats(); }, 1000); } // loading 等待时间 checkLoadingTimeout() { this._checkLoadingTimeout = setTimeout(() => { // check again if (this.playing) { this.debug.warn(this.TAG_NAME, `checkLoadingTimeout but loading is ${this.loading} and playing is ${this.playing}`); return; } if (this.isDestroyedOrClosed()) { this.debug && this.debug.warn(this.TAG_NAME, `checkLoadingTimeout but player is destroyed`); return; } this.debug.warn(this.TAG_NAME, `checkLoadingTimeout and pause and emit loadingTimeout event`); this.emit(EVENTS.timeout, EVENTS.loadingTimeout); this.emit(EVENTS.loadingTimeout); }, this._opt.loadingTimeout * 1000); } clearCheckLoadingTimeout() { if (this._checkLoadingTimeout) { this.debug.log(this.TAG_NAME, `clearCheckLoadingTimeout`); clearTimeout(this._checkLoadingTimeout); this._checkLoadingTimeout = null; } } clearStatsInterval() { if (this._checkStatsInterval) { clearInterval(this._checkStatsInterval); this._checkStatsInterval = null; } } handleRender() { if (this.isDestroyedOrClosed()) { this.debug && this.debug.warn(this.TAG_NAME, `handleRender but player is destroyed`); return; } if (this.loading) { // this.clearCheckLoadingTimeout(); // first emit loading event this.loading = false; // next emit start event this.emit(EVENTS.start); } if (!this.playing) { this.playing = true; } } // updateStats() { let options = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {}; if (!this._startBpsTime) { this._startBpsTime = now$2(); } if (isNotEmpty(options.ts)) { const ts = parseInt(options.ts, 10); this._stats.ts = ts; if (this._playingStartTimestamp === null && ts > 0) { this._playingStartTimestamp = ts; } } if (isNotEmpty(options.dts)) { this._stats.dts = parseInt(options.dts, 10); } if (isNotEmpty(options.mseTs)) { this._stats.mseTs = options.mseTs; } if (isNotEmpty(options.buf)) { this._stats.buf = options.buf; } if (isNotEmpty(options.netBuf)) { this._stats.netBuf = options.netBuf; } if (isNotEmpty(options.currentPts)) { this._stats.currentPts = options.currentPts; } if (options.fps) { this._stats.fps += 1; } if (options.dfps) { this._stats.dfps += 1; } if (options.abps) { this._stats.abps += options.abps; } if (options.vbps) { this._stats.vbps += options.vbps; } if (options.workerStats) { this._tempWorkerStats = options.workerStats; } if (options.isDropping) { this._stats.isDropping = options.isDropping; } if (options.mseVideoBufferDelayTime) { // s -> ms this._stats.mseVideoBufferDelayTime = parseInt(options.mseVideoBufferDelayTime * 1000, 10); } const _nowTime = now$2(); const timestamp = _nowTime - this._startBpsTime; // waiting one second if (timestamp < 1 * 1000) { return; } if (this._playingStartTimestamp !== null) { if (this._stats.fps > 0) { this._stats.pTs += 1; } } let sourceBufferDelay = 0; let sourceBufferStore = 0; let decodeDiffTimes = 0; let decodePlaybackRate = 0; let msePendingBuffer = 0; // just for mse if (this._opt.useMSE) { if (this.mseDecoder) { sourceBufferDelay = this.mseDecoder.checkSourceBufferDelay(); sourceBufferDelay = parseInt(sourceBufferDelay * 1000, 10); sourceBufferStore = this.mseDecoder.checkSourceBufferStore(); sourceBufferStore = sourceBufferStore.toFixed(2); decodeDiffTimes = this.mseDecoder.getDecodeDiffTimes(); decodePlaybackRate = this.mseDecoder.getDecodePlaybackRate(); msePendingBuffer = this.mseDecoder.getPendingSegmentsLength(); } else if (this.isMseDecoderUseWorker()) { sourceBufferDelay = this.video.checkSourceBufferDelay(); sourceBufferDelay = parseInt(sourceBufferDelay * 1000, 10); sourceBufferStore = this.video.checkSourceBufferStore(); sourceBufferStore = sourceBufferStore.toFixed(2); decodePlaybackRate = this.video.getDecodePlaybackRate(); // todo: 缺这两个数据 // decodeDiffTimes // msePendingBuffer = this.video.getPendingSegmentsLength(); } } // just for wcs if (this._opt.useWCS && this.webcodecsDecoder) { decodeDiffTimes = this.webcodecsDecoder.getDecodeDiffTimes(); } // just for hls.js if (this.isOldHls() && this.hlsDecoder) { sourceBufferDelay = this.hlsDecoder.checkHlsBufferedDelay(); sourceBufferDelay = parseInt(sourceBufferDelay * 1000, 10); } let hlsDemuxBufferedLength = 0; let hlsDemuxAudioBufferedLength = 0; let hlsDemuxVideoBufferedLength = 0; // just for hls 265 if (this.isUseHls265() && this.hlsDecoder) { sourceBufferDelay = this.hlsDecoder.checkHlsBufferedDelay(); sourceBufferDelay = sourceBufferDelay.toFixed(2); // 这里就是ms的。 hlsDemuxBufferedLength = this.hlsDecoder.getDemuxBufferedListLength(); hlsDemuxVideoBufferedLength = this.hlsDecoder.getDemuxVideoBufferedListLength(); hlsDemuxAudioBufferedLength = this.hlsDecoder.getDemuxAudioBufferedListLength(); } // just for wasm let flvBufferByteLength = 0; let demuxBufferDelay = 0; let audioDemuxBufferDelay = 0; let isStreamTsMoreThanLocal = false; let pushLatestDelay = 0; if (this._opt.useWasm || this._opt.demuxUseWorker) { //wasm | demux if (this._tempWorkerStats) { demuxBufferDelay = this._tempWorkerStats.demuxBufferDelay; 
audioDemuxBufferDelay = this._tempWorkerStats.audioDemuxBufferDelay; flvBufferByteLength = this._tempWorkerStats.flvBufferByteLength; this._stats.netBuf = this._tempWorkerStats.netBuf; pushLatestDelay = this._tempWorkerStats.pushLatestDelay; isStreamTsMoreThanLocal = this._tempWorkerStats.isStreamTsMoreThanLocal; this._stats.buf = this._tempWorkerStats.latestDelay; } } else { // mse or wcs if (this.demux) { flvBufferByteLength = this.demux.getInputByteLength(); pushLatestDelay = this.demux.getPushLatestDelay(); isStreamTsMoreThanLocal = this.demux.getIsStreamTsMoreThanLocal(); if (this.demux.bufferList) { demuxBufferDelay = this.demux.bufferList.length; } } } // just for audio let audioBufferDelay = 0; let audioBufferDelayTs = 0; if (this.audio && this.audio.bufferList) { audioBufferDelay = this.audio.bufferList.length; audioBufferDelayTs = parseInt(audioBufferDelay * this.audio.oneBufferDuration, 10); } // just for playback let playbackVideoBufferDelay = 0; let playbackVideoWaitingBufferDelay = 0; let playbackAudioWaitingBufferDelay = 0; let playbackCacheDataDuration = 0; if (this.isPlayback()) { if (this.video) { if (this._opt.playbackConfig.isUseFpsRender) { playbackVideoBufferDelay = this.video.bufferList && this.video.bufferList.length || 0; } let fps = this.video.getStreamFps(); const oneFrameTs = fps > 0 ? 1000 / fps : 0; // ms const videoBufferDelayTs = oneFrameTs * playbackVideoBufferDelay; const demuxBufferDelayTs = oneFrameTs * demuxBufferDelay; playbackCacheDataDuration = parseInt(videoBufferDelayTs + demuxBufferDelayTs, 10); } } let delayTs = 0; // if (this.videoTimestamp > 0) { delayTs = this._stats.dts - this.videoTimestamp; } const totalDelayTs = delayTs + this._stats.netBuf; if (this.isOldHls()) { this._stats.fps = this.hlsDecoder.getFps(); } if (this._stats.fps > this._stats.maxFps) { this._stats.maxFps = this._stats.fps; } let videoCurrentTime = this.getVideoCurrentTime(); const prevVideoCurrentTime = this._stats.videoCurrentTime; let videoCurrentTimeDiff = -1; if (prevVideoCurrentTime && videoCurrentTime) { videoCurrentTimeDiff = (videoCurrentTime - prevVideoCurrentTime).toFixed(2); videoCurrentTime = videoCurrentTime.toFixed(2); } let audioSyncVideo = 0; if (this.audioTimestamp > 0) { audioSyncVideo = this.audioTimestamp - this.getRenderCurrentPts(); } this._allStatsData = Object.assign(this._stats, { audioBuffer: audioBufferDelay, audioBufferDelayTs: audioBufferDelayTs, audioTs: this.audioTimestamp, latestAudioTs: this.latestAudioTimestamp, // playback playbackVideoBuffer: playbackVideoBufferDelay, playbackVideoWaitingBuffer: playbackVideoWaitingBufferDelay, playbackAudioWaitingBuffer: playbackAudioWaitingBufferDelay, playbackCacheDataDuration, demuxBuffer: demuxBufferDelay, pushLatestDelay: pushLatestDelay, audioDemuxBuffer: audioDemuxBufferDelay, flvBuffer: flvBufferByteLength, // mse mseDelay: sourceBufferDelay, mseStore: sourceBufferStore, mseDecodeDiffTimes: decodeDiffTimes, mseDecodePlaybackRate: decodePlaybackRate, msePendingBuffer, // wcs wcsDecodeDiffTimes: decodeDiffTimes, // hls hlsDelay: sourceBufferDelay, hlsDemuxLength: hlsDemuxBufferedLength, hlsDemuxAudioLength: hlsDemuxAudioBufferedLength, hlsDemuxVideoLength: hlsDemuxVideoBufferedLength, delayTs, totalDelayTs, isStreamTsMoreThanLocal, // for video videoCurrentTime, videoCurrentTimeDiff, audioSyncVideo }); let performance = null; let videoSmooth = null; let videoSmoothInfo = ''; if (this.isPlayer() && this._opt.hasVideo && this.playing) { performance = fpsStatus(this._stats.fps, this._flvMetaDataFps); 
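/* The fpsStatus() helper (defined outside this excerpt) appears to rate the fps
   measured in this 1-second window against the frame rate reported in the FLV
   metadata (this._flvMetaDataFps); the resulting grade is attached to the stats
   payload and re-emitted below via the EVENTS.performance event. */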
this._allStatsData.performance = performance; const checkResult = this.checkVideoSmooth(this._allStatsData); videoSmoothInfo = checkResult.reason; videoSmooth = checkResult.result; this._allStatsData.videoSmooth = videoSmooth; } this.emit(EVENTS.stats, this._allStatsData); /* emit error */ if (this._allStatsData.flvBuffer > FLV_BUFFER_LARGE_SIZE) { this.emit(EVENTS_ERROR.flvDemuxBufferSizeTooLarge, this._allStatsData.flvBuffer); } if (this._opt.hasVideo) { this.updateHistoryFpsList(this._stats.fps, this._stats.videoCurrentTimeDiff); if (isNotEmpty(performance)) { this.emit(EVENTS.performance, performance); } if (isNotEmpty(videoSmooth)) { this.emit(EVENTS.videoSmooth, videoSmooth, videoSmoothInfo); } } else { if (this._opt.hasAudio) { /* use abps to check */ this.updateHistoryFpsList(this._stats.abps, 0); } } this._stats.fps = 0; this._stats.dfps = 0; this._stats.abps = 0; this._stats.vbps = 0; this._stats.isDropping = false; this._startBpsTime = _nowTime; } resetStats() { this._startBpsTime = null; this._playingStartTimestamp = null; this._historyFpsList = []; this._historyVideoDiffList = []; this._stats = { buf: 0, /* ms */ netBuf: 0, fps: 0, maxFps: 0, dfps: 0, abps: 0, vbps: 0, ts: 0, mseTs: 0, currentPts: 0, pTs: 0, dts: 0, mseVideoBufferDelayTime: 0, /* ms */ isDropping: false }; this._allStatsData = {}; } /* check whether video playback is smooth */ checkVideoSmooth(stats) { let result = true; let reason = ''; if (this.isWebrtcH264() || this.isOldHls()) { return { result, reason }; } /* no incoming video bitrate */ if (stats.vbps === 0 && isFalse(this._opt.isHls)) { reason = 'vbpsIsZero'; this.debug.log(this.TAG_NAME, `checkVideoSmooth false because ${reason}`); result = false; } if (result) { /* frame dropping */ if (stats.isDropping) { reason = 'isDroppingIsTrue'; this.debug.log(this.TAG_NAME, `checkVideoSmooth false because ${reason}`); result = false; } } if (result) { /* visibility is true */ if (this.visibility) { if (this._historyFpsList.length >= this._opt.heartTimeout) { const calcList = removeMaxAndMin(this._historyFpsList); /* compute the average */ const avgFps = calcList.reduce((a, b) => a + b, 0) / calcList.length; const choppyFps = avgFps * 0.83; if (stats.fps < choppyFps) { reason = 'fpsIsLow'; this.debug.log(this.TAG_NAME, `checkVideoSmooth false because fps is ${stats.fps} < min fps is ${choppyFps} and avgFps is ${avgFps}`); result = false; } } } } if (result) { if (this.visibility && this._opt.useMSE) { if ((stats.videoCurrentTimeDiff >= 1.5 || stats.videoCurrentTimeDiff <= 0.5) && stats.videoCurrentTimeDiff !== -1) { reason = 'videoCurrentTimeDiffIsNotNormal'; this.debug.log(this.TAG_NAME, `checkVideoSmooth false because videoCurrentTimeDiff is ${stats.videoCurrentTimeDiff}`); result = false; } } } return { result, reason }; } enableWakeLock() { if (this._opt.keepScreenOn) { this.keepScreenOn && this.keepScreenOn.enable(); } } releaseWakeLock() { if (this._opt.keepScreenOn) { this.keepScreenOn && this.keepScreenOn.disable(); } } clearBufferDelay() { if (this._opt.useWasm) { if (this.decoderWorker) { this.decoderWorker.clearWorkBuffer(true); } } else { if (this.demux) { this.demux.clearBuffer(true); } } } doDestroy() { this.emit(EVENTS.beforeDestroy); } handlePlayToRenderTimes() { if (this.isDestroyedOrClosed()) { this.debug.log(this.TAG_NAME, `handlePlayToRenderTimes but player is closed or destroyed`); return; } const _times = this.getPlayToRenderTimes(); this.emit(EVENTS.playToRenderTimes, _times); } getPlayToRenderTimes() { const _times = this._times; // play init time
_times.playTimestamp = _times.playStart - _times.playInitStart; // stream start time _times.streamTimestamp = _times.streamStart - _times.playStart; // stream response time _times.streamResponseTimestamp = _times.streamResponse - _times.streamStart > 0 ? _times.streamResponse - _times.streamStart : 0; // demux time _times.demuxTimestamp = _times.demuxStart - _times.streamResponse > 0 ? _times.demuxStart - _times.streamResponse : 0; // decode times _times.decodeTimestamp = _times.decodeStart - _times.demuxStart > 0 ? _times.decodeStart - _times.demuxStart : 0; // start render time _times.videoTimestamp = _times.videoStart - _times.decodeStart; // all time _times.allTimestamp = _times.videoStart - _times.playInitStart; return _times; } getOption() { return this._opt; } getPlayType() { return this._opt.playType; } isPlayer() { return this._opt.playType === PLAY_TYPE.player; } isPlayback() { return this._opt.playType === PLAY_TYPE.playbackTF; } isDemuxSetCodecInit() { let result = true; let _opt = this._opt; if (_opt.useWCS && !_opt.useOffscreen) { if (this.webcodecsDecoder) { result = this.webcodecsDecoder.hasInit; } else { result = false; } } else if (_opt.useMSE) { if (this.mseDecoder) { result = this.mseDecoder.hasInit; } else { result = false; } } return result; } isDemuxDecodeFirstIIframeInit() { let result = true; let _opt = this._opt; if (_opt.useWCS && !_opt.useOffscreen) { if (this.webcodecsDecoder) { result = this.webcodecsDecoder.isDecodeFirstIIframe; } else { result = false; } } else if (_opt.useMSE) { if (this.mseDecoder) { result = this.mseDecoder.isDecodeFirstIIframe; } else { result = false; } } return result; } isAudioPlaybackRateSpeed() { let result = false; if (this.audio) { result = this.audio.isPlaybackRateSpeed(); } return result; } getPlayingTimestamp() { return this._stats.pTs; } getRecordingType() { let result = null; if (this.recorder) { result = this.recorder.getType(); } return result; } getRecordingByteLength() { let result = 0; if (this.recording) { result = this.recorder.getToTalByteLength(); } return result; } getRecordingDuration() { let result = 0; if (this.recording) { result = this.recorder.getTotalDuration(); } return result; } getDecodeType() { let result = ''; const options = this.getOption(); if (this.isWebrtcH264()) { return DECODE_TYPE.webrtc; } if (this.isAliyunRtc()) { return DECODE_TYPE.aliyunRtc; } if (this.isOldHls()) { return DECODE_TYPE.hls; } if (options.useMSE) { result += DECODE_TYPE.mse + ' '; if (options.mseDecoderUseWorker) { result += 'worker'; } } if (options.useWCS) { result += DECODE_TYPE.wcs + ' '; } if (options.useWasm) { result += DECODE_TYPE.wasm + ' '; if (options.useSIMD) { result += DECODE_TYPE.simd + ' '; } if (options.useMThreading) { result += DECODE_TYPE.mt + ' '; } } if (options.useOffscreen) { result += DECODE_TYPE.offscreen + ' '; } return result; } getDemuxType() { let result = ''; const options = this.getOption(); result = options.demuxType; return result; } getRenderType() { let result = ''; if (this.video) { result = this.video.getType(); } return result; } getCanvasRenderType() { let result = ''; if (this.video) { result = this.video.getCanvasType(); } return result; } getAudioEngineType() { let result = ''; if (this.audio) { result = this.audio.getEngineType(); } return result; } getStreamType() { let result = ''; if (this.stream) { result = this.stream.getStreamType(); } return result; } getAllStatsData() { return this._allStatsData; } togglePerformancePanel(toggle) { this.updateOption({ showPerformance: toggle 
}); this.emit(EVENTS.togglePerformancePanel, toggle); } setScaleMode(type) { type = Number(type); let options = { isFullResize: false, isResize: false, aspectRatio: 'default' }; switch (type) { case SCALE_MODE_TYPE.full: options.isFullResize = false; options.isResize = false; break; case SCALE_MODE_TYPE.auto: options.isFullResize = false; options.isResize = true; break; case SCALE_MODE_TYPE.fullAuto: options.isFullResize = true; options.isResize = true; break; } this.updateOption(options); this.resize(); this.emit(EVENTS.viewResizeChange, type); } startVisibilityHiddenTimeout() { this.clearVisibilityHiddenTimeout(); // if (this._opt.pageVisibilityHiddenTimeout > 0) { this.visibilityHiddenTimeout = setTimeout(() => { this.emit(EVENTS.visibilityHiddenTimeout); }, this._opt.pageVisibilityHiddenTimeout * 1000); } } clearVisibilityHiddenTimeout() { if (this._checkVisibleHiddenTimeout) { clearTimeout(this._checkVisibleHiddenTimeout); this._checkVisibleHiddenTimeout = null; } } faceDetect(toggle) { this.faceDetectActive = toggle; if (isFalse(toggle)) { // clear face detect if (this.video) { this.video.addAiContentToCanvas([]); } } } objectDetect(toggle) { this.objectDetectActive = toggle; if (isFalse(toggle)) { // clear object detect if (this.video) { this.video.addAiContentToCanvas([]); } } } occlusionDetect(toggle) { this.occlusionDetectActive = toggle; } // todo:供测试使用 downloadNakedFlowFile() { if (this.demux && this.demux.downloadNakedFlowFile) { this.demux.downloadNakedFlowFile(); } } // todo:供测试使用 downloadFmp4File() { if (this.demux && this.demux.downloadFmp4File) { this.demux.downloadFmp4File(); } } // todo:供测试使用 downloadMpeg4File() { const blob = new Blob([this._tempStreamList]); try { const oa = document.createElement('a'); oa.href = window.URL.createObjectURL(blob); oa.download = Date.now() + '.mpeg4'; oa.click(); window.URL.revokeObjectURL(oa.href); } catch (e) { console.error('downloadMpeg4File', e); } } hasCacheOnGopBuffer() { const gopTimestamp = this.videoIframeIntervalTs; const delayBufferLength = this._allStatsData.demuxBuffer; const fps = this._allStatsData.maxFps; let result = false; if (gopTimestamp && delayBufferLength && fps) { const allTimestamp = 1000 / fps * delayBufferLength; result = allTimestamp > gopTimestamp; } return result; } addContentToCanvas() { let contentList = arguments.length > 0 && arguments[0] !== undefined ? 
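/*
  Illustrative sketch of the hasCacheOnGopBuffer() arithmetic defined just above:
  it checks whether the not-yet-decoded demux buffer spans at least one keyframe
  interval (GOP). All numbers below are made-up sample values.

    const fps = 25;                    // maxFps reported by the stats
    const gopTimestamp = 2000;         // videoIframeIntervalTs: 2 s between keyframes
    const demuxBufferLength = 60;      // frames waiting in the demux buffer
    const bufferedMs = 1000 / fps * demuxBufferLength; // 40 ms per frame, 60 frames = 2400 ms
    const hasGopCached = bufferedMs > gopTimestamp;    // true: at least one full GOP is buffered
*/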
arguments[0] : []; if (this.video) { this.video.addContentToCanvas(contentList); } } addContentToContainer() { } sendWebsocketMessage(msg) { const streamType = this.getStreamType(); if (streamType === PLAYER_STREAM_TYPE.websocket || streamType === PLAYER_STREAM_TYPE.worker + " " + PLAYER_STREAM_TYPE.websocket) { this.stream.sendMessage(msg); } else { this.debug.warn(this.TAG_NAME, `sendWebsocketMessage: stream type is not websocket, current stream type is ${this.getStreamType()}`); } } checkIsInRender() { const _stats = this._stats; // return _stats.vbps > 0 && _stats.fps > 0; } setControlHtml(html) { if (this.control && this.control.$controlHtml) { this.control.$controlHtml.innerHTML = html; } } clearControlHtml() { if (this.control && this.control.$controlHtml) { this.control.$controlHtml.innerHTML = ''; } } updateWatermark(config) { if (this.singleWatermark) { this.singleWatermark.update(config); } } removeWatermark() { if (this.singleWatermark) { this.singleWatermark.remove(); } } getVideoInfo() { let result = null; if (this.video) { result = this.video.getVideoInfo(); } return result; } getAudioInfo() { let result = null; if (this.audio) { result = this.audio.getAudioInfo(); } return result; } getVideoPlaybackQuality() { let result = null; if (this.video) { result = this.video.getPlaybackQuality(); } return result; } emitError(errorType) { let message = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; this.emit(EVENTS.error, errorType, message); this.emit(errorType, message); } updateHistoryFpsList(fps, videoDiff) { if (this.playbackPause) { return; } if (this._historyFpsList.length > this._opt.heartTimeout) { this._historyFpsList.shift(); this._historyVideoDiffList.shift(); } this._historyFpsList.push(fps); this._historyVideoDiffList.push(videoDiff); if (this.isHistoryFpsListAllZero()) { this.checkHeartTimeout$2(); } } isHistoryFpsListAllZero() { let result = true; if (this._historyFpsList.length < this._opt.heartTimeout) { result = false; } if (result) { for (let i = 0; i < this._historyFpsList.length; i++) { if (this._historyFpsList[i] > 0) { result = false; break; } } } if (result) { for (let i = 0; i < this._historyVideoDiffList.length; i++) { if (this._historyVideoDiffList[i] > 0) { result = false; break; } } } return result; } isUseHls265() { return isTrue(this._opt.isHls) && isTrue(this._opt.supportHls265); } isHls() { return isTrue(this._opt.isHls); } isOldHls() { return isTrue(this._opt.isHls) && isFalse(this._opt.supportHls265); } isWebrtcNotH265() { return isTrue(this._opt.isWebrtc) && isFalse(this._opt.isWebrtcH265); } isWebrtcH264() { return isTrue(this._opt.isWebrtc) && isFalse(this._opt.isWebrtcH265); } isWebrtcH265() { return isTrue(this._opt.isWebrtc) && isTrue(this._opt.isWebrtcH265); } isAliyunRtc() { return isTrue(this._opt.isAliyunRtc); } isUseHls265UseMse() { return this.isUseHls265() && this.isUseMSE(); } isStreamWebTransport() { return this.getStreamType() === PLAYER_STREAM_TYPE.webTransport; } // isPlaybackCacheBeforeDecodeForFpsRender() { return this.isPlayback() && isTrue(this._opt.playbackConfig.isCacheBeforeDecodeForFpsRender) && isTrue(this._opt.useWCS); } isPlaybackUseWCS() { return this.isPlayback() && isTrue(this._opt.useWCS); } isPlaybackUseMSE() { return this.isPlayback() && isTrue(this._opt.useMSE); } isPlayUseMSE() { return this.isPlayer() && isTrue(this._opt.useMSE); } isInWebFullscreen() { return this._opt.useWebFullScreen && isMobile() && this.fullscreen; } getPlaybackRate() { let result = 1; if (isTrue(this.isPlayback()) 
&& this.playback) { result = this.playback.rate; } return result; } isPlaybackOnlyDecodeIFrame() { return isTrue(this.isPlayback()) && this.getPlaybackRate() >= this._opt.playbackForwardMaxRateDecodeIFrame; } pushTempStream(stream) { const arrayBuffer = new Uint8Array(stream); this._tempStreamList.push(arrayBuffer); } updateLoadingText(text) { if (this.loading && this.control) { this.control.updateLoadingText(text); } } // for video element getVideoCurrentTime() { let currentTime = 0; if (this.video) { if (this._opt.useMSE) { if (this.mseDecoder) { currentTime = this.mseDecoder.getVideoCurrentTime(); } else if (this.isMseDecoderUseWorker()) { currentTime = this.video.getVideoCurrentTime(); } } else if (this.isWebrtcH264() && this.webrtc) { currentTime = this.webrtc.getVideoCurrentTime(); } else if (this.isAliyunRtc() && this.aliyunRtcDecoder) { currentTime = this.aliyunRtcDecoder.getVideoCurrentTime(); } } return currentTime; } addMemoryLog(tag) { for (var _len = arguments.length, args = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { args[_key - 1] = arguments[_key]; } this.emit(EVENTS.memoryLog, ` [${tag}] `, ...args); } downloadMemoryLog() { this.emit(EVENTS.downloadMemoryLog); } // is in multi isInMulti() { return this._opt.multiIndex !== -1; } // isWebrtcForM7S() { return isFalse(this._opt.isWebrtcForZLM) && isFalse(this._opt.isWebrtcForSRS) && isFalse(this._opt.isWebrtcForOthers); } updateMetaData(metaData) { this.debug.log(this.TAG_NAME, 'updateMetaData', JSON.stringify(metaData)); this._flvMetaData = metaData; if (this._flvMetaData) { const metaData = this._flvMetaData; if (isNumber(metaData.framerate)) { const fpsNum = Math.floor(metaData.framerate * 1000); if (fpsNum > 0) { const fps = fpsNum / 1000; this._flvMetaDataFps = fps; } } if (isBoolean(metaData.hasAudio) && isFalse(metaData.hasAudio)) { this.debug.log('updateMetaData', 'hasAudio', metaData.hasAudio, 'and update _opt.hasAudio'); this._opt.hasAudio = metaData.hasAudio; } if (isBoolean(metaData.hasVideo) && isFalse(metaData.hasVideo)) { this.debug.log('updateMetaData', 'hasVideo', metaData.hasVideo, 'and update _opt.hasVideo'); this._opt.hasVideo = metaData.hasVideo; } } this.emit(EVENTS.flvMetaData, metaData); } getMetaData() { return this._flvMetaData; } getExtendBtnList() { const extendBtnList = this.control.getExtendBtnList(); return extendBtnList.map(item => { return { name: item.name, $container: item.$iconContainer, $btn: item.$iconWrap, $activeBtn: item.$activeIconWrap }; }); } getCpuLevel() { let result = null; if (this.pressureObserverCpu) { result = this.pressureObserverCpu.getCurrentCpuState(); } return result; } isRecordTypeFlv() { return this.recorder && this._opt.recordType === FILE_SUFFIX.flv; } isRecordTypeMp4() { return this.recorder && this._opt.recordType === FILE_SUFFIX.mp4; } isRecordTypeWebm() { return this.recorder && this._opt.recordType === FILE_SUFFIX.webm; } isDemuxInWorker() { return this._opt.useWasm || this._opt.demuxUseWorker; } isUseMSE() { return isTrue(this._opt.useMSE); } isUseWCS() { return isTrue(this._opt.useWCS); } isUseWASM() { return isTrue(this._opt.useWasm); } isMseDecoderUseWorker() { return this.isUseMSE() && isTrue(this._opt.mseDecoderUseWorker); } getAudioSyncVideoDiff() { let result = this.audioTimestamp - this.getRenderCurrentPts(); return result; } getMseVideoBufferDelayTime() { let result = 0; if (this._opt.useMSE && this.mseDecoder) { if (this.mseDecoder) { // s result = this.mseDecoder.getVideoBufferDelayTime(); } else if 
(this.isMseDecoderUseWorker()) { // s result = this.video.getVideoBufferDelayTime(); } // s -> ms result = parseInt(result * 1000, 10); } return result; } updateCurrentPts(pts) { this.updateStats({ currentPts: pts }); this.emit(EVENTS.currentPts, pts); } // 获取到渲染的最新pts getRenderCurrentPts() { let result = 0; if (this._stats.currentPts) { result = this._stats.currentPts; } else { result = this.videoTimestamp - this.getMseVideoBufferDelayTime(); } return result; } openSyncAudioAndVideo() { return this._opt.syncAudioAndVideo && this._opt.hasVideo; } showTipsMessageByCode(code) { if (this.control) { const message = this._opt.showMessageConfig[code] || '未知异常'; this.control.showTipsMessage(message, code); } } showTipsMessageByContent(content) { if (this.control && content) { this.control.showTipsMessage(content); } } hideTipsMessage() { if (this.control) { this.control.hideTipsMessage(); } } decoderCheckFirstIFrame() { if (isTrue(this._opt.checkFirstIFrame)) { if (this.mseDecoder) { this.mseDecoder.isDecodeFirstIIframe = false; } else if (this.webcodecsDecoder) { this.webcodecsDecoder.isDecodeFirstIIframe = false; } } } isHlsCanVideoPlay() { return this._canPlayAppleMpegurl && this.isOldHls(); } setPtzPosition(position) { if (this.control) { this.control.updatePtzPosition(position); } } setPlayFailedAndPaused() { this.isPlayFailedAndPaused = true; } } class Resampler { constructor(options) { const { fromSampleRate, toSampleRate, channels, inputBufferSize } = options; if (!fromSampleRate || !toSampleRate || !channels) { throw new Error("Invalid settings specified for the resampler."); } this.resampler = null; this.fromSampleRate = fromSampleRate; this.toSampleRate = toSampleRate; this.channels = channels || 0; this.inputBufferSize = inputBufferSize; this.initialize(); } initialize() { if (this.fromSampleRate == this.toSampleRate) { // Setup resampler bypass - Resampler just returns what was passed through this.resampler = buffer => { return buffer; }; this.ratioWeight = 1; } else { if (this.fromSampleRate < this.toSampleRate) { // Use generic linear interpolation if upsampling, // as linear interpolation produces a gradient that we want // and works fine with two input sample points per output in this case. this.linearInterpolation(); this.lastWeight = 1; } else { // Custom resampler I wrote that doesn't skip samples // like standard linear interpolation in high downsampling. // This is more accurate than linear interpolation on downsampling. 
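/*
  Illustrative sketch of the ratio and output sizing used by initialize()/initializeBuffers():
  linear interpolation is chosen when upsampling, the multi-tap averager below when
  downsampling. Values assume the typical talk path (48 kHz AudioContext resampled to
  8 kHz mono G.711 in 512-sample chunks); they are not read from the bundle.

    const fromSampleRate = 48000, toSampleRate = 8000, channels = 1, inputBufferSize = 512;
    const ratioWeight = fromSampleRate / toSampleRate; // 6 input samples consumed per output sample
    const outputBufferSize = Math.ceil(inputBufferSize * toSampleRate / fromSampleRate / channels * 1.000000476837158203125) + channels + channels; // ceil(85.33...) + 2 = 88 floats
    // Each 512-sample chunk therefore resamples to roughly 85 samples, with a little headroom.
*/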
this.multiTap(); this.tailExists = false; this.lastWeight = 0; } // Initialize the internal buffer: this.initializeBuffers(); this.ratioWeight = this.fromSampleRate / this.toSampleRate; } } bufferSlice(sliceAmount) { //Typed array and normal array buffer section referencing: try { return this.outputBuffer.subarray(0, sliceAmount); } catch (error) { try { //Regular array pass: this.outputBuffer.length = sliceAmount; return this.outputBuffer; } catch (error) { //Nightly Firefox 4 used to have the subarray function named as slice: return this.outputBuffer.slice(0, sliceAmount); } } } initializeBuffers() { this.outputBufferSize = Math.ceil(this.inputBufferSize * this.toSampleRate / this.fromSampleRate / this.channels * 1.000000476837158203125) + this.channels + this.channels; try { this.outputBuffer = new Float32Array(this.outputBufferSize); this.lastOutput = new Float32Array(this.channels); } catch (error) { this.outputBuffer = []; this.lastOutput = []; } } linearInterpolation() { this.resampler = buffer => { let bufferLength = buffer.length, channels = this.channels, outLength, ratioWeight, weight, firstWeight, secondWeight, sourceOffset, outputOffset, outputBuffer, channel; if (bufferLength % channels !== 0) { throw new Error("Buffer was of incorrect sample length."); } if (bufferLength <= 0) { return []; } outLength = this.outputBufferSize; ratioWeight = this.ratioWeight; weight = this.lastWeight; firstWeight = 0; secondWeight = 0; sourceOffset = 0; outputOffset = 0; outputBuffer = this.outputBuffer; for (; weight < 1; weight += ratioWeight) { secondWeight = weight % 1; firstWeight = 1 - secondWeight; this.lastWeight = weight % 1; for (channel = 0; channel < this.channels; ++channel) { outputBuffer[outputOffset++] = this.lastOutput[channel] * firstWeight + buffer[channel] * secondWeight; } } weight -= 1; for (bufferLength -= channels, sourceOffset = Math.floor(weight) * channels; outputOffset < outLength && sourceOffset < bufferLength;) { secondWeight = weight % 1; firstWeight = 1 - secondWeight; for (channel = 0; channel < this.channels; ++channel) { outputBuffer[outputOffset++] = buffer[sourceOffset + (channel > 0 ? 
channel : 0)] * firstWeight + buffer[sourceOffset + (channels + channel)] * secondWeight; } weight += ratioWeight; sourceOffset = Math.floor(weight) * channels; } for (channel = 0; channel < channels; ++channel) { this.lastOutput[channel] = buffer[sourceOffset++]; } return this.bufferSlice(outputOffset); }; } multiTap() { this.resampler = buffer => { let bufferLength = buffer.length, outLength, output_variable_list, channels = this.channels, ratioWeight, weight, channel, actualPosition, amountToNext, alreadyProcessedTail, outputBuffer, outputOffset, currentPosition; if (bufferLength % channels !== 0) { throw new Error("Buffer was of incorrect sample length."); } if (bufferLength <= 0) { return []; } outLength = this.outputBufferSize; output_variable_list = []; ratioWeight = this.ratioWeight; weight = 0; actualPosition = 0; amountToNext = 0; alreadyProcessedTail = !this.tailExists; this.tailExists = false; outputBuffer = this.outputBuffer; outputOffset = 0; currentPosition = 0; for (channel = 0; channel < channels; ++channel) { output_variable_list[channel] = 0; } do { if (alreadyProcessedTail) { weight = ratioWeight; for (channel = 0; channel < channels; ++channel) { output_variable_list[channel] = 0; } } else { weight = this.lastWeight; for (channel = 0; channel < channels; ++channel) { output_variable_list[channel] = this.lastOutput[channel]; } alreadyProcessedTail = true; } while (weight > 0 && actualPosition < bufferLength) { amountToNext = 1 + actualPosition - currentPosition; if (weight >= amountToNext) { for (channel = 0; channel < channels; ++channel) { output_variable_list[channel] += buffer[actualPosition++] * amountToNext; } currentPosition = actualPosition; weight -= amountToNext; } else { for (channel = 0; channel < channels; ++channel) { output_variable_list[channel] += buffer[actualPosition + (channel > 0 ? channel : 0)] * weight; } currentPosition += weight; weight = 0; break; } } if (weight === 0) { for (channel = 0; channel < channels; ++channel) { outputBuffer[outputOffset++] = output_variable_list[channel] / ratioWeight; } } else { this.lastWeight = weight; for (channel = 0; channel < channels; ++channel) { this.lastOutput[channel] = output_variable_list[channel]; } this.tailExists = true; break; } } while (actualPosition < bufferLength && outputOffset < outLength); return this.bufferSlice(outputOffset); }; } resample(buffer) { if (this.fromSampleRate == this.toSampleRate) { this.ratioWeight = 1; } else { if (this.fromSampleRate < this.toSampleRate) { this.lastWeight = 1; } else { this.tailExists = false; this.lastWeight = 0; } this.initializeBuffers(); this.ratioWeight = this.fromSampleRate / this.toSampleRate; } return this.resampler(buffer); } } const QUANT_MASK = 0xf; const SEG_SHIFT = 4; const BIAS = 0x84; const segEnd = [0xFF, 0x1FF, 0x3FF, 0x7FF, 0xFFF, 0x1FFF, 0x3FFF, 0x7FFF]; function _search(val, table, size) { for (let i = 0; i < size; i++) { if (val <= table[i]) { return i; } } return size; } // alaw function _linear2alaw(pcmVal) { let mask; let seg; let aval; if (pcmVal >= 0) { mask = 0xD5; } else { mask = 0x55; // pcmVal = -pcmVal - 8; pcmVal = -pcmVal - 1; if (pcmVal < 0) { pcmVal = 32767; } } /* Convert the scaled magnitude to segment number. 
*/ seg = _search(pcmVal, segEnd, 8); if (seg >= 8) { return 0x7F ^ mask; } else { aval = seg << SEG_SHIFT; if (seg < 2) { aval |= pcmVal >> 4 & QUANT_MASK; } else { aval |= pcmVal >> seg + 3 & QUANT_MASK; } return aval ^ mask; } } // ulaw function _linear2ulaw(pcmVal) { let mask = 0; if (pcmVal < 0) { pcmVal = BIAS - pcmVal; mask = 0x7F; } else { pcmVal += BIAS; mask = 0xFF; } let seg = _search(pcmVal, segEnd, 8); if (seg >= 8) { return 0x7F ^ mask; } else { let uval = seg << 4 | pcmVal >> seg + 3 & 0xF; return uval ^ mask; } } // pcm to g711a function g711aEncoder(typedArray) { const g711Array = []; const tempArray = Array.prototype.slice.call(typedArray); tempArray.forEach((i, index) => { g711Array[index] = _linear2alaw(i); }); return g711Array; } // g711u function g711uEncoder(typedArray) { const g711Array = []; const tempArray = Array.prototype.slice.call(typedArray); tempArray.forEach((i, index) => { g711Array[index] = _linear2ulaw(i); }); return g711Array; } class Talk extends Emitter { constructor(player) { let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; super(); /**@type {import('./constant').DEFAULT_TALK_OPTIONS}*/ this._opt = {}; if (player) { this.player = player; } this.tag = 'talk'; const defaultOptions = getDefaultTalkOptions(); this._opt = Object.assign({}, defaultOptions, options); this._opt.sampleRate = parseInt(this._opt.sampleRate, 10); this._opt.sampleBitsWidth = parseInt(this._opt.sampleBitsWidth, 10); this.audioContext = null; this.gainNode = null; this.recorder = null; this.workletRecorder = null; this.biquadFilter = null; this.userMediaStream = null; this.clearWorkletUrlTimeout = null; // buffersize this.bufferSize = 512; // this._opt.audioBufferLength = this.calcAudioBufferLength(); this.audioBufferList = []; // socket this.socket = null; this.socketStatus = WEBSOCKET_STATUS.notConnect; // this.mediaStreamSource = null; this.heartInterval = null; this.checkGetUserMediaTimeout = null; this.wsUrl = null; this.startTimestamp = 0; // 报文数据 this.sequenceId = 0; this.tempTimestamp = null; this.tempRtpBufferList = []; this.events = new Events$1(this); this._initTalk(); // if (!this.player) { this.debug = new Debug(this); } if ((this._opt.encType === TALK_ENC_TYPE.g711a || this._opt.encType === TALK_ENC_TYPE.g711u) && !(this._opt.sampleRate === 8000 && this._opt.sampleBitsWidth === 16)) { this.warn(this.tag, ` encType is ${this._opt.encType} and sampleBitsWidth is ${this._opt.sampleBitsWidth}, set sampleBitsWidth to ${this._opt.sampleBitsWidth}。 ${this._opt.encType} only support sampleRate 8000 and sampleBitsWidth 16`); } this.log(this.tag, 'init', JSON.stringify(this._opt)); } destroy() { if (this.clearWorkletUrlTimeout) { clearTimeout(this.clearWorkletUrlTimeout); this.clearWorkletUrlTimeout = null; } // if (this.userMediaStream) { this.userMediaStream.getTracks && this.userMediaStream.getTracks().forEach(track => { track.stop(); }); this.userMediaStream = null; } if (this.mediaStreamSource) { this.mediaStreamSource.disconnect(); this.mediaStreamSource = null; } if (this.recorder) { this.recorder.disconnect(); this.recorder.onaudioprocess = null; this.recorder = null; } if (this.biquadFilter) { this.biquadFilter.disconnect(); this.biquadFilter = null; } if (this.gainNode) { this.gainNode.disconnect(); this.gainNode = null; } if (this.workletRecorder) { this.workletRecorder.disconnect(); this.workletRecorder = null; } if (this.socket) { if (this.socketStatus === WEBSOCKET_STATUS.open) { this._sendClose(); } this.socket.close(); 
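/*
  Illustrative sketch using the g711aEncoder()/g711uEncoder() helpers defined above:
  each 16-bit PCM sample is mapped to a single 8-bit codeword, so 8 kHz audio costs
  64 kbit/s on the wire, half the size of the 16-bit PCM input. The sample values are
  arbitrary test data.

    const pcm = new Int16Array([0, 1000, -1000, 32767, -32768]);
    const alaw = g711aEncoder(pcm); // plain array of 5 A-law bytes
    const ulaw = g711uEncoder(pcm); // plain array of 5 u-law bytes
*/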
this.socket = null; } this._stopHeartInterval(); this._stopCheckGetUserMediaTimeout(); this.audioContext = null; this.gainNode = null; this.recorder = null; this.audioBufferList = []; this.sequenceId = 0; this.wsUrl = null; this.tempTimestamp = null; this.tempRtpBufferList = []; this.startTimestamp = 0; this.log('talk', 'destroy'); } addRtpToBuffer(rtp) { const len = rtp.length + this.tempRtpBufferList.length; const buffer = new Uint8Array(len); buffer.set(this.tempRtpBufferList, 0); buffer.set(rtp, this.tempRtpBufferList.length); this.tempRtpBufferList = buffer; //console.log('addRtpToBuffer length and byteLength ', this.tempRtpBufferList.length, this.tempRtpBufferList.byteLength) } downloadRtpFile() { const blob = new Blob([this.tempRtpBufferList]); try { const oa = document.createElement('a'); oa.href = window.URL.createObjectURL(blob); oa.download = Date.now() + '.rtp'; oa.click(); window.URL.revokeObjectURL(oa.href); } catch (e) { console.error('downloadRtpFile', e); } } calcAudioBufferLength() { const { sampleRate, sampleBitsWidth } = this._opt; // 默认走的是 20ms 8000 采样率 16 位精度 return sampleRate * 8 * (20 / 1000) / 8; } get socketStatusOpen() { return this.socketStatus === WEBSOCKET_STATUS.open; } log() { for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } this._log('log', ...args); } warn() { for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { args[_key2] = arguments[_key2]; } this._log('warn', ...args); } error() { for (var _len3 = arguments.length, args = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) { args[_key3] = arguments[_key3]; } this._log('error', ...args); } _log(type) { for (var _len4 = arguments.length, args = new Array(_len4 > 1 ? 
_len4 - 1 : 0), _key4 = 1; _key4 < _len4; _key4++) { args[_key4 - 1] = arguments[_key4]; } if (this.player) { this.player.debug[type](...args); } else if (this.debug) { this.debug[type](...args); } else { console[type](...args); } } _getSequenceId() { return ++this.sequenceId; } _createWebSocket() { return new Promise((resolve, reject) => { const proxy = this.events.proxy; this.socket = new WebSocket(this.wsUrl); this.socket.binaryType = 'arraybuffer'; this.emit(EVENTS.talkStreamStart); proxy(this.socket, WEBSOCKET_EVENTS.open, () => { this.socketStatus = WEBSOCKET_STATUS.open; this.log(this.tag, 'websocket open -> do talk'); this.emit(EVENTS.talkStreamOpen); resolve(); this._doTalk(); }); proxy(this.socket, WEBSOCKET_EVENTS.message, event => { this.log(this.tag, 'websocket message', event.data); }); proxy(this.socket, WEBSOCKET_EVENTS.close, e => { this.socketStatus = WEBSOCKET_STATUS.close; this.warn(this.tag, 'websocket close -> reject', e); this.emit(EVENTS.talkStreamClose); reject(e); }); proxy(this.socket, WEBSOCKET_EVENTS.error, error => { this.socketStatus = WEBSOCKET_STATUS.error; this.error(this.tag, 'websocket error -> reject', error); this.emit(EVENTS.talkStreamError, error); reject(error); }); }); } _sendClose() {} _initTalk() { this._initMethods(); if (this._opt.engine === TALK_ENGINE.worklet) { this._initWorklet(); } else if (this._opt.engine === TALK_ENGINE.script) { this._initScriptProcessor(); } this.log(this.tag, 'audioContext samplerate', this.audioContext.sampleRate); } _initMethods() { // this.audioContext = new (window.AudioContext || window.webkitAudioContext)({ sampleRate: 48000 }); this.gainNode = this.audioContext.createGain(); // default 1 this.gainNode.gain.value = 1; // 消音器 this.biquadFilter = this.audioContext.createBiquadFilter(); this.biquadFilter.type = "lowpass"; this.biquadFilter.frequency.value = 3000; this.resampler = new Resampler({ fromSampleRate: this.audioContext.sampleRate, toSampleRate: this._opt.sampleRate, channels: this._opt.numberChannels, inputBufferSize: this.bufferSize }); } _initScriptProcessor() { // const createScript = this.audioContext.createScriptProcessor || this.audioContext.createJavaScriptNode; this.recorder = createScript.apply(this.audioContext, [this.bufferSize, this._opt.numberChannels, this._opt.numberChannels]); this.recorder.onaudioprocess = e => this._onaudioprocess(e); } _initWorklet() { function workletProcess() { class TalkProcessor extends AudioWorkletProcessor { constructor(options) { super(); this._cursor = 0; this._bufferSize = options.processorOptions.bufferSize; this._buffer = new Float32Array(this._bufferSize); } process(inputs, outputs, parameters) { if (!inputs.length || !inputs[0].length) { return true; } for (let i = 0; i < inputs[0][0].length; i++) { this._cursor += 1; if (this._cursor === this._bufferSize) { this._cursor = 0; this.port.postMessage({ eventType: 'data', buffer: this._buffer }); } this._buffer[this._cursor] = inputs[0][0][i]; } return true; } } registerProcessor('talk-processor', TalkProcessor); } const workletUrl = createWorkletModuleUrl(workletProcess); this.audioContext.audioWorklet && this.audioContext.audioWorklet.addModule(workletUrl).then(() => { const workletNode = new AudioWorkletNode(this.audioContext, 'talk-processor', { processorOptions: { bufferSize: this.bufferSize } }); workletNode.connect(this.gainNode); workletNode.port.onmessage = e => { if (e.data.eventType === 'data') { this._encodeAudioData(e.data.buffer); } }; this.workletRecorder = workletNode; }); 
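/*
  Illustrative arithmetic for the worklet path above: the processor posts fixed
  512-sample Float32 chunks, which are resampled and G.711-encoded, then accumulated
  until audioBufferLength (calcAudioBufferLength(): 20 ms of audio) is reached before a
  websocket send. The 48 kHz context rate is an assumption, not read from the bundle.

    const sampleRate = 8000;                                     // talk target rate
    const audioBufferLength = sampleRate * 8 * (20 / 1000) / 8;  // 160 bytes of G.711 = 20 ms
    const contextRate = 48000, bufferSize = 512;
    const chunkMs = bufferSize / contextRate * 1000;             // about 10.7 ms of audio per worklet message
    const bytesPerChunk = bufferSize * sampleRate / contextRate; // about 85 G.711 bytes per chunk
    // So roughly every two worklet chunks one 160-byte packet is flushed to the websocket.
*/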
this.clearWorkletUrlTimeout = setTimeout(() => { URL.revokeObjectURL(workletUrl); this.clearWorkletUrlTimeout = null; }, URL_OBJECT_CLEAR_TIME); } _onaudioprocess(e) { // 数组里的每个数字都是32位的单精度浮点数 // 默认是单精度 const float32Array = e.inputBuffer.getChannelData(0); // send 出去。 this._encodeAudioData(new Float32Array(float32Array)); } _encodeAudioData(float32Array) { // 没有说话 if (float32Array[0] === 0 && float32Array[1] === 0) { this.log(this.tag, 'empty audio data'); return; } const resampleBuffer = this.resampler.resample(float32Array); // default 32Bit let tempArrayBuffer = resampleBuffer; if (this._opt.sampleBitsWidth === 16) { tempArrayBuffer = floatTo16BitPCM(resampleBuffer); } else if (this._opt.sampleBitsWidth === 8) { tempArrayBuffer = floatTo8BitPCM(resampleBuffer); } if (tempArrayBuffer.buffer !== null) { let typedArray = null; if (this._opt.encType === TALK_ENC_TYPE.g711a) { typedArray = g711aEncoder(tempArrayBuffer); } else if (this._opt.encType === TALK_ENC_TYPE.g711u) { typedArray = g711uEncoder(tempArrayBuffer); } else if (this._opt.encType === TALK_ENC_TYPE.pcm) { typedArray = tempArrayBuffer; } // 变成了8位 array 了 const unit8Array = new Uint8Array(typedArray); for (let i = 0; i < unit8Array.length; i++) { let audioBufferLength = this.audioBufferList.length; this.audioBufferList[audioBufferLength++] = unit8Array[i]; if (this.audioBufferList.length === this._opt.audioBufferLength) { this._sendTalkMsg(new Uint8Array(this.audioBufferList)); this.audioBufferList = []; } } } } _parseAudioMsg(typedArray) { let typeArray2 = null; // rtp reumx just support g711a or g711u or opus if (this._opt.packetType === TALK_PACKET_TYPE.rtp && (this._opt.encType === TALK_ENC_TYPE.g711a || this._opt.encType === TALK_ENC_TYPE.g711u)) { typeArray2 = this.rtpPacket(typedArray); } else if (this._opt.packetType === TALK_PACKET_TYPE.empty) { // 默认 typeArray2 = typedArray; } return typeArray2; } rtpPacket(typedArray) { const rtpHeader = []; //2 bits RTP的版本,这里统一为2 const version = 2; //1 bit 如果置1,在packet的末尾被填充,填充有时是方便一些针对固定长度的算法的封装 const padding = 0; //1 bit 如果置1,在RTP Header会跟着一个header extension const extension = 0; //4 bits 表示头部后 特约信源 的个数 const csrcCount = 0; //1 bit 不同的有效载荷有不同的含义,marker=1; 对于视频,标记一帧的结束;对于音频,标记会话的开始。 const marker = 1; //7 bits 表示所传输的多媒体的类型, let playloadType = 0; //16 bits 每个RTP packet的sequence number会自动加一,以便接收端检测丢包情况 let sequenceNumber = 0; //32 bits 时间戳 let timestamp = 0; //32 bits 同步源的id,每两个同步源的id不能相同 const ssrc = this._opt.rtpSsrc; // const frameLen = typedArray.length; if (this._opt.encType === TALK_ENC_TYPE.g711a) { playloadType = RTP_PAYLOAD_TYPE.g711a; } else if (this._opt.encType === TALK_ENC_TYPE.g711u) { playloadType = RTP_PAYLOAD_TYPE.g711u; } else if (this._opt.encType === TALK_ENC_TYPE.opus) { playloadType = RTP_PAYLOAD_TYPE.opus; } if (!this.startTimestamp) { this.startTimestamp = now$2(); } timestamp = now$2() - this.startTimestamp; sequenceNumber = this._getSequenceId(); // frame length // 需要在rtp头前面加两个字节,表示数据包长度(整个rtp包长度) // 国标流udp不需要两个字节长度 // 国标流tcp需要两个字节长度 // websocket目前是按照tcp的做法做的 let index = 0; if (this._opt.packetTcpSendType === TALK_PACKAGE_TCP_SEND_TYPE.tcp) { const rtpFrameLen = frameLen + 12; // 0 rtpHeader[index++] = 0xFF & rtpFrameLen >> 8; // 1 rtpHeader[index++] = 0xFF & rtpFrameLen >> 0; } rtpHeader[index++] = (version << 6) + (padding << 5) + (extension << 4) + csrcCount; rtpHeader[index++] = (marker << 7) + playloadType; rtpHeader[index++] = sequenceNumber / (0xff + 1); rtpHeader[index++] = sequenceNumber % (0xff + 1); rtpHeader[index++] = timestamp / (0xffff + 1) / 
(0xff + 1); rtpHeader[index++] = timestamp / (0xffff + 1) % (0xff + 1); rtpHeader[index++] = timestamp % (0xffff + 1) / (0xff + 1); rtpHeader[index++] = timestamp % (0xffff + 1) % (0xff + 1); rtpHeader[index++] = ssrc / (0xffff + 1) / (0xff + 1); rtpHeader[index++] = ssrc / (0xffff + 1) % (0xff + 1); rtpHeader[index++] = ssrc % (0xffff + 1) / (0xff + 1); rtpHeader[index++] = ssrc % (0xffff + 1) % (0xff + 1); let typeArray2 = rtpHeader.concat([...typedArray]); let binary = new Uint8Array(typeArray2.length); for (let ii = 0; ii < typeArray2.length; ii++) { binary[ii] = typeArray2[ii]; } return binary; } opusPacket(typedArray) { //TODO:待完成 return typedArray; } _sendTalkMsg(typedArray) { if (this.tempTimestamp === null) { this.tempTimestamp = now$2(); } const timestamp = now$2(); const diff = timestamp - this.tempTimestamp; const typedArray2 = this._parseAudioMsg(typedArray); this.log(this.tag, `'send talk msg and diff is ${diff} and byteLength is ${typedArray2.byteLength} and length is ${typedArray2.length}, and g711 length is ${typedArray.length}`); if (isTrue(this._opt.saveRtpToFile)) { if (this._opt.packetType === TALK_PACKET_TYPE.rtp) { this.addRtpToBuffer(typedArray2); } } if (typedArray2) { if (this.socketStatusOpen) { this.socket.send(typedArray2.buffer); } else { this.emit(EVENTS_ERROR.tallWebsocketClosedByError); } } this.tempTimestamp = timestamp; } _doTalk() { this._getUserMedia(); // this._getUserMedia2(); // this._getUserMedia3(); } _getUserMedia() { this.log(this.tag, 'getUserMedia'); // 老的浏览器可能根本没有实现 mediaDevices,所以我们可以先设置一个空的对象 if (window.navigator.mediaDevices === undefined) { window.navigator.mediaDevices = {}; } // 一些浏览器部分支持 mediaDevices。我们不能直接给对象设置 getUserMedia // 因为这样可能会覆盖已有的属性。这里我们只会在没有 getUserMedia 属性的时候添加它。 if (window.navigator.mediaDevices.getUserMedia === undefined) { this.log(this.tag, 'window.navigator.mediaDevices.getUserMedia is undefined and init function'); window.navigator.mediaDevices.getUserMedia = function (constraints) { // 首先,如果有 getUserMedia 的话,就获得它 var getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia; // 一些浏览器根本没实现它 - 那么就返回一个 error 到 promise 的 reject 来保持一个统一的接口 // 由于受浏览器的限制,navigator.mediaDevices.getUserMedia在https协议下是可以正常使用的, // 而在http协议下只允许localhost/127.0.0.1这两个域名访问, // 因此在开发时应做好容灾处理,上线时则需要确认生产环境是否处于https协议下。 if (!getUserMedia) { return Promise.reject(new Error('getUserMedia is not implemented in this browser')); } // 否则,为老的 navigator.getUserMedia 方法包裹一个 Promise return new Promise(function (resolve, reject) { getUserMedia.call(navigator, constraints, resolve, reject); }); }; } if (this._opt.checkGetUserMediaTimeout) { this._startCheckGetUserMediaTimeout(); } // 最后经过反复测试,只有noiseSuppression+echoCancellation同时生效时,打开录音后再播放音频,系统音量一定会变小, // 很惨的是getUserMedia只要你没有配置这两个参数,默认就是同时开启的;只要你给这两参数任意一个设为false,或者都设为false, // 就不会影响手机系统音量。 window.navigator.mediaDevices.getUserMedia({ audio: this._opt.audioConstraints, video: false }).then(stream => { this.log(this.tag, 'getUserMedia success'); this.userMediaStream = stream; this.mediaStreamSource = this.audioContext.createMediaStreamSource(stream); this.mediaStreamSource.connect(this.biquadFilter); if (this.recorder) { this.biquadFilter.connect(this.recorder); this.recorder.connect(this.gainNode); } else if (this.workletRecorder) { this.biquadFilter.connect(this.workletRecorder); this.workletRecorder.connect(this.gainNode); } this.gainNode.connect(this.audioContext.destination); this.emit(EVENTS.talkGetUserMediaSuccess); // check stream 
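/*
  Illustrative sketch of the framing produced by rtpPacket() above for the TCP-style
  packetTcpSendType: a 2-byte big-endian length prefix followed by a 12-byte RTP header
  and the G.711 payload. Payload type 8 is the standard static type for PCMA; the seq,
  timestamp and ssrc values are made up. rtpPacket() builds the same bytes with
  division/modulo instead of a DataView.

    const payloadLen = 160, seq = 1, ts = 3200, ssrc = 0x12345678;
    const buf = new Uint8Array(2 + 12 + payloadLen);
    const dv = new DataView(buf.buffer);
    dv.setUint16(0, 12 + payloadLen); // length prefix: header + payload = 172
    buf[2] = (2 << 6) | 0;            // V=2, P=0, X=0, CC=0
    buf[3] = (1 << 7) | 8;            // M=1, PT=8 (PCMA / G.711 A-law)
    dv.setUint16(4, seq);             // sequence number
    dv.setUint32(6, ts);              // timestamp
    dv.setUint32(10, ssrc);           // synchronization source id
    // buf.set(g711Payload, 14) would append the encoded audio bytes.
*/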
if (stream.oninactive === null) { stream.oninactive = e => { this._handleStreamInactive(e); }; } }).catch(e => { this.error(this.tag, 'getUserMedia error', e.toString()); this.emit(EVENTS.talkGetUserMediaFail, e.toString()); }).finally(() => { this.log(this.tag, 'getUserMedia finally'); this._stopCheckGetUserMediaTimeout(); }); } _getUserMedia2() { this.log(this.tag, 'getUserMedia'); navigator.mediaDevices ? navigator.mediaDevices.getUserMedia({ audio: true }).then(stream => { this.log(this.tag, 'getUserMedia2 success'); }) : navigator.getUserMedia({ audio: true }, this.log(this.tag, 'getUserMedia2 success'), this.log(this.tag, 'getUserMedia2 fail')); } async _getUserMedia3() { this.log(this.tag, 'getUserMedia3'); try { const stream = await navigator.mediaDevices.getUserMedia({ audio: { latency: true, noiseSuppression: true, autoGainControl: true, echoCancellation: true, sampleRate: 48000, channelCount: 1 }, video: false }); console.log('getUserMedia() got stream:', stream); this.log(this.tag, 'getUserMedia3 success'); } catch (e) { this.log(this.tag, 'getUserMedia3 fail'); } } _handleStreamInactive(e) { if (this.userMediaStream) { this.warn(this.tag, 'stream oninactive', e); this.emit(EVENTS.talkStreamInactive); } } _startCheckGetUserMediaTimeout() { this._stopCheckGetUserMediaTimeout(); this.checkGetUserMediaTimeout = setTimeout(() => { this.log(this.tag, 'check getUserMedia timeout'); this.emit(EVENTS.talkGetUserMediaTimeout); }, this._opt.getUserMediaTimeout); } _stopCheckGetUserMediaTimeout() { if (this.checkGetUserMediaTimeout) { this.log(this.tag, 'stop checkGetUserMediaTimeout'); clearTimeout(this.checkGetUserMediaTimeout); this.checkGetUserMediaTimeout = null; } } _startHeartInterval() {
// send a heartbeat packet periodically
this.heartInterval = setInterval(() => { this.log(this.tag, 'heart interval'); let data = [0x23, 0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; data = new Uint8Array(data); this.socket.send(data.buffer); }, 15 * 1000); } _stopHeartInterval() { if (this.heartInterval) { this.log(this.tag, 'stop heart interval'); clearInterval(this.heartInterval); this.heartInterval = null; } } startTalk(wsUrl) { return new Promise((resolve, reject) => { if (!isSupportGetUserMedia()) { return reject('not support getUserMedia'); } this.wsUrl = wsUrl; if (this._opt.testMicrophone) { this._doTalk(); } else { if (!this.wsUrl) { return reject('wsUrl is null'); } this._createWebSocket().catch(e => { reject(e); }); }
// reject when getUserMedia fails
this.once(EVENTS.talkGetUserMediaFail, () => { reject('getUserMedia fail'); });
// only resolve once getUserMedia succeeds
this.once(EVENTS.talkGetUserMediaSuccess, () => { resolve(); }); }); } setVolume(volume) { volume = parseFloat(volume).toFixed(2); if (isNaN(volume)) { return; } volume = clamp(volume, 0, 1); this.gainNode.gain.value = volume; } getOption() { return this._opt; } get volume() { return this.gainNode ?
parseFloat(this.gainNode.gain.value * 100).toFixed(0) : null; } } class Watermark { constructor(player) { this.player = player; this.globalSetting = null; const prefixUuid = uuid16(); this.defaultSettings = { watermark_id: `JbPro_${prefixUuid}`, watermark_prefix: `JbPro_mask_${prefixUuid}`, watermark_txt: "JbPro 测试水印", //水印的内容 watermark_x: 0, //水印起始位置x轴坐标 watermark_y: 0, //水印起始位置Y轴坐标 watermark_rows: 0, //水印行数 watermark_cols: 0, //水印列数 watermark_x_space: 0, //水印x轴间隔 watermark_y_space: 0, //水印y轴间隔 watermark_font: '微软雅黑', //水印字体 watermark_color: 'black', //水印字体颜色 watermark_fontsize: '18px', //水印字体大小 watermark_alpha: 0.15, //水印透明度,要求设置在大于等于0.005 watermark_width: 150, //水印宽度 watermark_height: 100, //水印长度 watermark_angle: 15, //水印倾斜度数 watermark_parent_width: 0, //水印的总体宽度(默认值:body的scrollWidth和clientWidth的较大值) watermark_parent_height: 0, //水印的总体高度(默认值:body的scrollHeight和clientHeight的较大值) watermark_parent_node: null //水印插件挂载的父元素element,不输入则默认挂在body上 }; this.player.debug.log('Watermark', 'int'); } destroy() { this._removeMark(); this.globalSetting = null; this.defaultSettings = { watermark_id: ``, //水印总体的id watermark_prefix: ``, //小水印的id前缀 watermark_txt: "JbPro 测试水印", //水印的内容 watermark_x: 0, //水印起始位置x轴坐标 watermark_y: 0, //水印起始位置Y轴坐标 watermark_rows: 0, //水印行数 watermark_cols: 0, //水印列数 watermark_x_space: 0, //水印x轴间隔 watermark_y_space: 0, //水印y轴间隔 watermark_font: '微软雅黑', //水印字体 watermark_color: 'black', //水印字体颜色 watermark_fontsize: '18px', //水印字体大小 watermark_alpha: 0.15, //水印透明度,要求设置在大于等于0.005 watermark_width: 150, //水印宽度 watermark_height: 100, //水印长度 watermark_angle: -15, //水印倾斜度数 watermark_parent_width: 0, //水印的总体宽度(默认值:body的scrollWidth和clientWidth的较大值) watermark_parent_height: 0, //水印的总体高度(默认值:body的scrollHeight和clientHeight的较大值) watermark_parent_node: null //水印插件挂载的父元素element,不输入则默认挂在body上 }; this.player.debug.log('Watermark', 'destroy'); } remove() { this._removeMark(); } load(settings) { this.globalSetting = settings; this._loadMark(settings); } resize() { this.player.debug.log('Watermark', 'resize()'); if (this.globalSetting) { this._loadMark(this.globalSetting); } } _loadMark() { let defaultSettings = this.defaultSettings; /*采用配置项替换默认值,作用类似jquery.extend*/ if (arguments.length === 1 && typeof arguments[0] === "object") { var src = arguments[0] || {}; for (let key in src) { if (src[key] && defaultSettings[key] && src[key] === defaultSettings[key]) continue; /*veronic: resolution of watermark_angle=0 not in force*/else if (src[key] || src[key] === 0) defaultSettings[key] = src[key]; } } /*如果元素存在则移除*/ var watermark_element = document.getElementById(defaultSettings.watermark_id); watermark_element && watermark_element.parentNode && watermark_element.parentNode.removeChild(watermark_element); /*如果设置水印挂载的父元素的id*/ var watermark_parent_element = typeof defaultSettings.watermark_parent_node === 'string' ? document.getElementById(defaultSettings.watermark_parent_node) : defaultSettings.watermark_parent_node; var watermark_hook_element = watermark_parent_element ? 
watermark_parent_element : document.body; /*获取页面宽度*/ const clientRect = watermark_hook_element.getBoundingClientRect(); var page_width = Math.max(watermark_hook_element.scrollWidth, watermark_hook_element.clientWidth, clientRect.width); /*获取页面最大长度*/ var page_height = Math.max(watermark_hook_element.scrollHeight, watermark_hook_element.clientHeight, clientRect.height); var setting = arguments[0] || {}; var parentEle = watermark_hook_element; var page_offsetTop = 0; var page_offsetLeft = 0; if (setting.watermark_parent_width || setting.watermark_parent_height) { /*指定父元素同时指定了宽或高*/ if (parentEle) { // page_offsetTop = parentEle.offsetTop || 0; // page_offsetLeft = parentEle.offsetLeft || 0; defaultSettings.watermark_x = defaultSettings.watermark_x + page_offsetLeft; defaultSettings.watermark_y = defaultSettings.watermark_y + page_offsetTop; } } /*创建水印外壳div*/ var otdiv = document.getElementById(defaultSettings.watermark_id); var shadowRoot = null; if (!otdiv) { otdiv = document.createElement('div'); /*创建shadow dom*/ otdiv.id = defaultSettings.watermark_id; otdiv.setAttribute('style', 'pointer-events: none !important; display: block !important'); /*判断浏览器是否支持attachShadow方法*/ if (typeof otdiv.attachShadow === 'function') { /* createShadowRoot Deprecated. Not for use in new websites. Use attachShadow*/ shadowRoot = otdiv.attachShadow({ mode: 'open' }); } else { shadowRoot = otdiv; } /*将shadow dom随机插入body内的任意位置*/ var nodeList = watermark_hook_element.children; var index = Math.floor(Math.random() * (nodeList.length - 1)) + 1; if (nodeList[index]) { watermark_hook_element.insertBefore(otdiv, nodeList[index]); } else { watermark_hook_element.appendChild(otdiv); } } else if (otdiv.shadowRoot) { shadowRoot = otdiv.shadowRoot; } /*三种情况下会重新计算水印列数和x方向水印间隔: 1、水印列数设置为0, 2、水印宽度大于页面宽度, 3、水印宽度小于于页面宽度 */ defaultSettings.watermark_cols = parseInt((page_width - defaultSettings.watermark_x) / (defaultSettings.watermark_width + defaultSettings.watermark_x_space)); var temp_watermark_x_space = parseInt((page_width - defaultSettings.watermark_x - defaultSettings.watermark_width * defaultSettings.watermark_cols) / defaultSettings.watermark_cols); defaultSettings.watermark_x_space = temp_watermark_x_space ? defaultSettings.watermark_x_space : temp_watermark_x_space; var allWatermarkWidth; defaultSettings.watermark_rows = parseInt((page_height - defaultSettings.watermark_y) / (defaultSettings.watermark_height + defaultSettings.watermark_y_space)); var temp_watermark_y_space = parseInt((page_height - defaultSettings.watermark_y - defaultSettings.watermark_height * defaultSettings.watermark_rows) / defaultSettings.watermark_rows); defaultSettings.watermark_y_space = temp_watermark_y_space ? 
defaultSettings.watermark_y_space : temp_watermark_y_space; var allWatermarkHeight; if (watermark_parent_element) { allWatermarkWidth = defaultSettings.watermark_x + defaultSettings.watermark_width * defaultSettings.watermark_cols + defaultSettings.watermark_x_space * (defaultSettings.watermark_cols - 1); allWatermarkHeight = defaultSettings.watermark_y + defaultSettings.watermark_height * defaultSettings.watermark_rows + defaultSettings.watermark_y_space * (defaultSettings.watermark_rows - 1); } else { allWatermarkWidth = page_offsetLeft + defaultSettings.watermark_x + defaultSettings.watermark_width * defaultSettings.watermark_cols + defaultSettings.watermark_x_space * (defaultSettings.watermark_cols - 1); allWatermarkHeight = page_offsetTop + defaultSettings.watermark_y + defaultSettings.watermark_height * defaultSettings.watermark_rows + defaultSettings.watermark_y_space * (defaultSettings.watermark_rows - 1); } var x; var y; for (var i = 0; i < defaultSettings.watermark_rows; i++) { if (watermark_parent_element) { y = page_offsetTop + defaultSettings.watermark_y + (page_height - allWatermarkHeight) / 2 + (defaultSettings.watermark_y_space + defaultSettings.watermark_height) * i; } else { y = defaultSettings.watermark_y + (page_height - allWatermarkHeight) / 2 + (defaultSettings.watermark_y_space + defaultSettings.watermark_height) * i; } for (var j = 0; j < defaultSettings.watermark_cols; j++) { if (watermark_parent_element) { x = page_offsetLeft + defaultSettings.watermark_x + (page_width - allWatermarkWidth) / 2 + (defaultSettings.watermark_width + defaultSettings.watermark_x_space) * j; } else { x = defaultSettings.watermark_x + (page_width - allWatermarkWidth) / 2 + (defaultSettings.watermark_width + defaultSettings.watermark_x_space) * j; } var mask_div = document.createElement('div'); var oText = document.createTextNode(defaultSettings.watermark_txt); mask_div.appendChild(oText); /*设置水印相关属性start*/ mask_div.id = defaultSettings.watermark_prefix + i + j; /*设置水印div倾斜显示*/ mask_div.style.webkitTransform = "rotate(" + defaultSettings.watermark_angle + "deg)"; mask_div.style.MozTransform = "rotate(" + defaultSettings.watermark_angle + "deg)"; mask_div.style.msTransform = "rotate(" + defaultSettings.watermark_angle + "deg)"; mask_div.style.OTransform = "rotate(" + defaultSettings.watermark_angle + "deg)"; mask_div.style.transform = "rotate(" + defaultSettings.watermark_angle + "deg)"; mask_div.style.visibility = ""; mask_div.style.position = "absolute"; /*选不中*/ mask_div.style.left = x + 'px'; mask_div.style.top = y + 'px'; mask_div.style.overflow = "hidden"; mask_div.style.zIndex = "9999999"; mask_div.style.opacity = defaultSettings.watermark_alpha; mask_div.style.fontSize = defaultSettings.watermark_fontsize; mask_div.style.fontFamily = defaultSettings.watermark_font; mask_div.style.color = defaultSettings.watermark_color; mask_div.style.textAlign = "center"; mask_div.style.width = defaultSettings.watermark_width + 'px'; mask_div.style.height = defaultSettings.watermark_height + 'px'; mask_div.style.display = "block"; mask_div.style['-ms-user-select'] = "none"; /*设置水印相关属性end*/ shadowRoot.appendChild(mask_div); } } } _removeMark() { const defaultSettings = this.defaultSettings; /*移除水印*/ var watermark_element = document.getElementById(defaultSettings.watermark_id); if (watermark_element) { var _parentElement = watermark_element.parentNode; if (_parentElement) { _parentElement.removeChild(watermark_element); } } } } class WatermarkV2 { constructor(player) { this.player = player; 
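/*
  WatermarkV2 below replaces the per-cell div grid used by Watermark with a single
  overlay whose background is a base64 data-URI SVG containing two offset, rotated
  <pattern> tiles (see _loadMark()). Minimal standalone sketch of that idea; the text,
  sizes and target element are placeholders.

    const txt = 'DEMO WATERMARK', fontSize = 18, w = 250, h = 200, angle = -15;
    const svg = '<svg xmlns="http://www.w3.org/2000/svg">' +
      `<defs><pattern id="p" x="0" y="0" width="${w}" height="${h}" patternUnits="userSpaceOnUse" patternTransform="rotate(${angle})">` +
      `<text x="0" y="${fontSize}" style="font-size:${fontSize}px; fill:black; fill-opacity:0.15">${txt}</text>` +
      '</pattern></defs><rect width="100%" height="100%" style="fill:url(#p)"/></svg>';
    const base64 = window.btoa(unescape(encodeURIComponent(svg)));
    const overlay = document.createElement('div');
    overlay.style.backgroundImage = `url(data:image/svg+xml;base64,${base64})`;
*/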
this.globalSetting = null; const prefixUuid = uuid16(); this.defaultSettings = { watermark_id: `JbPro_${prefixUuid}`, watermark_prefix: `JbPro_mask_${prefixUuid}`, watermark_txt: "JbPro 测试水印", //水印的内容 watermark_x: 0, //水印起始位置x轴坐标 watermark_y: 0, //水印起始位置Y轴坐标 watermark_rows: 0, //水印行数 watermark_cols: 0, //水印列数 watermark_x_space: 100, //水印x轴间隔 watermark_y_space: 100, //水印y轴间隔 watermark_font: '微软雅黑', //水印字体 watermark_color: 'black', //水印字体颜色 watermark_fontsize: 18, //水印字体大小 watermark_alpha: 0.15, //水印透明度,要求设置在大于等于0.005 watermark_width: 150, //水印宽度 watermark_height: 100, //水印长度 watermark_angle: -15, //水印倾斜度数 watermark_parent_width: 0, //水印的总体宽度(默认值:body的scrollWidth和clientWidth的较大值) watermark_parent_height: 0, //水印的总体高度(默认值:body的scrollHeight和clientHeight的较大值) watermark_parent_node: null //水印插件挂载的父元素element,不输入则默认挂在body上 }; this.player.debug.log('Watermark', 'int'); } destroy() { this._removeMark(); this.globalSetting = null; this.defaultSettings = { watermark_id: ``, //水印总体的id watermark_prefix: ``, //小水印的id前缀 watermark_txt: "JbPro 测试水印", //水印的内容 watermark_x: 0, //水印起始位置x轴坐标 watermark_y: 0, //水印起始位置Y轴坐标 watermark_rows: 0, //水印行数 watermark_cols: 0, //水印列数 watermark_x_space: 100, //水印x轴间隔 watermark_y_space: 100, //水印y轴间隔 watermark_font: '微软雅黑', //水印字体 watermark_color: 'black', //水印字体颜色 watermark_fontsize: 18, //水印字体大小 watermark_alpha: 0.15, //水印透明度,要求设置在大于等于0.005 watermark_width: 150, //水印宽度 watermark_height: 100, //水印长度 watermark_angle: -15, //水印倾斜度数 watermark_parent_width: 0, //水印的总体宽度(默认值:body的scrollWidth和clientWidth的较大值) watermark_parent_height: 0, //水印的总体高度(默认值:body的scrollHeight和clientHeight的较大值) watermark_parent_node: null //水印插件挂载的父元素element,不输入则默认挂在body上 }; this.player.debug.log('Watermark', 'destroy'); } remove() { this._removeMark(); } load(settings) { this.globalSetting = settings; this._loadMark(settings); } resize() { this.player.debug.log('Watermark', 'resize()'); if (this.globalSetting) { this._loadMark(this.globalSetting); } } _loadMark() { let defaultSettings = this.defaultSettings; /*采用配置项替换默认值,作用类似jquery.extend*/ if (arguments.length === 1 && typeof arguments[0] === "object") { var src = arguments[0] || {}; for (let key in src) { if (src[key] && defaultSettings[key] && src[key] === defaultSettings[key]) continue; /*veronic: resolution of watermark_angle=0 not in force*/else if (src[key] || src[key] === 0) defaultSettings[key] = src[key]; } } /*如果元素存在则移除*/ var watermark_element = document.getElementById(defaultSettings.watermark_id); watermark_element && watermark_element.parentNode && watermark_element.parentNode.removeChild(watermark_element); /*如果设置水印挂载的父元素的id*/ var watermark_parent_element = typeof defaultSettings.watermark_parent_node === 'string' ? document.getElementById(defaultSettings.watermark_parent_node) : defaultSettings.watermark_parent_node; var watermark_hook_element = watermark_parent_element ? 
watermark_parent_element : document.body; /*获取页面宽度*/ const clientRect = watermark_hook_element.getBoundingClientRect(); var page_width = Math.max(watermark_hook_element.scrollWidth, watermark_hook_element.clientWidth, clientRect.width); /*获取页面最大长度*/ var page_height = Math.max(watermark_hook_element.scrollHeight, watermark_hook_element.clientHeight, clientRect.height); var setting = arguments[0] || {}; var parentEle = watermark_hook_element; var page_offsetTop = 0; var page_offsetLeft = 0; if (setting.watermark_parent_width || setting.watermark_parent_height) { /*指定父元素同时指定了宽或高*/ if (parentEle) { // page_offsetTop = parentEle.offsetTop || 0; // page_offsetLeft = parentEle.offsetLeft || 0; defaultSettings.watermark_x = defaultSettings.watermark_x + page_offsetLeft; defaultSettings.watermark_y = defaultSettings.watermark_y + page_offsetTop; } } /*创建水印外壳div*/ var otdiv = document.getElementById(defaultSettings.watermark_id); var shadowRoot = null; if (!otdiv) { otdiv = document.createElement('div'); /*创建shadow dom*/ otdiv.id = defaultSettings.watermark_id; otdiv.setAttribute('style', 'pointer-events: none !important; display: block !important'); /*判断浏览器是否支持attachShadow方法*/ if (typeof otdiv.attachShadow === 'function') { /* createShadowRoot Deprecated. Not for use in new websites. Use attachShadow*/ shadowRoot = otdiv.attachShadow({ mode: 'open' }); } else { shadowRoot = otdiv; } /*将shadow dom随机插入body内的任意位置*/ var nodeList = watermark_hook_element.children; var index = Math.floor(Math.random() * (nodeList.length - 1)) + 1; if (nodeList[index]) { watermark_hook_element.insertBefore(otdiv, nodeList[index]); } else { watermark_hook_element.appendChild(otdiv); } } else if (otdiv.shadowRoot) { shadowRoot = otdiv.shadowRoot; } // 替换水印方案 const attr = this._calcTextSize(); const options = this.defaultSettings; const width = options.watermark_x_space + attr.width; const height = options.watermark_y_space + attr.height; const height_2 = height * 2; const width_0_5 = width / 2; const svg = `<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="none"> <defs> <pattern id="pattern1" x="0" y="0" width="${width}" height="${height_2}" patternUnits="userSpaceOnUse" patternTransform="rotate(${options.watermark_angle})"> <text x="0" y="${options.watermark_fontsize}" style="font-family:${options.watermark_font}; font-size:${options.watermark_fontsize}; fill:${options.watermark_color}; fill-opacity:${options.watermark_alpha}">${options.watermark_txt}</text> </pattern> <pattern id="pattern2" x="${width_0_5}" y="${height}" width="${width}" height="${height_2}" patternUnits="userSpaceOnUse" patternTransform="rotate(${options.watermark_angle})"> <text x="0" y="${options.watermark_fontsize}" style="font-family:${options.watermark_font}; font-size:${options.watermark_fontsize}; fill:${options.watermark_color}; fill-opacity:${options.watermark_alpha}">${options.watermark_txt}</text> </pattern> </defs> <rect x="0" y="0" width="100%" height="100%" style="fill:url(#pattern1); fill-opacity:1;" /> <rect x="0" y="0" width="100%" height="100%" style="fill:url(#pattern2); fill-opacity:1;" /> </svg>`; const svgBase64 = window.btoa(unescape(encodeURIComponent(svg))); var mask_div = document.createElement('div'); mask_div.style.position = "absolute"; mask_div.style.left = '0px'; mask_div.style.top = '0px'; mask_div.style.overflow = "hidden"; mask_div.style.zIndex = "9999999"; mask_div.style.width = page_width + 'px'; mask_div.style.height = page_height + 'px'; mask_div.style.display = 
"block"; mask_div.style['-ms-user-select'] = "none"; mask_div.style.backgroundImage = `url(data:image/svg+xml;base64,${svgBase64})`; shadowRoot.appendChild(mask_div); } _removeMark() { const defaultSettings = this.defaultSettings; /*移除水印*/ var watermark_element = document.getElementById(defaultSettings.watermark_id); if (watermark_element) { var _parentElement = watermark_element.parentNode; if (_parentElement) { _parentElement.removeChild(watermark_element); } } } _calcTextSize() { const { watermark_txt, watermark_font, watermark_fontsize } = this.globalSetting; const $span = document.createElement('span'); $span.innerHTML = watermark_txt; $span.setAttribute('style', `font-family: ${watermark_font}; font-size: ${watermark_fontsize}px; visibility: hidden; display: inline-block`); document.querySelector('body').appendChild($span); const attr = { width: $span.offsetWidth, height: $span.offsetHeight }; $span.remove(); return attr; } } /** * Desc: ptz cmd 封装 */ const PTZ_TYPE = { stop: 'stop', fiStop: 'fiStop', right: 'right', left: 'left', up: 'up', down: 'down', leftUp: 'leftUp', leftDown: 'leftDown', rightUp: 'rightUp', rightDown: 'rightDown', zoomExpand: 'zoomExpand', zoomNarrow: 'zoomNarrow', apertureFar: 'apertureFar', apertureNear: 'apertureNear', focusFar: 'focusFar', focusNear: 'focusNear', setPos: 'setPos', calPos: 'calPos', delPos: 'delPos', wiperOpen: 'wiperOpen', wiperClose: 'wiperClose', cruiseStart: 'cruiseStart', cruiseStop: 'cruiseStop' }; const PTZ_CMD_TYPE = { stop: 0x00, fiStop: 0x00, right: 0x01, // 0000 0001 left: 0x02, // 0000 0010 up: 0x08, // 0000 1000 down: 0x04, // 0000 0100 leftUp: 0x0A, // 0000 1010 leftDown: 0x06, // 0000 0110 rightUp: 0x09, // 0000 1001 rightDown: 0x05, // 0000 0101 zoomExpand: 0x10, // 镜头 放大 zoomNarrow: 0x20, // 镜头 缩小 apertureFar: 0x48, // 光圈 缩小 // apertureNear: 0x44, // 光圈 放大 focusFar: 0x42, // 聚焦 近 focusNear: 0x41, // 聚焦 远 setPos: 0x81, // 设置预设点 calPos: 0x82, // 调用预设点 delPos: 0x83, // 删除预设点 wiperOpen: 0x8C, // 雨刷开 wiperClose: 0x8D, // 雨刷关 setCruise: 0x84, // 加入巡航点 decCruise: 0x85, // 删除一个巡航点 cruiseStart: 0x88, // 开始巡航 cruiseStop: 0x00 // 停止巡航 // 透雾指令没有找到 }; const SPEED_ARRAY = [0x19, 0x32, 0x4b, 0x64, 0x7d, 0x96, 0xAF, 0xC8, 0xE1, 0xFA]; const POSITION_ARRAY = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x10]; const ZOOM_ARRAY = [0x10, 0x30, 0x50, 0x70, 0x90, 0xA0, 0xB0, 0xC0, 0xd0, 0xe0]; // 获取 direction 方向型 /** * * @param options * type: * speed:default 5 * index: * @returns {string} */ function getPTZCmd(options) { const { type, speed = 5, index = 0 } = options; const ptzSpeed = getPTZSpeed(speed); let indexValue3, indexValue4, indexValue5, indexValue6; // 第四个字节。 indexValue3 = PTZ_CMD_TYPE[type]; if (!indexValue3) { return ''; } switch (type) { case PTZ_TYPE.up: case PTZ_TYPE.down: // 字节6 垂直控制速度相对值 indexValue5 = ptzSpeed; // 字节7 地址高四位ob0000_0000 // indexValue6 = 0x00; break; case PTZ_TYPE.apertureFar: case PTZ_TYPE.apertureNear: // 字节6 光圈速度 indexValue5 = ptzSpeed; // 字节7 地址高四位ob0000_0000 // indexValue6 = 0x00; break; case PTZ_TYPE.right: case PTZ_TYPE.left: // 字节5 水平控制速度相对值 indexValue4 = ptzSpeed; // 字节7 地址高四位ob0000_0000 // indexValue6 = 0x00; break; case PTZ_TYPE.focusFar: case PTZ_TYPE.focusNear: // 字节5 聚焦速度 indexValue4 = ptzSpeed; // 字节7 地址高四位ob0000_0000 // indexValue6 = 0x00; break; case PTZ_TYPE.leftUp: case PTZ_TYPE.leftDown: case PTZ_TYPE.rightUp: case PTZ_TYPE.rightDown: // 字节5 水平控制速度相对值 indexValue4 = ptzSpeed; // 字节6 垂直控制速度相对值 indexValue5 = ptzSpeed; // 字节7 地址高四位ob0000_0000 // indexValue6 = 0x00; break; case 
PTZ_TYPE.zoomExpand: case PTZ_TYPE.zoomNarrow: // 字节7 镜头变倍控制速度相对值 zoom indexValue6 = getZoomSpeed(speed); break; case PTZ_TYPE.calPos: case PTZ_TYPE.delPos: case PTZ_TYPE.setPos: // 第五个字节 00H // indexValue4 = 0x00; // 字节6 01H~FFH 位置。 indexValue5 = getPTZPositionIndex(index); break; case PTZ_TYPE.wiperClose: case PTZ_TYPE.wiperOpen: // 字节5为辅助开关编号,取值为“1”表示雨刷控制。 indexValue4 = 0x01; break; case PTZ_TYPE.cruiseStart: // 字节5表示巡航组号 01H~FFH 位置。 indexValue4 = getPTZPositionIndex(index); // 第六个字节 00H // indexValue5 = 0x00; break; } return ptzCmdToString(indexValue3, indexValue4, indexValue5, indexValue6); } function getPTZSpeed(speed) { speed = speed || 5; const speedIndex = speed - 1; const ptzSpeed = SPEED_ARRAY[speedIndex] || SPEED_ARRAY[4]; return ptzSpeed; } function getZoomSpeed(speed) { speed = speed || 5; const speedIndex = speed - 1; const ptzSpeed = ZOOM_ARRAY[speedIndex] || ZOOM_ARRAY[4]; return ptzSpeed; } function getPTZPositionIndex(index) { return POSITION_ARRAY[index - 1]; } function ptzCmdToString(indexValue3, indexValue4, indexValue5, indexValue6) { // let cmd = []; // 首字节以05H开头 cmd[0] = 0xA5; // 组合码,高4位为版本信息v1.0,版本信息0H,低四位为校验码 cmd[1] = 0x0F; // 校验码 = (cmd[0]的高4位+cmd[0]的低4位+cmd[1]的高4位)%16 cmd[2] = 0x01; cmd[3] = 0x00; // 默认值 cmd[4] = 0x00; // 默认值 cmd[5] = 0x00; // 默认值 cmd[6] = 0x00; // 默认值 // if (indexValue3) { cmd[3] = indexValue3; } if (indexValue4) { cmd[4] = indexValue4; } if (indexValue5) { cmd[5] = indexValue5; } if (indexValue6) { cmd[6] = indexValue6; } cmd[7] = (cmd[0] + cmd[1] + cmd[2] + cmd[3] + cmd[4] + cmd[5] + cmd[6]) % 256; return bytes2HexString(cmd); } function bytes2HexString(byte) { let hexs = ""; for (let i = 0; i < byte.length; i++) { let hex = byte[i].toString(16); if (hex.length === 1) { hex = '0' + hex; } hexs += hex.toUpperCase(); } return hexs; } const LOG_MAX_SIZE = 200 * 1024; const SIMPLE_TYPE = ['Boolean', 'Number', 'String', 'Undefined', 'Null', 'Date', 'Object']; function reduceDepth(val) { if (typeof val !== 'object') { return val; } const objType = Object.prototype.toString.call(val).slice(8, -1); switch (objType) { case 'Array': case 'Uint8Array': case 'ArrayBuffer': return objType + '[' + val.length + ']'; case 'Object': return '{}'; default: return objType; } } function logable(obj, maxDepth, depth) { if (!depth) depth = 1; if (!maxDepth) maxDepth = 2; const result = {}; if (!obj || typeof obj !== 'object') { return obj; } const objType = Object.prototype.toString.call(obj).slice(8, -1); if (!SIMPLE_TYPE.includes(objType)) { return objType; } if (depth > maxDepth) { return undefined; } for (const key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { if (depth === maxDepth) { result[key] = reduceDepth(obj[key]); } else if (typeof obj[key] === 'object') { result[key] = logable(obj[key], maxDepth, depth + 1); } else { result[key] = obj[key]; } } } return result; } function nowTime() { return new Date().toLocaleString(); } class MemoryLogger { constructor(player) { let config = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; this.player = player; this.logMaxSize = (config === null || config === void 0 ? 
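/*
  Worked example for the PTZ helpers defined above (getPTZCmd/ptzCmdToString/bytes2HexString):
  panning right at speed 5 uses cmd byte 0x01 and SPEED_ARRAY[4] (0x7D), and the last byte
  is the modulo-256 sum of the previous seven. The hex string below was computed by hand
  from those tables and is worth re-checking in a console.

    const cmd = getPTZCmd({ type: 'right', speed: 5 });
    // bytes: A5 0F 01 | 01 (right) | 7D (speed) | 00 00 | 33 (checksum: (0xA5 + 0x0F + 0x01 + 0x01 + 0x7D) % 256)
    // so cmd === 'A50F01017D000033'
*/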
void 0 : config.logMaxSize) || LOG_MAX_SIZE; this.logSize = 0; this.logTextArray = []; } destroy() { this.clear(); } clear() { this.logSize = 0; this.logTextArray = []; } logCache() { let text = ''; try { for (var _len = arguments.length, logText = new Array(_len), _key = 0; _key < _len; _key++) { logText[_key] = arguments[_key]; } const finLogText = logText.map(item => logable(item)); text = '[JbPro] ' + nowTime() + JSON.stringify(finLogText); } catch (e) { return; } this.logSize += text.length; this.logTextArray.push(text); if (this.logSize > this.logMaxSize) { const delLog = this.logTextArray.shift(); this.logSize -= delLog.length; } } getLog() { return this.logTextArray.join('\n'); } getLogBlob() { const logText = this.getLog(); const blob = new Blob([logText], { type: 'text/plain' }); return blob; } download() { // 将数组里面的日志转换成字符串换行,并下载成.log文件 const logText = this.getLog(); this.clear(); const blob = new Blob([logText], { type: 'text/plain' }); saveAs(blob, 'JbPro-' + nowTime() + '.log'); } } class JessibucaPro extends Emitter { constructor() { let options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; super(); /**@type {import('./constant').DEFAULT_JESSIBUCA_OPTIONS}*/ this._opt = {}; this.TAG_NAME = 'JbPro'; this.$container = null; Object.keys(options).forEach(key => { if (isUndefined(options[key])) { throw new Error(`JbPro option "${key}" can not be undefined`); } }); this.originalOptions = options; const defaultOptions = getDefaultJessibucaOptions(); let _opt = Object.assign({}, defaultOptions, options); // 禁用用户配置 url 参数。 _opt.url = ''; if (_opt.isMulti) { _opt.debugUuid = uuid4(); } this.debug = new Debug(this); this.debug.log('JbPro', 'init'); let $container = options.container; if (typeof options.container === 'string') { $container = document.querySelector(options.container); } if (!$container) { this.debug.error('JbPro', 'JbPro need container option and now container is', options.container); throw new Error('JbPro need container option'); } // check opt.decoder is valid if (_opt.decoder && isFalse(_opt.decoder.indexOf('decoder-pro.js') !== -1 || _opt.decoder.indexOf('decoder-pro-simd.js') !== -1)) { this.debug.error('JbPro', `JbPro decoder ${_opt.decoder} must be decoder-pro.js or decoder-pro-simd.js`); throw new Error(`JbPro decoder ${_opt.decoder} must be decoder-pro.js or decoder-pro-simd.js`); } // check container node name if ($container.nodeName === 'CANVAS' || $container.nodeName === 'VIDEO') { this.debug.error('JbPro', `JbPro container type can not be ${$container.nodeName} type`); throw new Error(`JbPro container type can not be ${$container.nodeName} type`); } if (_opt.videoBuffer >= _opt.heartTimeout) { this.debug.error('JbPro', `JbPro videoBuffer ${_opt.videoBuffer}s must be less than heartTimeout ${_opt.heartTimeout}s`); throw new Error(`JbPro videoBuffer ${_opt.videoBuffer}s must be less than heartTimeout ${_opt.heartTimeout}s`); } if (this._checkHasCreated($container)) { this.debug.error('JbPro', 'JbPro container has been created and can not be created again', $container); throw new Error(`JbPro container has been created and can not be created again`, $container); } $container.classList.add('jb-pro-container'); setElementDataset($container, CONTAINER_DATA_SET_KEY, uuid16()); if (isFalse(_opt.isLive)) { const $videoElement = document.createElement('video'); $videoElement.muted = true; $videoElement.setAttribute("controlsList", "nodownload"); $videoElement.disablePictureInPicture = 'disablePictureInPicture'; $videoElement.style.position 
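// [Usage sketch, comment only] MemoryLogger above keeps a bounded in-memory log: logCache() runs every
// argument through logable() (objects flattened to two levels), prefixes '[JbPro] ' plus a local timestamp,
// and appends the JSON string; once logSize exceeds logMaxSize (200 * 1024 characters by default) the oldest
// entry is shifted off. getLog() joins entries with '\n', getLogBlob() wraps that in a text/plain Blob,
// download() saves it via saveAs() and clears the buffer.
//   const logger = new MemoryLogger(player, { logMaxSize: 100 * 1024 }); // `player` assumed to exist
//   logger.logCache('demux', { type: 'flv', size: 1024 });
//   const blob = logger.getLogBlob();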
= "absolute"; $videoElement.style.top = 0; $videoElement.style.left = 0; $videoElement.style.height = "100%"; $videoElement.style.width = '100%'; $container.appendChild($videoElement); this.$videoElement = $videoElement; this.$container = $container; this._opt = _opt; return; } delete _opt.container; // s -> ms if (isNotEmpty(_opt.videoBuffer)) { _opt.videoBuffer = Number(_opt.videoBuffer) * 1000; } // s -> ms if (isNotEmpty(_opt.videoBufferDelay)) { _opt.videoBufferDelay = Number(_opt.videoBufferDelay) * 1000; } // s -> ms if (isNotEmpty(_opt.networkDelay)) { _opt.networkDelay = Number(_opt.networkDelay) * 1000; } // s -> ms if (isNotEmpty(_opt.aiFaceDetectInterval)) { _opt.aiFaceDetectInterval = Number(_opt.aiFaceDetectInterval) * 1000; } // s -> ms if (isNotEmpty(_opt.aiObjectDetectInterval)) { _opt.aiObjectDetectInterval = Number(_opt.aiObjectDetectInterval) * 1000; } // setting if (isNotEmpty(_opt.timeout)) { if (isEmpty(_opt.loadingTimeout)) { _opt.loadingTimeout = _opt.timeout; } if (isEmpty(_opt.heartTimeout)) { _opt.heartTimeout = _opt.timeout; } } if (isNotEmpty(_opt.autoWasm)) { if (isEmpty(_opt.decoderErrorAutoWasm)) { _opt.decoderErrorAutoWasm = _opt.autoWasm; } if (isEmpty(_opt.hardDecodingNotSupportAutoWasm)) { _opt.hardDecodingNotSupportAutoWasm = _opt.autoWasm; } } if (isNotEmpty(_opt.aiFaceDetectLevel) && isEmpty(_opt.aiFaceDetectWidth)) { const width = AI_FACE_DETECTOR_LEVEL[_opt.aiFaceDetectLevel]; if (width) { _opt.aiFaceDetectWidth = width; } } if (isNotEmpty(_opt.aiObjectDetectLevel) && isEmpty(_opt.aiObjectDetectWidth)) { const width = AI_OBJECT_DETECTOR_LEVEL[_opt.aiObjectDetectLevel]; if (width) { _opt.aiObjectDetectWidth = width; } } if (isTrue(_opt.isCrypto)) { _opt.isM7sCrypto = true; } this._opt = _opt; this._destroyed = false; this.$container = $container; this._tempPlayBgObj = {}; this._tempVideoLastIframeInfo = {}; this._tempPlayerIsMute = true; // 默认是禁音状态 this._loadingTimeoutReplayTimes = 0; this._heartTimeoutReplayTimes = 0; this.events = new Events$1(this); if (this._opt.isUseNewFullscreenWatermark) { this.watermark = new WatermarkV2(this); } else { this.watermark = new Watermark(this); } this.memoryLogger = new MemoryLogger(this); this._websocket1006ErrorRetryLog = []; this._mseDecodeErrorRetryLog = []; this._wcsDecodeErrorRetryLog = []; this._initPlayer($container, _opt); this._initWatermark(); this.debug.log('JbPro', `init success and version is ${proVersionTime}`); console.log(`JbPro Version is ${proVersionTime}`); } destroy() { return new Promise((resolve, reject) => { this.debug.log('JbPro', 'destroy()'); this._destroyed = true; this.off(); // just for isLive = false if (this.$videoElement) { this.$videoElement.pause(); this.$videoElement.currentTime = 0; if (this.$videoElement.srcObject) { this.$videoElement.srcObject = null; this.$videoElement.removeAttribute('srcObject'); } if (this.$videoElement.src) { this.$videoElement.src = ''; this.$videoElement.removeAttribute('src'); } if (this.$container) { this.$container.removeChild(this.$videoElement); } this.$videoElement = null; } if (this.player) { this.player.destroy().then(() => { this.player = null; this._destroy(); setTimeout(() => { resolve(); }, 0); }).catch(() => { reject(); }); } else { this._destroy(); setTimeout(() => { resolve(); }, 0); } }); } _destroy() { if (this.events) { this.events.destroy(); this.events = null; } if (this.talk) { this.talk.destroy(); this.talk = null; } if (this.watermark) { this.watermark.destroy(); this.watermark = null; } if (this.memoryLogger) { 
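// [Usage sketch, comment only; option values are illustrative] How the constructor above normalizes options:
// second-based options (videoBuffer, videoBufferDelay, networkDelay, aiFaceDetectInterval,
// aiObjectDetectInterval) are multiplied by 1000; `timeout` backfills loadingTimeout/heartTimeout when they
// are unset; `autoWasm` backfills decoderErrorAutoWasm and hardDecodingNotSupportAutoWasm; isCrypto implies
// isM7sCrypto. The container must not be a <canvas>/<video> element and videoBuffer must stay below heartTimeout.
//   const jessibuca = new JessibucaPro({
//     container: '#playerBox', // selector or DOM node; the id is an assumption
//     videoBuffer: 0.2,        // seconds, stored internally as 200 ms
//     timeout: 10,             // becomes loadingTimeout = heartTimeout = 10
//     isNotMute: false
//   });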
this.memoryLogger.destroy(); this.memoryLogger = null; } if (this.$container) { this.$container.classList.remove('jb-pro-container'); this.$container.classList.remove('jb-pro-fullscreen-web'); removeElementDataset(this.$container, CONTAINER_DATA_SET_KEY); this.$container = null; } this._resetOpt(); this._tempPlayBgObj = null; this._tempVideoLastIframeInfo = null; this._tempPlayerIsMute = true; this._resetReplayTimes(); this.debug && this.debug.log('JbPro', 'destroy end'); } _resetOpt() { this._opt = getDefaultJessibucaOptions(); } _resetReplayTimes() { this._loadingTimeoutReplayTimes = 0; this._heartTimeoutReplayTimes = 0; this._websocket1006ErrorRetryLog = []; this._mseDecodeErrorRetryLog = []; this._wcsDecodeErrorRetryLog = []; } _getOriginalOpt() { const _opt = getDefaultJessibucaOptions(); return Object.assign({}, _opt, this.originalOptions); } _initPlayer($container, options) { this.player = new Player($container, options); this._bindEvents(); } _initTalk() { let options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; if (this.talk) { this.talk.destroy(); this.talk = null; } if (this.player) { options.debug = this.player._opt.debug; } this.talk = new Talk(this.player, options); this.debug.log('JbPro', '_initTalk', this.talk.getOption()); this._bindTalkEvents(); } _resetPlayer() { let options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; return new Promise((resolve, reject) => { this.debug.log(this.TAG_NAME, '_resetPlayer()', JSON.stringify(options)); const _init = () => { this._opt.url = ''; // reset url this._opt.playOptions = {}; // reset playOptions this._opt = Object.assign(this._opt, options); this._initPlayer(this.$container, this._opt); }; if (this.player) { this.player.destroy().then(() => { this.player = null; _init(); setTimeout(() => { this.debug.log(this.TAG_NAME, '_resetPlayer() end'); resolve(); }, 0); }); } else { _init(); setTimeout(() => { this.debug.log(this.TAG_NAME, '_resetPlayer() end'); resolve(); }, 0); } }); } _bindEvents() { var _this = this; // 对外的事件 Object.keys(JESSIBUCA_EVENTS).forEach(key => { this.player.on(JESSIBUCA_EVENTS[key], function () { for (var _len = arguments.length, value = new Array(_len), _key = 0; _key < _len; _key++) { value[_key] = arguments[_key]; } _this.emit(key, ...value); }); }); if (this._opt.playFailedAndPausedShowMessage) { this.on(EVENTS.playFailedAndPaused, code => { this.player && this.player.showTipsMessageByCode(code); }); } this.player.once(EVENTS.beforeDestroy, () => { // 单独的事件 this.emit(EVENTS.close); this.destroy().then(() => {}).catch(e => {}); }); // fullscreen watermark this.player.on(EVENTS.resize, () => { if (this.watermark) { this.watermark.resize(); } }); this.player.on(EVENTS.fullscreen, () => { if (this.watermark) { this.watermark.resize(); } }); this.player.on(EVENTS.videoInfo, () => { if (this.player) { if (this.player.singleWatermark) { this.player.singleWatermark.resize(); } if (this.player.ghostWatermark) { this.player.ghostWatermark.resize(); } if (this.player.dynamicWatermark) { this.player.dynamicWatermark.resize(); } } }); this.player.on(EVENTS.memoryLog, function () { _this.memoryLogger.logCache(...arguments); }); this.player.on(EVENTS.downloadMemoryLog, () => { this.downloadMemoryLog(); }); } _bindTalkEvents() { // 对外的事件 Object.keys(TALK_EVENTS).forEach(key => { this.player.on(TALK_EVENTS[key], value => { this.emit(key, value); }); }); } _initWatermark() { if (isNotEmptyObject(this._opt.fullscreenWatermarkConfig)) { const config = 
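// [Note, comment only] _bindEvents() above re-emits every inner Player event listed in JESSIBUCA_EVENTS on
// the JessibucaPro instance, so consumers listen on the wrapper rather than on the internal Player
// (the event name string below is an assumption, taken from the keys used in this file):
//   jessibuca.on('playFailedAndPaused', (code) => console.warn('stopped:', code));
// _resetPlayer(options) destroys the inner Player, clears url/playOptions, merges `options` into this._opt
// and rebuilds the Player on the same container.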
formatFullscreenWatermarkOptions(this.$container, this._opt.fullscreenWatermarkConfig); if (!config.watermark_txt) { this.debug.warn('JbPro', 'fullscreenWatermarkConfig text is empty'); return; } this.watermark.load(config); } } // check player has created on this elements _checkHasCreated(element) { if (!element) return false; const gbProV = getElementDataset(element, CONTAINER_DATA_SET_KEY); if (gbProV) { return true; } return false; } // is destroyed isDestroyed() { return this._destroyed; } /** * * @returns {{}|({rotate: number, isResize: boolean, playbackForwardMaxRateDecodeIFrame: number, loadingTimeoutReplayTimes: number, wasmDecodeErrorReplay: boolean, demuxType: string, useWCS: boolean, useOffscreen: boolean, loadingTimeout: number, playbackConfig: {playList: [], fps: string}, timeout: number, wasmUseVideoRender: boolean, heartTimeoutReplay: boolean, isNotMute: boolean, protocol: number, useMSE: boolean, operateBtns: {play: boolean, fullscreen: boolean, record: boolean, screenshot: boolean, audio: boolean}, isHls: boolean, hotKey: boolean, heartTimeout: number, isFlv: boolean, openWebglAlignment: boolean, hasVideo: boolean, loadingTimeoutReplay: boolean, hasAudio: boolean, debug: boolean, loadingText: string, useWasm: boolean, keepScreenOn: boolean, isFullResize: boolean, watermarkConfig: {}, forceNoOffscreen: boolean, videoBuffer: number, decoder: string, isWebrtc: boolean, url: string, showBandwidth: boolean, hiddenAutoPause: boolean, autoWasm: boolean, heartTimeoutReplayTimes: number, playType: string, background: string, hasControl: boolean, controlAutoHide: boolean, playbackDelayTime: number, playbackFps: number, supportDblclickFullscreen: boolean, wcsUseVideoRender: boolean} & *)} */ getOption() { if (this.player) { return this.player.getOption(); } return {}; } /** * 是否开启控制台调试打印 * @param value {Boolean} */ setDebug(value) { this.debug.log('JbPro', `setDebug() ${value}`); this._opt.debug = !!value; if (this.player) { this.player.updateOption({ debug: !!value }, true); } else { this.debug.warn('JbPro', 'player is not init'); } } getIsDebug() { let result = false; if (this.player) { result = this.player._opt.debug; } return result; } /** * */ mute() { this.debug.log('JbPro', 'mute()'); this.player && this.player.mute(true); } /** * */ cancelMute() { this.debug.log('JbPro', 'cancelMute()'); this.player && this.player.mute(false); } /** * * @param value {number} */ setVolume(value) { this.debug.log('JbPro', `setVolume() ${value}`); this.player && (this.player.volume = value); } /** * 获取当前音量 * @returns {null} */ getVolume() { let result = null; if (this.player) { result = this.player.volume; result = parseFloat(result).toFixed(2); } return result; } /** * */ audioResume() { this.debug.log('JbPro', 'audioResume()'); if (this.player && this.player.audio) { this.player.audio.audioEnabled(true); } else { this.debug.warn('JbPro', 'audioResume error'); } } /** * 设置超时时长, 单位秒 在连接成功之前和播放中途,如果超过设定时长无数据返回,则回调timeout事件 * @param value {number} */ setTimeout(time) { this.debug.log('JbPro', `setTimeout() ${time}`); time = Number(time); if (isNaN(time)) { this.debug.warn('JbPro', `setTimeout error: ${time} is not a number`); return; } this._opt.timeout = time; this._opt.loadingTimeout = time; this._opt.heartTimeout = time; if (this.player) { this.player.updateOption({ timeout: time, loadingTimeout: time, heartTimeout: time }); } } /** * * @param type {number}: 0,1,2 */ setScaleMode(type) { this.debug.log('JbPro', `setScaleMode() ${type}`); if (this.player) { this.player.setScaleMode(type); } 
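// [Usage sketch, comment only] The simple control methods above: setTimeout(n) writes n (seconds) into
// timeout, loadingTimeout and heartTimeout; getVolume() returns the inner player volume as a string with two
// decimals; mute()/cancelMute() forward to player.mute(true/false).
//   jessibuca.setDebug(true);
//   jessibuca.setTimeout(15);
//   jessibuca.setVolume(0.5);
//   jessibuca.cancelMute(); // audible playback usually still needs a user gesture (browser autoplay policy)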
else { this.debug.warn('JbPro', 'setScaleMode() player is null'); } } /** * * @returns {Promise<commander.ParseOptionsResult.unknown>} */ pause() { let isClear = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; return new Promise((resolve, reject) => { this.debug.log('JbPro', `pause() ${isClear}`); if (this._opt.pauseAndNextPlayUseLastFrameShow || this._opt.replayUseLastFrameShow) { this._tempPlayBgObj = this._getVideoLastIframeInfo(); } this._tempPlayerIsMute = this.isMute(); this._pause(isClear).then(res => { resolve(res); }).catch(e => { reject(e); }); }); } /** * * @param isClear 默认 false,不清除画面 * @returns {Promise<unknown>} * @private */ _pause() { let isClear = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; return new Promise((resolve, reject) => { this.debug.log('JbPro', `_pause() ${isClear}`); if (this.isDestroyed()) { return reject('JbPro is destroyed'); } this._resetReplayTimes(); if (this.player) { this.player.pause(isClear).then(res => { resolve(res); }).catch(err => { reject(err); }); } else { reject('player is null'); } }); } /** * */ close() { return new Promise((resolve, reject) => { this.debug.log('JbPro', 'close()'); // clear url this._opt.url = ''; // reset this._resetReplayTimes(); if (this.player) { this.player.close().then(() => { resolve(); }).catch(e => { reject(e); }); } else { reject(`player is null`); } }); } /** * */ clearView() { this.debug.log('JbPro', 'clearView()'); if (this.player && this.player.video) { if (this.getRenderType() === RENDER_TYPE.canvas) { this.player.video.clearView(); } else { this.debug.warn('JbPro', 'clearView', 'render type is video, not support clearView, please use canvas render type'); } } else { this.debug.warn('JbPro', 'clearView', 'player is null'); } } /** * * @param url {string} * @returns {Promise<unknown>} */ play() { let url = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ''; let options = arguments.length > 1 && arguments[1] !== undefined ? 
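// [Usage sketch, comment only] The promise-based pause/close flow above: pause(isClear) snapshots the last
// rendered frame (when pauseAndNextPlayUseLastFrameShow / replayUseLastFrameShow is enabled) plus the current
// mute state before delegating to the inner player; close() also clears this._opt.url; clearView() only works
// with the canvas render type.
//   await jessibuca.pause();     // keep the last frame on screen
//   await jessibuca.pause(true); // clear the picture as well
//   await jessibuca.close();     // stop the stream and forget the url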
arguments[1] : {}; return new Promise((resolve, reject) => { this.debug.log('JbPro', `play() ${url}`, JSON.stringify(options)); if (!url && !this._opt.url) { this.emit(EVENTS.error, EVENTS_ERROR.playError); reject('url is null and this._opt.url is null'); return; } // if (url) { url = ('' + url).trim(); // if (url.indexOf('http:') === -1 && url.indexOf('https:') === -1 && url.indexOf('webrtc:') === -1 && url.indexOf('ws:') === -1 && url.indexOf('wss:') === -1 && url.indexOf('wt:') === -1 && url.indexOf('artc:') === -1) { return reject(`url ${url} must be "http:" or "https:" or "webrtc:" or "ws:" or "wss:" or "wt:" or "artc:" protocol`); } } // todo:这里校验url地址是否合法 // if(veriify(url)){ // reject('url is not valid'); // return ; // } if (isFalse(this._opt.isLive)) { this.$videoElement.controls = 'controls'; this.$videoElement.muted = false; this.$videoElement.src = url; this.$videoElement.play(); resolve(this.$videoElement); return; } if (this._opt.isM7sCrypto) { let cryptoKey = options.cryptoKey || this._opt.playOptions.cryptoKey; let cryptoIV = options.cryptoIV || this._opt.playOptions.cryptoIV; if (this._opt.m7sCryptoKey && !(cryptoKey && cryptoIV)) { const tempArray = this._opt.m7sCryptoKey.split('.'); cryptoKey = b64toUin8(tempArray[0]); cryptoIV = b64toUin8(tempArray[1]); } if (cryptoKey && cryptoIV) { this._opt.playOptions.cryptoKey = cryptoKey; this._opt.playOptions.cryptoIV = cryptoIV; options.cryptoIV = cryptoIV; options.cryptoKey = cryptoKey; } else { const _url = url || this._opt.url; this._cryptoPlay(_url).then(_ref => { let { cryptoIV, cryptoKey } = _ref; this._opt.playOptions.cryptoKey = cryptoKey; this._opt.playOptions.cryptoIV = cryptoIV; options.cryptoIV = cryptoIV; options.cryptoKey = cryptoKey; this._playBefore(url, options).then(() => { resolve(); }).catch(e => { reject(e); }); }).catch(e => { reject(e); }); return; } } else if (this._opt.isXorCrypto) { let cryptoKey = options.cryptoKey || this._opt.playOptions.cryptoKey; let cryptoIV = options.cryptoIV || this._opt.playOptions.cryptoIV; if (this._opt.xorCryptoKey && !(cryptoKey && cryptoIV)) { const tempArray = this._opt.xorCryptoKey.split('.'); cryptoKey = b64toUin8(tempArray[0]); cryptoIV = b64toUin8(tempArray[1]); } if (cryptoKey && cryptoIV) { this._opt.playOptions.cryptoKey = cryptoKey; this._opt.playOptions.cryptoIV = cryptoIV; options.cryptoIV = cryptoIV; options.cryptoKey = cryptoKey; } } this._playBefore(url, options).then(() => { resolve(); }).catch(e => { reject(e); }); }); } _playBefore(url, options) { return new Promise((resolve, reject) => { if (this.player) { if (url) { // url 相等的时候。 if (this._opt.url) { // 存在相同的 url if (url === this._opt.url) { // 正在播放 if (this.player.playing) { this.debug.log('JbPro', '_playBefore', 'playing and resolve()'); resolve(); } else { // pause -> play this.debug.log('JbPro', '_playBefore', 'this._opt.url === url and ' + 'pause -> play and destroy play'); let originalOpt = this._getOriginalOpt(); if (this._opt.pauseAndNextPlayUseLastFrameShow || this._opt.replayUseLastFrameShow) { if (this._tempPlayBgObj && this._tempPlayBgObj.loadingBackground) { originalOpt = Object.assign(originalOpt, this._tempPlayBgObj); } } // if previous is not mute, then play is not mute if (isFalse(this._tempPlayerIsMute)) { originalOpt.isNotMute = true; // reset this._tempPlayerIsMute = true; } const url = this._opt.url; const playOptions = this._opt.playOptions; this._resetPlayer(originalOpt).then(() => { this._play(url, playOptions).then(() => { resolve(); }).catch(e => { this.debug.error('JbPro', 
'_playBefore this.player.play error', e); this.emit(EVENTS.crashLog, this.getCrashLog('this.player.play 1', e)); reject(e); }); }).catch(e => { this.debug.error('JbPro', '_resetPlayer error', e); }); } } else { this.debug.log('JbPro', '_playBefore', ` this._url.url is ${this._opt.url} and new url is ${url} and destroy and play new url`); // url 发生改变了, 有些参数就不适用了。 // 需要重置options const originalOpt = this._getOriginalOpt(); this._resetPlayer(originalOpt).then(() => { this._play(url, options).then(() => { resolve(); }).catch(e => { this.debug.error('JbPro', '_playBefore _play error', e); this.emit(EVENTS.crashLog, this.getCrashLog('this.player.play 2', e)); reject(e); }); }).catch(e => { this.debug.error('JbPro', '_resetPlayer error', e); }); } } else { // this._opt.url is null new play this._play(url, options).then(() => { resolve(); }).catch(e => { this.debug.error('JbPro', '_playBefore _play error', e); this.emit(EVENTS.crashLog, this.getCrashLog('this.player.play 3', e)); reject(e); }); } } else { // url 不存在的时候 // 就是从 play -> pause -> play let originalOpt = this._getOriginalOpt(); if (this._opt.pauseAndNextPlayUseLastFrameShow || this._opt.replayUseLastFrameShow) { if (this._tempPlayBgObj && this._tempPlayBgObj.loadingBackground) { originalOpt = Object.assign(originalOpt, this._tempPlayBgObj); } } // if previous is not mute, then play is not mute if (isFalse(this._tempPlayerIsMute)) { originalOpt.isNotMute = true; // reset this._tempPlayerIsMute = true; } const url = this._opt.url; const playOptions = this._opt.playOptions; this._resetPlayer(originalOpt).then(() => { this._play(url, playOptions).then(() => { resolve(); }).catch(e => { this.debug.error('JbPro', '_playBefore _play error', e); this.emit(EVENTS.crashLog, this.getCrashLog('this.player.play 4', e)); reject(e); }); }).catch(e => { this.debug.error('JbPro', '_resetPlayer error', e); }); } } else { // this.player is null if (url) { // new play this._play(url, options).then(() => { resolve(); }).catch(e => { this.debug.error('JbPro', '_playBefore _play error', e); this.emit(EVENTS.crashLog, this.getCrashLog('this.player.play 5', e)); reject(e); }); } else { this._play(this._opt.url, this._opt.playOptions).then(() => { resolve(); }).catch(e => { this.debug.error('JbPro', '_playBefore _play error', e); this.emit(EVENTS.crashLog, this.getCrashLog('this.player.play 6', e)); reject(e); }); } } }); } _cryptoPlay(url) { return new Promise((resolve, reject) => { const relativePath = getUrlRelativePath(url); let cryptoKeyUrl = this._opt.cryptoKeyUrl; let href = ''; const urlObj = resolveUrl(url); if (cryptoKeyUrl) { href = cryptoKeyUrl; if (this._opt.isM7sCrypto && href.indexOf(`${CRYPTO_KEY_URL_PATH}?stream=`) === -1) { const cryptoKeyUrlObj = resolveUrl(cryptoKeyUrl); href = cryptoKeyUrlObj.origin + CRYPTO_KEY_URL_PATH + `?stream=${relativePath}`; } } else { cryptoKeyUrl = urlObj.origin + CRYPTO_KEY_URL_PATH; href = cryptoKeyUrl + `?stream=${relativePath}`; } this.player.debug.log('JbPro', `_cryptoPlay() cryptoKeyUrl: ${href} and opt.cryptoKeyUrl: ${this._opt.cryptoKeyUrl}`); getM7SCryptoStreamKey(href).then(res => { if (res) { const tempArray = res.split('.'); const cryptoKey = b64toUin8(tempArray[0]); const cryptoIV = b64toUin8(tempArray[1]); if (cryptoIV && cryptoKey) { resolve({ cryptoIV, cryptoKey }); } else { reject(`get cryptoIV or cryptoKey error`); } } else { reject(`cryptoKeyUrl: getM7SCryptoStreamKey ${href} res is null`); } }).catch(e => { reject(e); }); }); } /** * playback 录像回放 * @param url {string} */ playback(url) { let 
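// [Usage sketch, comment only; addresses are examples] play() and the crypto key flow above: play(url) only
// accepts http(s):, ws(s):, webrtc:, wt: and artc: urls. With isM7sCrypto enabled and no key supplied,
// _cryptoPlay() requests origin + CRYPTO_KEY_URL_PATH + '?stream=<relative path>' and splits the response on
// '.' into a base64 key / IV pair (decoded via b64toUin8) before playback starts.
//   await jessibuca.play('ws://192.168.1.10/live/stream1.flv');
//   await jessibuca.play('https://demo.example.com/live/stream1.flv', {
//     cryptoKey: keyBytes, cryptoIV: ivBytes // hypothetical Uint8Array values
//   });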
options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; return new Promise((resolve, reject) => { this.debug.log('JbPro', `playback() ${url}, options: ${JSON.stringify(options)}`); if (isFalse(this._opt.isLive)) { return reject(`this._opt.isLive is false, can not playback`); } const defaultOptions = getDefaultPlayerOptions(); const playbackConfig = Object.assign({}, defaultOptions.playbackConfig, this._opt.playbackConfig, options); if (!playbackConfig.isUseFpsRender) { if (playbackConfig.isCacheBeforeDecodeForFpsRender) { playbackConfig.isCacheBeforeDecodeForFpsRender = false; this.debug.warn('JbPro', 'playbackConfig.isUseFpsRender is false, isCacheBeforeDecodeForFpsRender can not be ture, isCacheBeforeDecodeForFpsRender is set to false'); } } if (playbackConfig.rateConfig.length === 0) { if (playbackConfig.showRateBtn) { playbackConfig.showRateBtn = false; this.debug.warn('JbPro', 'playbackConfig.rateConfig.length is 0, showRateBtn can not be ture, showRateBtn is set to false'); } } // if (playbackConfig.controlType === PLAYBACK_CONTROL_TYPE.simple) ; this._resetPlayer({ videoBuffer: 0, // 录播的时候不缓存。 playbackConfig, playType: PLAY_TYPE.playbackTF, openWebglAlignment: true, useMSE: playbackConfig.useMSE, // useWCS: playbackConfig.useWCS, // 录播的时候使用 WCS 解码器 useSIMD: true // 录播的时候默认使用 SIMD 解码器 }).then(() => { this.play(url, options).then(() => { resolve(); }).catch(e => { reject(e); }); }).catch(e => { reject(e); }); }); } // playback pause playbackPause() { let isPause = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; this.debug.log('JbPro', `playbackPause() ${isPause}`); if (this._opt.playType === PLAY_TYPE.player) { return Promise.reject(`playType is player, can not call playbackPause method`); } return new Promise((resolve, reject) => { if (!this.player) { return reject('player is null'); } if (isTrue(isPause)) { this._pause().then(() => { resolve(); }).catch(e => { reject(e); }); } else { this.player.playbackPause = true; resolve(); } }); } // playback pause - > resume playbackResume() { this.debug.log('JbPro', `playbackResume()`); if (this._opt.playType === PLAY_TYPE.player) { return Promise.reject('playType is player, can not call playbackResume method'); } return new Promise((resolve, reject) => { if (!this.player) { return reject('player is null'); } this.player.playbackPause = false; resolve(); }); } /** * playback 快放 1倍,2倍,4倍,8倍,16倍,32倍 支持范围 0.1 - 32 * @param rate * @returns {Promise<unknown>} */ forward(rate) { this.debug.log('JbPro', `forward() ${rate}`); if (isFalse(this._opt.isLive) || this._opt.playType === PLAY_TYPE.player) { return Promise.reject('forward() method only just for playback type'); } if (!isNumber(Number(rate))) { return Promise.reject(`forward() params "rate": ${rate} must be number type`); } return new Promise((resolve, reject) => { if (this.player) { rate = clamp(Number(rate), 0.1, 32); // update config this.player.decoderWorker && this.player.decoderWorker.updateWorkConfig({ key: 'playbackRate', value: rate }); this.player.playback.setRate(rate); this.player.video && this.player.video.setRate(rate); this.player.audio && this.player.audio.setRate(rate); if (this.player.isPlaybackUseWCS() || this.player.isPlaybackUseMSE()) { this.player.demux.dropBuffer$2(); if (this.player.isPlaybackCacheBeforeDecodeForFpsRender()) { this.player.demux.initPlaybackCacheLoop(); } } resolve(); } else { reject('player is not playing'); } }); } /** * 等同 forward 方法。 * @param rate * @returns {Promise<*>} */ playbackForward(rate) { 
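// [Usage sketch, comment only] Playback-rate control above: forward(rate) clamps the value to [0.1, 32] and
// pushes it to the decoder worker, the playback timeline and the video/audio pipelines; normal() is simply
// forward(1). playbackPause(true) actually pauses the inner player, while the default playbackPause() only
// sets player.playbackPause (resumed by playbackResume()). All of these reject while playType is still 'player'.
//   await jessibuca.playback('ws://192.168.1.10/playback/ch1', { isUseFpsRender: true }); // address is an example
//   await jessibuca.forward(4); // 4x
//   await jessibuca.normal();   // back to 1x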
return this.forward(rate); } /** * playback 快放->恢复 * @returns {Promise<unknown>} */ normal() { return this.forward(1); } /** * 等同 normal 方法。 * @returns {Promise<*>} */ playbackNormal() { return this.normal(); } /** * playback 更新TF卡流只解码i帧播放倍率,支持playback()之前调用。 * @param rate */ updatePlaybackForwardMaxRateDecodeIFrame(rate) { this.debug.log('JbPro', `updatePlaybackForwardMaxRateDecodeIFrame() ${rate}`); rate = Number(rate); rate = parseInt(rate, 10); rate = clamp(rate, 1, 8); this._opt.playbackForwardMaxRateDecodeIFrame = rate; if (this.player) { this.player.updateOption({ playbackForwardMaxRateDecodeIFrame: rate }, true); } else { this.debug.warn('JbPro', `updatePlaybackForwardMaxRateDecodeIFrame() player is null`); } } /** * playback set playback start time and clear buffer * for seek callback * @param timestamp */ setPlaybackStartTime(timestamp) { this.debug.log('JbPro', `setPlaybackStartTime() ${timestamp}`); const strLength = getStrLength(timestamp); if (!this.player) { this.debug.warn('JbPro', 'setPlaybackStartTime() player is null'); return; } if (!this.player.isPlayback()) { this.debug.warn('JbPro', 'setPlaybackStartTime() playType is not playback'); return; } if (strLength < 10 && timestamp !== 0 && this.player.playback.isControlTypeNormal()) { this.debug.warn('JbPro', `setPlaybackStartTime() control type is normal and timestamp: ${timestamp} is not valid`); return; } if (this.player.playback.isControlTypeSimple() && timestamp > this.player.playback.totalDuration) { this.debug.warn('JbPro', `setPlaybackStartTime() control type is simple and timestamp: ${timestamp} is more than ${this.player.playback.totalDuration}`); return; } if (this.player.playing) { if (this.player.playback.isControlTypeNormal()) { if (strLength === 10) { timestamp = timestamp * 1000; } } this.player.playback.setStartTime(timestamp); this.playbackClearCacheBuffer(); } } /** * playback set playback show precision * @param showPrecision */ setPlaybackShowPrecision(showPrecision) { this.debug.log('JbPro', `setPlaybackShowPrecision() ${showPrecision}`); if (!this.player) { this.debug.warn('JbPro', 'player is null'); return; } if (!this.player.isPlayback()) { this.debug.warn('JbPro', 'playType is not playback'); return; } if (!this.player.playback.isControlTypeNormal()) { this.debug.warn('JbPro', 'control type is not normal , not support!'); return; } this.player.playback.setShowPrecision(showPrecision); } playbackCurrentTimeScroll() { this.debug.log('JbPro', `playbackCurrentTimeScroll()`); if (!this.player) { this.debug.warn('JbPro', 'player is null'); return; } if (!this.player.isPlayback()) { this.debug.warn('JbPro', 'playType is not playback'); return; } if (!this.player.playback.isControlTypeNormal()) { this.debug.warn('JbPro', 'control type is not normal , not support!'); return; } this.player.playback.currentTimeScroll(); } /** * */ playbackClearCacheBuffer() { this.debug.log('JbPro', `playbackClearCacheBuffer()`); if (!this.player) { this.debug.warn('JbPro', 'player is null'); return; } if (!this.player.isPlayback()) { this.debug.warn('JbPro', 'playType is not playback'); return; } this.player.video && this.player.video.clear(); this.player.audio && this.player.audio.clear(); this.clearBufferDelay(); } getPlaybackCurrentRate() { if (!this.player) { this.debug.warn('JbPro', 'player is null'); return; } if (!this.player.isPlayback()) { this.debug.warn('JbPro', 'playType is not playback'); return; } return this.player.getPlaybackRate(); } /** * * @param timestamp 单位 ms */ 
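// [Usage sketch, comment only] The seek helpers above: setPlaybackStartTime() takes a 10-digit (seconds) or
// millisecond timestamp for the 'normal' control type (seconds are multiplied by 1000), or an offset no
// larger than playback.totalDuration for the 'simple' type, then clears the cached audio/video buffers.
// updatePlaybackForwardMaxRateDecodeIFrame() clamps its argument to [1, 8]; above that rate only I-frames
// are decoded (per the JSDoc above).
//   jessibuca.setPlaybackStartTime(1700000000); // unix seconds, converted to ms internally
//   jessibuca.updatePlaybackForwardMaxRateDecodeIFrame(4);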
updatePlaybackLocalOneFrameTimestamp(timestamp) { this.debug.log('JbPro', `updatePlaybackLocalOneFrameTimestamp() ${timestamp}`); if (!this.player) { this.debug.warn('JbPro', 'player is null'); return; } if (!this.player.isPlayback()) { this.debug.warn('JbPro', 'playType is not playback'); return; } this.player.playback.updateLocalOneFrameTimestamp(timestamp); } /** * * @param quality */ setStreamQuality(quality) { this.debug.log('JbPro', `setStreamQuality() ${quality}`); if (!this.player) { this.debug.warn('JbPro', 'player is null'); return; } if (!this.player._opt.operateBtns.quality) { this.debug.warn('JbPro', 'player._opt.operateBtns.quality is false'); return; } const qualityList = this.player._opt.qualityConfig || []; if (qualityList.includes(quality)) { this.player.streamQuality = quality; } else { this.debug.warn('JbPro', `quality: ${quality} is not in qualityList`); } } /** * * @param url {string} * @returns {Promise<unknown>} * @private */ _play() { let url = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ''; let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; return new Promise((resolve, reject) => { if (!this.player) { return reject('player is null'); } // check need reset player let needResetPlayer = false; if (this._opt.url && this._opt.url !== url) { needResetPlayer = true; } this._opt.url = url; this._opt.playOptions = options; // remove search const urlPath = url.split("?")[0]; // 新的url const isHttp = urlPath.startsWith('http://') || urlPath.startsWith('https://'); const isWebrtc = urlPath.startsWith('webrtc://'); const isAliyunRtc = urlPath.startsWith('artc://'); const isWebTransport = urlPath.startsWith('wt://'); const isWebsocket = urlPath.startsWith('ws://') || urlPath.startsWith('wss://'); const isHttpOrWebsocket = isHttp || isWebsocket; const isHls = isHttp && urlPath.endsWith('.m3u8'); const isFlv = isHttpOrWebsocket && urlPath.endsWith(".flv"); const isFmp4 = isHttpOrWebsocket && (urlPath.endsWith(".fmp4") || urlPath.endsWith(".mp4")); const isMpeg4 = isHttpOrWebsocket && urlPath.endsWith(".mpeg4"); const isNakedFlow = isHttpOrWebsocket && (urlPath.endsWith(".h264") || urlPath.endsWith(".h265")); const isTs = isHttpOrWebsocket && urlPath.endsWith(".ts"); let isWebrtcForZLM = this._opt.isWebrtcForZLM || false; let isWebrtcForSRS = this._opt.isWebrtcForSRS || false; let isWebrtcForOthers = this._opt.isWebrtcForOthers || false; if (isWebrtc) { if (url.indexOf('/index/api/webrtc') !== -1) { isWebrtcForZLM = true; isWebrtcForSRS = false; isWebrtcForOthers = false; } else if (url.indexOf('/rtc/v1/play/') !== -1) { isWebrtcForSRS = true; isWebrtcForZLM = false; isWebrtcForOthers = false; } } // let protocol = null; let demuxType = null; if (isFlv && isFalse(this._opt.isFlv)) { this._resetDemuxType('isFlv'); } if (isFmp4 && isFalse(this._opt.isFmp4)) { this._resetDemuxType('isFmp4'); } if (isMpeg4 && isFalse(this._opt.isMpeg4)) { this._resetDemuxType('isMpeg4'); } if (isNakedFlow && isFalse(this._opt.isNakedFlow)) { this._resetDemuxType('isNakedFlow'); } if (isTs && isFalse(this._opt.isTs)) { this._resetDemuxType('isTs'); } // protocol if (isHttp) { if (isHls) { protocol = PLAYER_PLAY_PROTOCOL.hls; } else { protocol = PLAYER_PLAY_PROTOCOL.fetch; } } else if (isWebTransport) { protocol = PLAYER_PLAY_PROTOCOL.webTransport; } else { if (isWebrtc) { protocol = PLAYER_PLAY_PROTOCOL.webrtc; } else if (isAliyunRtc) { protocol = PLAYER_PLAY_PROTOCOL.aliyunRtc; } else { protocol = PLAYER_PLAY_PROTOCOL.websocket; } } if 
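// [Reference note, comment only; derived from the scheme/suffix checks in _play() here] The scheme selects
// the transport (http(s) -> fetch, or hls for .m3u8; ws(s) -> websocket; webrtc:// -> webrtc; artc:// ->
// aliyunRtc; wt:// -> webTransport), while the path suffix selects the demuxer (.flv -> flv, .mp4/.fmp4 ->
// fmp4, .mpeg4 -> mpeg4, .h264/.h265 -> nakedFlow, .ts -> ts); ws(s) urls without a recognized suffix fall
// back to the m7s demuxer. ZLM / SRS webrtc endpoints are recognized by '/index/api/webrtc' and '/rtc/v1/play/'.
//   e.g. 'wss://media.example.com/live/cam1.flv' -> protocol: websocket, demuxType: flv (host is an example)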
(this._opt.isNakedFlow) { demuxType = DEMUX_TYPE.nakedFlow; } else if (this._opt.isFmp4) { demuxType = DEMUX_TYPE.fmp4; } else if (this._opt.isMpeg4) { demuxType = DEMUX_TYPE.mpeg4; } else if (this._opt.isFlv) { demuxType = DEMUX_TYPE.flv; } else if (this._opt.isTs) { demuxType = DEMUX_TYPE.ts; } else { if (isHls) { demuxType = DEMUX_TYPE.hls; } else if (isWebrtc) { demuxType = DEMUX_TYPE.webrtc; } else if (isAliyunRtc) { demuxType = DEMUX_TYPE.aliyunRtc; } else if (isWebTransport) { demuxType = DEMUX_TYPE.webTransport; } else { // m7s for websocket if (isWebsocket) { demuxType = DEMUX_TYPE.m7s; } } } if (!(protocol && demuxType)) { if (this._opt.playFailedAndPausedShowMessage) { this.showErrorMessageTips('url is not support'); } return reject(`play protocol is ${PLAYER_PLAY_PROTOCOL_LIST[protocol]}, demuxType is ${demuxType}`); } // this.debug.log('JbPro', `play protocol is ${PLAYER_PLAY_PROTOCOL_LIST[protocol]}, demuxType is ${demuxType}`); const _playNext = () => { // webgl this.player.once(EVENTS_ERROR.webglAlignmentError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'webglAlignmentError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.webglAlignmentError, error)); if (this.player && this.player._opt.webglAlignmentErrorReplay) { this.debug.log('JbPro', 'webglAlignmentError'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; this._resetPlayer({ openWebglAlignment: true }).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'webglAlignmentError and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webglAlignmentError, {}, error); // reject(); this.debug.error('JbPro', 'webglAlignmentError and play error', e); }); }).catch(e => { this.debug.error('JbPro', 'webglAlignmentError and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webglAlignmentError, {}, error); this.debug.log('JbPro', 'webglAlignmentError and webglAlignmentErrorReplay is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webglAlignmentError, {}, error); this.debug.error('JbPro', 'webglAlignmentError and pause error', e); }); } }); // webgl this.player.once(EVENTS_ERROR.webglContextLostError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'webglContextLostError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.webglContextLostError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.webglContextLostErrorReplay) { this.debug.log('JbPro', 'webglContextLostError'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'webglContextLostError and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webglContextLostError, lastFrameInfo, error); // reject(); this.debug.error('JbPro', 'webglContextLostError and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webglContextLostError, lastFrameInfo, error); this.debug.error('JbPro', 'webglContextLostError and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webglContextLostError, lastFrameInfo, error); this.debug.log('JbPro', 'webglContextLostError and webglContextLostErrorReplay is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webglContextLostError, lastFrameInfo, error); this.debug.error('JbPro', 'webglAlignmentError and pause error', e); }); } }); // MSE this.player.once(EVENTS_ERROR.mediaSourceH265NotSupport, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mediaSourceH265NotSupport but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.mediaSourceH265NotSupport, error)); if (this.player && this.player._opt.hardDecodingNotSupportAutoWasm) { this.debug.log('JbPro', 'mediaSourceH265NotSupport auto wasm [mse-> wasm] reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; this._resetPlayer({ useMSE: false, useWCS: false }).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'mediaSourceH265NotSupport auto wasm [mse-> wasm] reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceH265NotSupport, error); // reject(); this.debug.error('JbPro', 'mediaSourceH265NotSupport auto wasm [mse-> wasm] reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceH265NotSupport, {}, error); this.debug.error('JbPro', 'mediaSourceH265NotSupport auto wasm [mse-> wasm] _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceH265NotSupport, {}, error); this.debug.log('JbPro', 'mediaSourceH265NotSupport and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceH265NotSupport, {}, error); this.debug.error('JbPro', 'mediaSourceH265NotSupport and pause error', e); }); } }); // MSE this.player.once(EVENTS_ERROR.mediaSourceFull, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mediaSourceFull but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.mediaSourceFull, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.mseDecodeErrorReplay) { const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; let decoderErrorAutoWasm = this.player._opt.decoderErrorAutoWasm; let isMeaningfulRetry = true; if (decoderErrorAutoWasm) { options = { useMSE: false, useWCS: false }; } else { if (this._checkIsMeaningfulRetry(RETRY_TYPE.mseDecodeError)) { this._mseDecodeErrorRetryLog.push(now$2()); } else { // 降级到wasm decoderErrorAutoWasm = true; isMeaningfulRetry = false; options = { useMSE: false, useWCS: false }; } } this.debug.log('JbPro', `mediaSourceFull and auto wasm ${isFalse(isMeaningfulRetry) ? ' and is not meaningful Retry' : ''} [mse-> ${decoderErrorAutoWasm ? "wasm" : "mse"}] reset player and play`); if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'mediaSourceFull and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceFull, lastFrameInfo, error); // reject(); this.debug.error('JbPro', 'mediaSourceFull and reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceFull, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceFull and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceFull, lastFrameInfo, error); this.debug.log('JbPro', 'mediaSourceFull and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceFull, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceFull and pause error', e); }); } }); // MSE this.player.once(EVENTS_ERROR.mediaSourceAppendBufferError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mediaSourceAppendBufferError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.mediaSourceAppendBufferError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.mseDecodeErrorReplay) { const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; let decoderErrorAutoWasm = this.player._opt.decoderErrorAutoWasm; let isMeaningfulRetry = true; if (decoderErrorAutoWasm) { options = { useMSE: false, useWCS: false }; } else { if (this._checkIsMeaningfulRetry(RETRY_TYPE.mseDecodeError)) { this._mseDecodeErrorRetryLog.push(now$2()); } else { // 降级到wasm decoderErrorAutoWasm = true; isMeaningfulRetry = false; options = { useMSE: false, useWCS: false }; } } // if (this.player.isMSEAudioDecoderError) { this.player.debug.log('JbPro', 'mediaSourceAppendBufferError and isMSEAudioDecoderError is true so set mseDecodeAudio = false'); options.mseDecodeAudio = false; } this.debug.log('JbPro', `mediaSourceAppendBufferError and auto wasm ${isFalse(isMeaningfulRetry) ? ' and is not meaningful Retry' : ''} [mse-> ${decoderErrorAutoWasm ? "wasm" : "mse"}] reset player and play`); if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'mediaSourceAppendBufferError and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAppendBufferError, lastFrameInfo, error); // reject(); this.debug.error('JbPro', 'mediaSourceAppendBufferError and reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAppendBufferError, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceAppendBufferError and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAppendBufferError, lastFrameInfo, error); this.debug.log('JbPro', 'mediaSourceAppendBufferError and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAppendBufferError, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceAppendBufferError and pause error', e); }); } }); // MSE this.player.once(EVENTS_ERROR.mseSourceBufferError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mseSourceBufferError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.mseSourceBufferError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.mseDecodeErrorReplay) { const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; // 如果是worker 线程解码失败,优先降级到主线程解码 if (this.player._opt.mseDecoderUseWorker) { options = { mseDecoderUseWorker: false }; this.debug.log('JbPro', `mseSourceBufferError auto wasm [mse worker -> mse] reset player and play`); } else { let decoderErrorAutoWasm = this.player._opt.decoderErrorAutoWasm; let isMeaningfulRetry = true; if (decoderErrorAutoWasm) { options = { useMSE: false, useWCS: false }; } else { if (this._checkIsMeaningfulRetry(RETRY_TYPE.mseDecodeError)) { this._mseDecodeErrorRetryLog.push(now$2()); } else { // 降级到wasm decoderErrorAutoWasm = true; isMeaningfulRetry = false; options = { useMSE: false, useWCS: false }; } } if (this.player.isMSEVideoDecoderInitializationFailedNotSupportHevc) { this.debug.log('JbPro', `mseSourceBufferError and isMSEVideoDecoderInitializationFailedNotSupportHevc is true so auto wasm`); options = { useMSE: false, useWCS: false }; } this.debug.log('JbPro', `mseSourceBufferError auto wasm ${isFalse(isMeaningfulRetry) ? ' and is not meaningful Retry' : ''} [mse-> ${decoderErrorAutoWasm ? "wasm" : "mse"}] reset player and play`); } if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'mseSourceBufferError reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseSourceBufferError, lastFrameInfo, error); this.debug.error('JbPro', 'mseSourceBufferError reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseSourceBufferError, lastFrameInfo, error); this.debug.error('JbPro', 'mseSourceBufferError _resetPlayer and play error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
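// [Pattern note, comment only, summarizing the error handlers registered in _playNext above and below]
// Each handler logs a crash entry, snapshots the last frame, then either (a) rebuilds the player and replays
// the stored url, downgrading useMSE/useWCS to wasm when decoderErrorAutoWasm is set or when the retry budget
// tracked in _mseDecodeErrorRetryLog is exhausted, or (b) when the matching *Replay option is off, calls
// setPlayFailedAndPaused() + _pause() and emits playFailedAndPaused. mseSourceBufferError additionally retries
// with mseDecoderUseWorker = false (worker -> main thread) before falling back to wasm.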
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseSourceBufferError, lastFrameInfo, error); this.debug.log('JbPro', 'mseSourceBufferError and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseSourceBufferError, lastFrameInfo, error); this.debug.error('JbPro', 'mseSourceBufferError and pause error:', e); }); } }); // MSE this.player.once(EVENTS_ERROR.mediaSourceBufferedIsZeroError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mediaSourceBufferedIsZeroError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.mediaSourceBufferedIsZeroError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.mseDecodeErrorReplay) { const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; let decoderErrorAutoWasm = this.player._opt.decoderErrorAutoWasm; let isMeaningfulRetry = true; if (decoderErrorAutoWasm) { options = { useMSE: false, useWCS: false }; } else { if (this._checkIsMeaningfulRetry(RETRY_TYPE.mseDecodeError)) { this._mseDecodeErrorRetryLog.push(now$2()); } else { // 降级到wasm decoderErrorAutoWasm = true; isMeaningfulRetry = false; options = { useMSE: false, useWCS: false }; } } this.debug.log('JbPro', `mediaSourceBufferedIsZeroError auto wasm ${isFalse(isMeaningfulRetry) ? ' and is not meaningful Retry' : ''} [mse-> ${decoderErrorAutoWasm ? "wasm" : "mse"}] reset player and play`); if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'mediaSourceBufferedIsZeroError reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceBufferedIsZeroError, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceBufferedIsZeroError reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceBufferedIsZeroError, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceBufferedIsZeroError _resetPlayer and play error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceBufferedIsZeroError, lastFrameInfo, error); this.debug.log('JbPro', 'mediaSourceBufferedIsZeroError and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceBufferedIsZeroError, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceBufferedIsZeroError and pause error:', e); }); } }); // MSE init error ,can not use mse decode only auto wasm this.player.once(EVENTS_ERROR.mseAddSourceBufferError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mseAddSourceBufferError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.mseAddSourceBufferError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.hardDecodingNotSupportAutoWasm) { this.debug.log('JbPro', 'mseAddSourceBufferError auto wasm [mse-> wasm] reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = { useMSE: false, useWCS: false }; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'mseAddSourceBufferError auto wasm [mse-> wasm] reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseAddSourceBufferError, lastFrameInfo, error); // reject(); this.debug.error('JbPro', 'mseAddSourceBufferError auto wasm [mse-> wasm] reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseAddSourceBufferError, lastFrameInfo, error); this.debug.error('JbPro', 'mseAddSourceBufferError auto wasm [mse-> wasm] _resetPlayer and play error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseAddSourceBufferError, lastFrameInfo, error); this.debug.log('JbPro', 'mseAddSourceBufferError and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseAddSourceBufferError, lastFrameInfo, error); this.debug.error('JbPro', 'mseAddSourceBufferError and pause error', e); }); } }); // MSE init error ,can not use mse decode only auto wasm this.player.once(EVENTS_ERROR.mediaSourceDecoderConfigurationError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mediaSourceDecoderConfigurationError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.mediaSourceDecoderConfigurationError, error)); if (this.player && this.player._opt.hardDecodingNotSupportAutoWasm) { this.debug.log('JbPro', 'mediaSourceDecoderConfigurationError auto wasm [mse-> wasm] reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = { useMSE: false, useWCS: false }; this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'mediaSourceDecoderConfigurationError auto wasm [mse-> wasm] reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceDecoderConfigurationError, error); this.debug.error('JbPro', 'mediaSourceDecoderConfigurationError auto wasm [mse-> wasm] reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceDecoderConfigurationError, error); this.debug.error('JbPro', 'mediaSourceDecoderConfigurationError auto wasm [mse-> wasm] _resetPlayer and play error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceDecoderConfigurationError, error); this.debug.log('JbPro', 'mediaSourceDecoderConfigurationError and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceDecoderConfigurationError, error); this.debug.error('JbPro', 'mediaSourceDecoderConfigurationError and pause error', e); }); } }); // MSE this.player.once(EVENTS_ERROR.mediaSourceTsIsMaxDiff, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mediaSourceTsIsMaxDiff but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.mediaSourceTsIsMaxDiff, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.mediaSourceTsIsMaxDiffReplay) { this.debug.log('JbPro', 'mediaSourceTsIsMaxDiff reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } // support play and playback this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { this.debug.log('JbPro', 'mediaSourceTsIsMaxDiff replay success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceTsIsMaxDiff, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceTsIsMaxDiff replay error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceTsIsMaxDiff, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceTsIsMaxDiff _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceTsIsMaxDiff, lastFrameInfo, error); this.debug.log('JbPro', 'mediaSourceTsIsMaxDiff and replay is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceTsIsMaxDiff, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceTsIsMaxDiff and pause error', e); }); } }); // MSE this.player.once(EVENTS_ERROR.mseWidthOrHeightChange, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mseWidthOrHeightChange but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.mseWidthOrHeightChange, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.widthOrHeightChangeReplay) { this.debug.log('JbPro', 'mseWidthOrHeightChange and reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { if (this.player._opt.widthOrHeightChangeReplayDelayTime > 0) { setTimeout(() => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mseWidthOrHeightChange and widthOrHeightChangeReplayDelayTime but player is destroyed'); return; } this.play(_url, _playOptions).then(() => { this.debug.log('JbPro', 'mseWidthOrHeightChange and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'mseWidthOrHeightChange and reset player and play error', e); }); }, this.player._opt.widthOrHeightChangeReplayDelayTime * 1000); } else { this.play(_url, _playOptions).then(() => { this.debug.log('JbPro', 'mseWidthOrHeightChange and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'mseWidthOrHeightChange and reset player and play error', e); }); } }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'mseWidthOrHeightChange and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
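// [Pattern note, comment only, from the mseWidthOrHeightChange handler above] When the stream resolution
// changes, the replay can be deferred by widthOrHeightChangeReplayDelayTime (seconds, multiplied by 1000 for
// the setTimeout); with a delay of 0 the player is rebuilt and replayed immediately.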
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseWidthOrHeightChange, lastFrameInfo, error); this.debug.log('JbPro', 'mseWidthOrHeightChange and widthOrHeightChangeReplay is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mseWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'mseWidthOrHeightChange error and pause error', e); }); } }); // MSE // inner replay
this.player.once(EVENTS_ERROR.mediaSourceAudioG711NotSupport, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mediaSourceAudioG711NotSupport but player is destroyed'); return; } const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.mediaSourceAudioG711NotSupportReplay) { this.debug.log('JbPro', 'mediaSourceAudioG711NotSupport and reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = { mseDecodeAudio: false }; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { this.debug.log('JbPro', 'mediaSourceAudioG711NotSupport and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioG711NotSupport, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceAudioG711NotSupport and reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioG711NotSupport, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceAudioG711NotSupport and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ?
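// The MSE audio handlers below (mediaSourceAudioInitTimeout and mediaSourceAudioNoDataTimeout)
// recover by resetting the player with mseDecodeAudio: false, keeping MSE for video while
// dropping MSE audio decoding, and then replaying the current url.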
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioG711NotSupport, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceAudioG711NotSupport and _resetPlayer error', e); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioG711NotSupport, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceAudioG711NotSupport error and pause error', e); }); } }); // MSE // inner replay this.player.once(EVENTS_ERROR.mediaSourceAudioInitTimeout, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mediaSourceAudioInitTimeout but player is destroyed'); return; } const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.mediaSourceAudioInitTimeoutReplay) { this.debug.log('JbPro', 'mediaSourceAudioInitTimeout and reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = { mseDecodeAudio: false }; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { this.debug.log('JbPro', 'mediaSourceAudioInitTimeout and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioInitTimeout, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceAudioInitTimeout and reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioInitTimeout, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceAudioInitTimeout and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioInitTimeout, lastFrameInfo); this.debug.error('JbPro', 'mediaSourceAudioInitTimeout and _resetPlayer error', e); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioInitTimeout, lastFrameInfo); this.debug.error('JbPro', 'mediaSourceAudioInitTimeout error and pause error', e); }); } }); // MSE // inner replay this.player.once(EVENTS_ERROR.mediaSourceAudioNoDataTimeout, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mediaSourceAudioNoDataTimeout but player is destroyed'); return; } const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.mediaSourceAudioInitTimeoutReplay) { this.debug.log('JbPro', 'mediaSourceAudioNoDataTimeout and reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = { mseDecodeAudio: false }; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { this.debug.log('JbPro', 'mediaSourceAudioNoDataTimeout and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioNoDataTimeout, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceAudioNoDataTimeout and reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioNoDataTimeout, lastFrameInfo, error); this.debug.error('JbPro', 'mediaSourceAudioNoDataTimeout and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
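// mediaSourceUseCanvasRenderPlayFailed (below) retries according to
// mediaSourceUseCanvasRenderPlayFailedReplayType: 'canvas' drops hard decoding entirely
// (useMSE: false, useWCS: false), while 'video' switches rendering back to the <video> element
// (useVideoRender: true, useCanvasRender: false).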
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioNoDataTimeout, lastFrameInfo); this.debug.error('JbPro', 'mediaSourceAudioNoDataTimeout and _resetPlayer error', e); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceAudioNoDataTimeout, lastFrameInfo); this.debug.error('JbPro', 'mediaSourceAudioNoDataTimeout error and pause error', e); }); } }); // stream fetch error this.player.once(EVENTS_ERROR.mediaSourceUseCanvasRenderPlayFailed, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'mediaSourceUseCanvasRenderPlayFailed but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.mediaSourceUseCanvasRenderPlayFailed, error)); if (this.player && this.player._opt.mediaSourceUseCanvasRenderPlayFailedReplay && this.player._opt.mediaSourceUseCanvasRenderPlayFailedReplayType) { this.debug.log('JbPro', `mediaSourceUseCanvasRenderPlayFailed relayType is ${this.player._opt.mediaSourceUseCanvasRenderPlayFailedReplayType} and reset player and play`); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let resetPlayerOptions = {}; const replayType = this.player._opt.mediaSourceUseCanvasRenderPlayFailedReplayType; if (replayType === RENDER_TYPE.canvas) { resetPlayerOptions = { useMSE: false, useWCS: false }; } else if (replayType === RENDER_TYPE.video) { resetPlayerOptions = { useVideoRender: true, useCanvasRender: false }; } this._resetPlayer(resetPlayerOptions).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'mediaSourceUseCanvasRenderPlayFailed and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceUseCanvasRenderPlayFailed, error); this.debug.error('JbPro', 'mediaSourceUseCanvasRenderPlayFailed and reset player and play error', e); }); }).catch(e => { this.debug.error('JbPro', 'mediaSourceUseCanvasRenderPlayFailed auto and _resetPlayer and play error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
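// The WebCodecs handlers below (webcodecsH265NotSupport, webcodecsUnsupportedConfigurationError,
// webcodecsDecodeConfigureError) fall back to soft decoding when hardDecodingNotSupportAutoWasm
// is enabled: the player is reset with useMSE: false / useWCS: false and the url is replayed.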
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.debug.log('JbPro', 'mediaSourceUseCanvasRenderPlayFailed and pause player success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.mediaSourceUseCanvasRenderPlayFailed, error); this.debug.error('JbPro', 'mediaSourceUseCanvasRenderPlayFailed and pause', e); }); } }); // webcodecs init error ,can not use mse decode only auto wasm this.player.once(EVENTS_ERROR.webcodecsH265NotSupport, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'webcodecsH265NotSupport but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.webcodecsH265NotSupport, error)); if (this.player && this.player._opt.hardDecodingNotSupportAutoWasm) { this.debug.log('JbPro', 'webcodecsH265NotSupport auto wasm [wcs-> wasm] reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; this._resetPlayer({ useMSE: false, useWCS: false }).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'webcodecsH265NotSupport auto wasm [wcs-> wasm] reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsH265NotSupport, error); this.debug.error('JbPro', 'webcodecsH265NotSupport auto wasm [wcs-> wasm] reset player and play error', e); }); }).catch(e => { this.debug.error('JbPro', 'webcodecsH265NotSupport auto wasm [wcs-> wasm] _resetPlayer and play error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsH265NotSupport, error); this.debug.log('JbPro', 'webcodecsH265NotSupport and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsH265NotSupport, error); this.debug.error('JbPro', 'webcodecsH265NotSupport and pause error', e); }); } }); // webcodecs init error ,can not use mse decode only auto wasm this.player.once(EVENTS_ERROR.webcodecsUnsupportedConfigurationError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'webcodecsUnsupportedConfigurationError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.webcodecsUnsupportedConfigurationError, error)); if (this.player && this.player._opt.hardDecodingNotSupportAutoWasm) { this.debug.log('JbPro', 'webcodecsUnsupportedConfigurationError auto wasm [wcs-> wasm] reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; this._resetPlayer({ useMSE: false, useWCS: false }).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'webcodecsUnsupportedConfigurationError auto wasm [wcs-> wasm] reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsUnsupportedConfigurationError, error); this.debug.error('JbPro', 'webcodecsUnsupportedConfigurationError auto wasm [wcs-> wasm] reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsUnsupportedConfigurationError, error); this.debug.error('JbPro', 'webcodecsUnsupportedConfigurationError auto wasm [wcs-> wasm] _resetPlayer and play error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsUnsupportedConfigurationError, error); this.debug.log('JbPro', 'webcodecsUnsupportedConfigurationError and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsUnsupportedConfigurationError, error); this.debug.error('JbPro', 'webcodecsUnsupportedConfigurationError and pause error', e); }); } }); // webcodecs this.player.once(EVENTS_ERROR.webcodecsDecodeConfigureError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'webcodecsDecodeConfigureError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.webcodecsDecodeConfigureError, error)); if (this.player && this.player._opt.hardDecodingNotSupportAutoWasm) { this.debug.log('JbPro', 'webcodecsDecodeConfigureError auto wasm [wcs-> wasm] reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; this._resetPlayer({ useMSE: false, useWCS: false }).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'webcodecsDecodeConfigureError auto wasm [wcs-> wasm] reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsDecodeConfigureError, error); this.debug.error('JbPro', 'webcodecsDecodeConfigureError auto wasm [wcs-> wasm] reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsDecodeConfigureError, error); this.debug.error('JbPro', 'webcodecsDecodeConfigureError auto wasm [wcs-> wasm] _resetPlayer and play error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsDecodeConfigureError, error); this.debug.log('JbPro', 'webcodecsDecodeConfigureError and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsDecodeConfigureError, error); this.debug.error('JbPro', 'webcodecsDecodeConfigureError and pause error', e); }); } }); // webcodecs this.player.once(EVENTS_ERROR.webcodecsDecodeError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'webcodecsDecodeError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.webcodecsDecodeError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.wcsDecodeErrorReplay) { const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; let decoderErrorAutoWasm = this.player._opt.decoderErrorAutoWasm; let isMeaningfulRetry = true; if (decoderErrorAutoWasm) { options = { useMSE: false, useWCS: false }; } else { if (this._checkIsMeaningfulRetry(RETRY_TYPE.wcsDecodeError)) { this._wcsDecodeErrorRetryLog.push(now$2()); } else { // 降级到wasm decoderErrorAutoWasm = true; isMeaningfulRetry = false; options = { useMSE: false, useWCS: false }; } } this.debug.log('JbPro', `webcodecs decode error autoWasm ${isFalse(isMeaningfulRetry) ? ' and is not meaningful Retry' : ''} [wcs-> ${decoderErrorAutoWasm ? 
"wasm" : "wcs"}] reset player and play`); if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'webcodecs decode error reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsDecodeError, lastFrameInfo, error); this.debug.error('JbPro', 'webcodecs decode error reset player and play error', e); }); }).catch(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsDecodeError, lastFrameInfo, error); this.debug.error('JbPro', 'webcodecs decode error _resetPlayer error'); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsDecodeError, lastFrameInfo, error); this.debug.log('JbPro', 'webcodecs decode error and autoWasm is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webcodecsDecodeError, lastFrameInfo, error); this.debug.error('JbPro', 'webcodecs decode error and pause error', e); }); } }); // webcodecs this.player.once(EVENTS_ERROR.wcsWidthOrHeightChange, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'wcsWidthOrHeightChange but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.wcsWidthOrHeightChange, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.widthOrHeightChangeReplay) { this.debug.log('JbPro', 'wcsWidthOrHeightChange and reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { if (this._opt.widthOrHeightChangeReplayDelayTime > 0) { setTimeout(() => { if (this.isDestroyed()) { this.debug.log('JbPro', 'wcsWidthOrHeightChange and widthOrHeightChangeReplayDelayTime but player is destroyed'); return; } this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'wcsWidthOrHeightChange and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wcsWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'wcsWidthOrHeightChange and reset player and play error', e); }); }, this._opt.widthOrHeightChangeReplayDelayTime * 1000); } else { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'wcsWidthOrHeightChange and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wcsWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'wcsWidthOrHeightChange and reset player and play error', e); }); } }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wcsWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'wcsWidthOrHeightChange and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wcsWidthOrHeightChange, lastFrameInfo, error); this.debug.log('JbPro', 'wcsWidthOrHeightChange and widthOrHeightChangeReplay is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wcsWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'wcsWidthOrHeightChange error and pause error', e); }); } }); // wasm
this.player.once(EVENTS_ERROR.wasmDecodeError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'wasmDecodeError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.wasmDecodeError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.wasmDecodeErrorReplay) { this.debug.log('JbPro', 'wasm decode error and reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve();
this.debug.log('JbPro', 'wasm decode error and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmDecodeError, lastFrameInfo, error); this.debug.error('JbPro', 'wasm decode error and reset player and play error', e); }); }).catch(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmDecodeError, lastFrameInfo, error); this.debug.error('JbPro', 'wasm decode error and _resetPlayer error'); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ?
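// simdDecodeError (below) can downgrade from the SIMD build to the plain wasm decoder: when
// simdDecodeErrorReplayType is 'wasm' the player is reset with useSIMD: false before replaying.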
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmDecodeError, lastFrameInfo, error); this.debug.log('JbPro', 'wasm decode error and wasmDecodeErrorReplay is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmDecodeError, lastFrameInfo, error); this.debug.error('JbPro', 'wasm decode error and pause error', e); }); } }); // wasm simd this.player.once(EVENTS_ERROR.simdDecodeError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'simdDecodeError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.simdDecodeError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.simdDecodeErrorReplay) { this.debug.log('JbPro', `simdDecodeError error simdDecodeErrorReplayType is ${this.player._opt.simdDecodeErrorReplayType} and reset player and play`); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; if (this.player._opt.simdDecodeErrorReplayType === DECODE_TYPE.wasm) { options = { useSIMD: false }; } if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'simdDecodeError and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.simdDecodeError, lastFrameInfo, error); this.debug.error('JbPro', 'simdDecodeError and reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.simdDecodeError, lastFrameInfo, error); this.debug.error('JbPro', 'simdDecodeError and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.simdDecodeError, lastFrameInfo, error); this.debug.error('JbPro', 'simdDecodeError and simdDecodeErrorReplay is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.simdDecodeError, lastFrameInfo, error); this.debug.error('JbPro', 'simdDecodeError error and pause error', e); }); } }); // wasm this.player.once(EVENTS_ERROR.wasmWidthOrHeightChange, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'wasmWidthOrHeightChange but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.wasmWidthOrHeightChange, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.widthOrHeightChangeReplay) { this.debug.log('JbPro', 'wasmWidthOrHeightChange and reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { if (this._opt.widthOrHeightChangeReplayDelayTime > 0) { setTimeout(() => { if (this.isDestroyed()) { this.debug.log('JbPro', 'wasmWidthOrHeightChange and widthOrHeightChangeReplayDelayTime but player is destroyed'); return; } this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'wasmWidthOrHeightChange and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'wasmWidthOrHeightChange and reset player and play error', e); }); }, this._opt.widthOrHeightChangeReplayDelayTime * 1000); } else { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'wasmWidthOrHeightChange and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'wasmWidthOrHeightChange and reset player and play error', e); }); } }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'wasmWidthOrHeightChange and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
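// Render fallbacks below: wasmUseVideoRenderError and videoElementPlayingFailed both reset the
// player with useVideoRender: false / useCanvasRender: true; videoElementPlayingFailed (when
// videoElementPlayingFailedReplay is enabled) additionally forces useMSE: false so the retry
// cannot end up on MSE + <video> again.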
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmWidthOrHeightChange, lastFrameInfo, error); this.debug.error('JbPro', 'wasmWidthOrHeightChange and _resetPlayer error', e); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmWidthOrHeightChange, lastFrameInfo); this.debug.error('JbPro', 'wasmWidthOrHeightChange error and pause error', e); }); } }); // wasm 使用 video 渲染失败,降级到 canvas 渲染 this.player.once(EVENTS_ERROR.wasmUseVideoRenderError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'wasmUseVideoRenderError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.wasmUseVideoRenderError, error)); this.debug.log('JbPro', 'wasmUseVideoRenderError and reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; this._resetPlayer({ useVideoRender: false, useCanvasRender: true }).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'wasmUseVideoRenderError and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmUseVideoRenderError, {}, error); this.debug.error('JbPro', 'wasmUseVideoRenderError and reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.wasmUseVideoRenderError, {}, error); this.debug.error('JbPro', 'wasmUseVideoRenderError and _resetPlayer error', e); }); }); // video element play error this.player.once(EVENTS_ERROR.videoElementPlayingFailed, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'videoElementPlayingFailed but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.videoElementPlayingFailed, error)); if (this.player && this.player._opt.videoElementPlayingFailedReplay) { this.debug.log('JbPro', `videoElementPlayingFailed and useMSE is ${this._opt.useMSE} and reset player and play`); const _url = this._opt.url; const _playOptions = this._opt.playOptions; // 这边如果是 mse 解码的话,需要强制变为其他解码模式。 this._resetPlayer({ useMSE: false, useVideoRender: false, useCanvasRender: true }).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'videoElementPlayingFailed and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.videoElementPlayingFailed, {}, error); this.debug.error('JbPro', 'videoElementPlayingFailed and reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.videoElementPlayingFailed, {}, error); this.debug.error('JbPro', 'videoElementPlayingFailed and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
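// simdH264DecodeVideoWidthIsTooLarge (below) also downgrades to the plain wasm decoder via
// useSIMD: false. networkDelayTimeout is special-cased: it only acts when
// networkDelayTimeoutReplay is enabled, resetting and replaying directly instead of going
// through the pause + playFailedAndPaused fallback.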
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.videoElementPlayingFailed, {}, error); this.debug.error('JbPro', 'videoElementPlayingFailed and videoElementPlayingFailedReplay is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.videoElementPlayingFailed, {}, error); this.debug.error('JbPro', 'videoElementPlayingFailed and _pause error', e); }); } }); // simd this.player.once(EVENTS_ERROR.simdH264DecodeVideoWidthIsTooLarge, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'simdH264DecodeVideoWidthIsTooLarge but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.simdH264DecodeVideoWidthIsTooLarge, error)); if (this.player && this.player._opt.simdH264DecodeVideoWidthIsTooLargeReplay) { this.debug.log('JbPro', 'simdH264DecodeVideoWidthIsTooLarge and reset player and play'); const _url = this._opt.url; const _playOptions = this._opt.playOptions; // downgrade to wasm this._resetPlayer({ useSIMD: false }).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'simdH264DecodeVideoWidthIsTooLarge and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.simdH264DecodeVideoWidthIsTooLarge, {}, error); // reject(); this.debug.error('JbPro', 'simdH264DecodeVideoWidthIsTooLarge and reset player and play error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.simdH264DecodeVideoWidthIsTooLarge, {}, error); this.debug.error('JbPro', 'simdH264DecodeVideoWidthIsTooLarge and _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.simdH264DecodeVideoWidthIsTooLarge, {}, error); this.debug.error('JbPro', 'simdH264DecodeVideoWidthIsTooLarge and simdDecodeErrorReplay is false'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.simdH264DecodeVideoWidthIsTooLarge, {}, error); this.debug.error('JbPro', 'simdH264DecodeVideoWidthIsTooLarge and pause error', e); }); } }); // 网络超时 this.player.once(EVENTS.networkDelayTimeout, error => { // 网络超时需要特殊处理,不需要调用pause方法,直接destroy, // 也不抛出playFailedAndPaused事件; if (this.player._opt.networkDelayTimeoutReplay) { if (this.isDestroyed()) { this.debug.log('JbPro', 'networkDelayTimeout but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS.networkDelayTimeout, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); this.debug.log('JbPro', `network delay time out and reset player and play`); const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; if (this.player && this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'wasm decode error and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS.networkDelayTimeout, lastFrameInfo, error); this.debug.error('JbPro', 'wasm decode error and reset player and play error', e); }); }).catch(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS.networkDelayTimeout, lastFrameInfo, error); this.debug.error('JbPro', 'wasm decode error and 
_resetPlayer error'); }); } }); // stream fetch error this.player.once(EVENTS_ERROR.fetchError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'fetchError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.fetchError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); this.debug.log('JbPro', 'fetch error and pause player'); const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.fetchError, lastFrameInfo, error); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.fetchError, lastFrameInfo, error); this.debug.error('JbPro', 'fetch error and pause', e); }); }); // stream end this.player.once(EVENTS.streamEnd, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'streamEnd but player is destroyed and return'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS.streamEnd, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); const isMeaningfulRetry = this._checkIsMeaningfulRetry(RETRY_TYPE.ws1006); if (this.player && this.player._opt.websocket1006ErrorReplay && '' + error === '1006' && isMeaningfulRetry) { this.debug.log('JbPro', `streamEnd and websocket1006ErrorReplay is true and error is 1006 and delay ${this._opt.websocket1006ErrorReplayDelayTime}s reset player and play`); const _playOptions = this._opt.playOptions; const _url = this._opt.url; this._websocket1006ErrorRetryLog.push(now$2()); let options = {}; if (this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } this._resetPlayer(options).then(() => { setTimeout(() => { if (this.isDestroyed()) { this.debug.log('JbPro', 'streamEnd and 1006 error but player is destroyed and return'); return; } this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'streamEnd and 1006 error and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.streamEnd, {}, error); // reject(); this.debug.error('JbPro', 'streamEnd and 1006 error and reset player and play error', e); }); }, this._opt.websocket1006ErrorReplayDelayTime * 1000); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.streamEnd, {}, error); this.debug.error('JbPro', 'streamEnd and 1006 and _resetPlayer error', e); }); } else { this.debug.log('JbPro', `streamEnd pause player ${isFalse(isMeaningfulRetry) ? 'and is not meaningful retry' : ''}`); const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS.streamEnd, lastFrameInfo, error); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS.streamEnd, lastFrameInfo, error); this.debug.error('JbPro', 'streamEnd pause', e); }); } }); // stream websocket error this.player.once(EVENTS_ERROR.websocketError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'websocketError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.websocketError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); this.debug.log('JbPro', 'websocketError and reset player'); const isClear = this._opt.playFailedUseLastFrameShow === false ? 
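// Transport-level failures (websocketError below, plus webrtcError / hlsError / aliyunRtcError /
// decoderWorkerInitError) are not retried here: the player pauses and emits playFailedAndPaused
// together with a crashLog snapshot, leaving any retry decision to the host application.
//
// Minimal host-side sketch (not part of this bundle). It assumes the public instance exposes
// on() and that the public event names match the EVENTS keys used here ('crashLog',
// 'playFailedAndPaused'); sendToLogServer() is a hypothetical helper.
//
//   player.on('crashLog', (log) => sendToLogServer(log));
//   player.on('playFailedAndPaused', (type, lastFrame, error) => {
//     console.warn('play failed:', type, error);
//     // the host can decide here whether to call player.play(url) again or show an error UI
//   });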
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.websocketError, lastFrameInfo, error); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.websocketError, lastFrameInfo, error); this.debug.error('JbPro', 'websocketError and pause', e); }); }); // stream webrtc error this.player.once(EVENTS_ERROR.webrtcError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'webrtcError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.webrtcError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); this.debug.log('JbPro', 'webrtcError and pause player'); const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webrtcError, lastFrameInfo, error); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.webrtcError, lastFrameInfo, error); this.debug.error('JbPro', 'webrtcError and pause', e); }); }); // stream hls error this.player.once(EVENTS_ERROR.hlsError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'hlsError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.hlsError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); this.debug.log('JbPro', 'hlsError and pause player'); const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.hlsError, lastFrameInfo, error); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.hlsError, lastFrameInfo, error); this.debug.error('JbPro', 'hlsError and pause', e); }); }); // stream AliyunRtc error this.player.once(EVENTS_ERROR.aliyunRtcError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'aliyunRtcError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.aliyunRtcError, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); this.debug.log('JbPro', 'aliyunRtcError and pause player'); const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.aliyunRtcError, lastFrameInfo, error); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.aliyunRtcError, lastFrameInfo, error); this.debug.error('JbPro', 'aliyunRtcError and pause', e); }); }); // decoder worker init error this.player.once(EVENTS_ERROR.decoderWorkerInitError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'decoderWorkerInitError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.decoderWorkerInitError, error)); this.debug.log('JbPro', 'decoderWorkerInitError and pause player'); const isClear = this._opt.playFailedUseLastFrameShow === false ? 
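// The delayTimeout / loadingTimeout listeners further below retry up to heartTimeoutReplayTimes /
// loadingTimeoutReplayTimes times (-1 means retry without limit); once the budget is exhausted
// they pause the player and emit delayTimeoutRetryEnd / loadingTimeoutRetryEnd in addition to
// playFailedAndPaused.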
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.decoderWorkerInitError, {}, error); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.decoderWorkerInitError, {}, error); this.debug.error('JbPro', 'decoderWorkerInitError and pause', e); }); }); // webrtc h265 stream fetch error this.player.once(EVENTS_ERROR.videoElementPlayingFailedForWebrtc, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'videoElementPlayingFailedForWebrtc but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.videoElementPlayingFailedForWebrtc, error)); this.debug.log('JbPro', 'videoElementPlayingFailedForWebrtc and pause player'); const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.videoElementPlayingFailedForWebrtc, {}, error); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.videoElementPlayingFailedForWebrtc, {}, error); this.debug.error('JbPro', 'videoElementPlayingFailedForWebrtc and pause', e); }); }); // video Info Error width is undefined or height is undefined this.player.once(EVENTS_ERROR.videoInfoError, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'videoInfoError but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS_ERROR.videoInfoError, error)); this.debug.log('JbPro', 'videoInfoError and pause player'); const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.videoInfoError, {}, error); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS_ERROR.videoInfoError, {}, error); this.debug.error('JbPro', 'videoInfoError and pause', e); }); }); // webrtc h265 直接触发重播逻辑的。 this.player.once(EVENTS.webrtcStreamH265, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'webrtcStreamH265 but player is destroyed'); return; } // 需要特殊处理,不需要调用pause方法,直接destroy, // 也不抛出playFailedAndPaused事件; this.debug.log('JbPro', `webrtcStreamH265 and reset player and play`); const _url = this._opt.url; const _playOptions = this._opt.playOptions; const options = { isWebrtcH265: true }; this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); this.debug.log('JbPro', 'webrtcStreamH265 and reset player and play success'); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS.webrtcStreamH265, {}, error); this.debug.error('JbPro', 'webrtcStreamH265 and reset player and play error', e); }); }).catch(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS.webrtcStreamH265, {}, error); this.debug.error('JbPro', 'webrtcStreamH265 and _resetPlayer error'); }); }); // 监听 delay timeout this.player.on(EVENTS.delayTimeout, error => { if (this.isDestroyed()) { this.debug.log('JbPro', 'delay timeout but player is destroyed'); return; } this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS.delayTimeout, error)); const lastFrameInfo = this._getVideoLastIframeInfo(); if (this.player && this.player._opt.heartTimeoutReplay && (this._heartTimeoutReplayTimes < this.player._opt.heartTimeoutReplayTimes || this.player._opt.heartTimeoutReplayTimes === -1)) { this.debug.log('JbPro', `delay timeout replay time is ${this._heartTimeoutReplayTimes} and heartTimeoutReplayTimes is 
${this.player._opt.heartTimeoutReplayTimes}`); if (this.isDestroyed()) { this.debug && this.debug.warn('JbPro', 'delay timeout replay but player is destroyed'); return; } this._heartTimeoutReplayTimes += 1; const _url = this._opt.url; const _playOptions = this._opt.playOptions; let options = {}; if (this.player._opt.heartTimeoutReplayUseLastFrameShow || this.player._opt.replayUseLastFrameShow) { options = Object.assign({}, options, lastFrameInfo, { loadingIcon: this.player._opt.replayShowLoadingIcon }); } if (isFalse(this.isMute())) { options.isNotMute = true; } // support play and playback this._resetPlayer(options).then(() => { this.play(_url, _playOptions).then(() => { // resolve(); // this._heartTimeoutReplayTimes = 0; }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS.delayTimeout, lastFrameInfo, error); this.debug.error('JbPro', 'delay timeout replay error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS.delayTimeout, lastFrameInfo, error); this.debug.error('JbPro', 'delay timeout _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS.delayTimeout, lastFrameInfo, error); if (this.player) { this.emit(EVENTS.delayTimeoutRetryEnd); this.emit(EVENTS.playFailedAndPaused, EVENTS.delayTimeoutRetryEnd); } this.debug.warn('JbPro', `delayTimeoutRetryEnd and opt.heartTimeout is ${this.player && this.player._opt.heartTimeout} and opt.heartTimeoutReplay is ${this.player && this.player._opt.heartTimeoutReplay} and opt.heartTimeoutReplayTimes is ${this.player && this.player._opt.heartTimeoutReplayTimes},and local._heartTimeoutReplayTimes is ${this._heartTimeoutReplayTimes}`); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS.delayTimeout, lastFrameInfo, error); this.debug.error('JbPro', 'delay timeout and pause error', e); }); } }); // 监听 loading timeout this.player.on(EVENTS.loadingTimeout, error => { this.emit(EVENTS.crashLog, this.getCrashLog(EVENTS.loadingTimeout, error)); if (this.isDestroyed()) { this.debug.log('JbPro', 'loading timeout but player is destroyed'); return; } if (this.player && this.player._opt.loadingTimeoutReplay && (this._loadingTimeoutReplayTimes < this.player._opt.loadingTimeoutReplayTimes || this.player._opt.loadingTimeoutReplayTimes === -1)) { this.debug.log('JbPro', `loading timeout and replay time is ${this._loadingTimeoutReplayTimes} and loadingTimeoutReplayTimes is ${this.player._opt.loadingTimeoutReplayTimes}`); if (this.isDestroyed()) { this.debug && this.debug.warn('JbPro', 'delay timeout replay but player is destroyed'); return; } this._loadingTimeoutReplayTimes += 1; const _url = this._opt.url; const _playOptions = this._opt.playOptions; this._resetPlayer().then(() => { this.play(_url, _playOptions).then(() => { // resolve(); // this._loadingTimeoutReplayTimes = 0; }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS.loadingTimeout, {}, error); this.debug.error('JbPro', 'loading timeout replay error', e); }); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS.loadingTimeout, {}, error); this.debug.error('JbPro', 'loading timeout _resetPlayer error', e); }); } else { const isClear = this._opt.playFailedUseLastFrameShow === false ? 
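// After the error listeners are wired up, playback starts: if the decoder worker is already
// loaded the url is played immediately, otherwise play() waits for the decoderWorkerInit event.
// A full _resetPlayer() is only performed when the target stream really needs a different
// pipeline (legacy hls/webrtc paths, aliyun rtc, mpeg4, a url change, native HLS playback or
// hls demuxed in a worker); otherwise the existing player is updated in place via updateOption().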
true : false; this.player.setPlayFailedAndPaused(); this._pause(isClear).then(() => { this.emit(EVENTS.playFailedAndPaused, EVENTS.loadingTimeout, {}, error); if (this.player) { this.emit(EVENTS.loadingTimeoutRetryEnd); this.emit(EVENTS.playFailedAndPaused, EVENTS.loadingTimeoutRetryEnd, {}, error); } this.debug.log('JbPro', `loadingTimeoutRetryEnd and opt.loadingTimeout is ${this.player && this.player._opt.loadingTimeout} and opt.loadingTimeoutReplay is ${this.player && this.player._opt.loadingTimeoutReplay} and local._loadingTimeoutReplayTimes time is ${this._loadingTimeoutReplayTimes} and opt.loadingTimeoutReplayTimes is ${this.player && this.player._opt.loadingTimeoutReplayTimes}`); }).catch(e => { this.emit(EVENTS.playFailedAndPaused, EVENTS.loadingTimeout, {}, error); this.debug.error('JbPro', 'loading timeout and pause error', e); }); } }); if (this._hasLoaded()) { this.player.play(url, options).then(() => { resolve(); }).catch(e => { this.debug.error('JbPro', '_hasLoaded() and play error', e); this.emit(EVENTS.crashLog, this.getCrashLog('_hasLoaded() and play error', e)); this.player && this.player.pause().then(() => { reject(e); }).catch(e => { reject(e); this.debug.error('JbPro', '_hasLoaded() and play error and next pause error', e); }); }); } else { this.player.once(EVENTS.decoderWorkerInit, () => { this.player.play(url, options).then(() => { resolve(); }).catch(e => { this.debug.error('JbPro', 'decoderWorkerInit and play error', e); this.emit(EVENTS.crashLog, this.getCrashLog('decoderWorkerInit and play error', e)); this.player && this.player.pause().then(() => { reject(e); }).catch(e => { reject(e); this.debug.error('JbPro', 'decoderWorkerInit and play error and next pause error', e); }); }); }); } }; const playOptions = this.player.getOption(); const isOldHls = isHls && isFalse(this._opt.supportHls265); const isOldWebrtc = isWebrtc && isFalse(this._opt.isWebrtcH265); const isHlsCanVideoPlay = isHls && !!canPlayAppleMpegurl(); const isHlsButDemuxUseWorker = isHls && playOptions.demuxUseWorker; if (isOldHls || isOldWebrtc || isAliyunRtc || needResetPlayer || isMpeg4 || isHlsCanVideoPlay || isHlsButDemuxUseWorker) { this.debug.log('JbPro', `need reset player and isOldHls is ${isOldHls} and isOldWebrtc is ${isOldWebrtc} and isAliyunRtc is ${isAliyunRtc} and needResetPlayer(url change) is ${needResetPlayer} and isMpeg4 is ${isMpeg4} and isHlsCanVideoPlay is ${isHlsCanVideoPlay} and isHlsButDemuxUseWorker is ${isHlsButDemuxUseWorker}`); this._resetPlayer({ protocol, demuxType, isHls, isWebrtc, isWebrtcForZLM, isWebrtcForSRS, isWebrtcForOthers, isAliyunRtc, cryptoKey: options.cryptoKey || '', cryptoIV: options.cryptoIV || '', url, playOptions: options }).then(() => { _playNext(); }).catch(e => { reject('reset player error'); }); } else { const newOptions = { protocol, demuxType, isHls, isWebrtc, isAliyunRtc, isFlv: this._opt.isFlv, isFmp4: this._opt.isFmp4, isMpeg4: this._opt.isMpeg4, isNakedFlow: this._opt.isNakedFlow, isTs: this._opt.isTs, cryptoKey: options.cryptoKey || '', cryptoIV: options.cryptoIV || '' }; if (this._opt.isNakedFlow) { newOptions.mseDecodeAudio = false; } this.player.updateOption(newOptions); if (options.cryptoKey && options.cryptoIV) { if (this.player.decoderWorker) { this.player.decoderWorker.updateWorkConfig({ key: 'cryptoKey', value: options.cryptoKey }); this.player.decoderWorker.updateWorkConfig({ key: 'cryptoIV', value: options.cryptoIV }); } } _playNext(); } }); } _resetDemuxType(type) { this._opt.isFlv = false; this._opt.isFmp4 = false; 
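// The remaining demux-type flags are cleared below as well; only the flag named by `type` is
// then re-enabled, and isFmp4Private survives only when the type stays 'isFmp4'.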
this._opt.isMpeg4 = false; this._opt.isNakedFlow = false; this._opt.isHls = false; this._opt.isWebrtc = false; this._opt.isWebrtcForZLM = false; this._opt.isWebrtcForSRS = false; this._opt.isWebrtcForOthers = false; this._opt.isAliyunRtc = false; this._opt.isTs = false; if (type) { this._opt[type] = true; } if (type !== 'isFmp4') { this._opt.isFmp4Private = false; } } /** * */ resize() { this.debug.log('JbPro', 'resize()'); this.player && this.player.resize(); } /** * * @param time {number} */ setBufferTime(time) { this.debug.log('JbPro', `setBufferTime() ${time}`); time = Number(time); if (time > 10) { this.debug.warn('JbPro', `setBufferTime() buffer time is ${time} second, is too large, video will show blank screen until cache ${time} second buffer data`); } const videoBuffer = time * 1000; this._opt.videoBuffer = videoBuffer; if (this.player) { // s -> ms this.player.updateOption({ videoBuffer: videoBuffer }, true); } else { this.debug.warn('JbPro', 'setBufferTime() player is null'); } } /** * * @param time */ setBufferDelayTime(time) { this.debug.log('JbPro', `setBufferDelayTime() ${time}`); time = Number(time); if (time < 0.2) { this.debug.warn('JbPro', `setBufferDelayTime() buffer time delay is ${time} second, is too small`); } time = clamp(time, 0.2, 100); const videoBufferDelay = time * 1000; this._opt.videoBufferDelay = videoBufferDelay; if (this.player) { // s -> ms this.player.updateOption({ videoBufferDelay: videoBufferDelay }, true); } else { this.debug.warn('JbPro', 'setBufferDelayTime() player is null'); } } /** * * @param deg {number} */ setRotate(deg) { this.debug.log('JbPro', `setRotate() ${deg}`); deg = parseInt(deg, 10); const list = [0, 90, 180, 270]; if (this._opt.rotate === deg || list.indexOf(deg) === -1) { this.debug.warn('JbPro', `setRotate() rotate is ${deg} and this._opt.rotate is ${this._opt.rotate}`); return; } this._opt.rotate = deg; if (this.player) { this.player.updateOption({ rotate: deg }); this.resize(); } else { this.debug.warn('JbPro', 'setRotate() player is null'); } } /** * 设置镜像翻转 * @param type */ setMirrorRotate(mirrorRotate) { this.debug.log('JbPro', `setMirrorRotate() ${mirrorRotate}`); const list = ['none', 'level', 'vertical']; if (!mirrorRotate) { mirrorRotate = 'none'; } if (this._opt.mirrorRotate === mirrorRotate || list.indexOf(mirrorRotate) === -1) { this.debug.warn('JbPro', `setMirrorRotate() mirrorRotate is ${mirrorRotate} and this._opt.mirrorRotate is ${this._opt.mirrorRotate}`); return; } this._opt.mirrorRotate = mirrorRotate; if (this.player) { this.player.updateOption({ mirrorRotate }); this.resize(); } else { this.debug.warn('JbPro', 'setMirrorRotate() player is null'); } } setAspectRatio(aspectRatio) { this.debug.log('JbPro', `setAspectRatio() ${aspectRatio}`); const list = ['default', '4:3', '16:9']; if (!aspectRatio) { aspectRatio = 'default'; } if (this._opt.aspectRatio === aspectRatio || list.indexOf(aspectRatio) === -1) { this.debug.warn('JbPro', `setAspectRatio() aspectRatio is ${aspectRatio} and this._opt.aspectRatio is ${this._opt.mirrorRotate}`); return; } this._opt.aspectRatio = aspectRatio; if (this.player) { this.player.updateOption({ aspectRatio }); this.resize(); } else { this.debug.warn('JbPro', 'setAspectRatio() player is null'); } } /** * * @returns {boolean} */ hasLoaded() { return true; } /** * inner method * @returns {Player|*|boolean} * @private */ _hasLoaded() { return this.player && this.player.loaded || false; } /** * */ setKeepScreenOn() { this.debug.log('JbPro', 'setKeepScreenOn()'); this._opt.keepScreenOn 
= true; if (this.player) { this.player.updateOption({ keepScreenOn: true }); } else { this.debug.warn('JbPro', 'setKeepScreenOn() player is not ready'); } } /** * * @param flag {Boolean} */ setFullscreen(flag) { this.debug.log('JbPro', `setFullscreen() ${flag}`); const fullscreen = !!flag; if (!this.player) { this.debug.warn('JbPro', 'setFullscreen() player is not ready'); return; } if (this.player.fullscreen !== fullscreen) { this.player.fullscreen = fullscreen; } else { this.debug.warn('JbPro', `setFullscreen() fullscreen is ${fullscreen} and this.player.fullscreen is ${this.player.fullscreen}`); } } setWebFullscreen(flag) { this.debug.log('JbPro', `setWebFullscreen() ${flag}`); const webFullscreen = !!flag; if (!this.player) { this.debug.warn('JbPro', 'setWebFullscreen() player is not ready'); return; } this.player.webFullscreen = webFullscreen; } /** * * @param filename {string} * @param format {string} * @param quality {number} * @param type {string} download,base64,blob */ screenshot(filename, format, quality, type) { this.debug.log('JbPro', `screenshot() ${filename} ${format} ${quality} ${type}`); if (this.player && this.player.video) { return this.player.video.screenshot(filename, format, quality, type); } else { this.debug.warn('JbPro', 'screenshot() player is not ready'); } return null; } /** * * @param options * @returns Promise */ screenshotWatermark(options) { return new Promise((resolve, reject) => { this.debug.log('JbPro', 'screenshotWatermark()', options); if (this.player && this.player.video) { this.player.video.screenshotWatermark(options).then(data => { resolve(data); }).catch(e => { reject(e); }); } else { this.debug.warn('JbPro', 'screenshotWatermark() player is not ready'); reject('player is not ready'); } }); } /** * * @param fileName {string} * @param fileType {string} * @returns {Promise<unknown>} */ startRecord(fileName, fileType) { return new Promise((resolve, reject) => { this.debug.log('JbPro', `startRecord() ${fileName} ${fileType}`); if (!this.player) { this.debug.warn('JbPro', 'startRecord() player is not ready'); return reject('player is not ready'); } if (this.player.playing) { this.player.startRecord(fileName, fileType); resolve(); } else { this.debug.warn('JbPro', 'startRecord() player is not playing'); reject('not playing'); } }); } stopRecordAndSave(type, fileName) { return new Promise((resolve, reject) => { this.debug.log('JbPro', `stopRecordAndSave() ${type} ${fileName}`); if (this.player && this.player.recording) { this.player.stopRecordAndSave(type, fileName).then(blob => { resolve(blob); }).catch(e => { reject(e); }); } else { reject('not recording'); } }); } /** * * @returns {Boolean} */ isPlaying() { let result = false; if (this.player) { result = this.player.isPlaying(); } return result; } /** * * @returns {boolean} */ isLoading() { return this.player ? 
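// The option setters above (setBufferTime / setBufferDelayTime / setRotate / setMirrorRotate /
// setAspectRatio) validate or clamp their input and push the change to the live player via
// updateOption(). Hedged usage sketch (assumes `player` is an instance created from this bundle;
// the values shown are ones these setters explicitly accept):
//
//   player.setBufferTime(1);          // seconds; converted to milliseconds internally
//   player.setRotate(90);             // one of 0 / 90 / 180 / 270
//   player.setMirrorRotate('level');  // 'none' | 'level' | 'vertical'
//   player.setAspectRatio('16:9');    // 'default' | '4:3' | '16:9'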
this.player.loading : false; } /** * * @returns {boolean} */ isPause() { let result = false; if (this._opt.playType === PLAY_TYPE.player) { result = !this.isPlaying() && !this.isLoading(); } else if (this._opt.playType === PLAY_TYPE.playbackTF && this.player) { result = this.player.playbackPause; } return result; } isPaused() { return this.isPause(); } /** * * @returns {boolean} */ isPlaybackPause() { let result = false; if (this._opt.playType === PLAY_TYPE.playbackTF && this.player) { result = this.player.playbackPause; } return result; } /** * 是否静音状态 * @returns {Boolean} */ isMute() { let result = true; if (this.player) { result = this.player.isAudioMute(); } return result; } /** * 是否在录制视频 * @returns {*} */ isRecording() { return this.player && this.player.recorder && this.player.recorder.recording || false; } isFullscreen() { let result = false; if (this.player) { result = this.player.fullscreen; } return result; } isWebFullscreen() { let result = false; if (this.player) { result = this.player.webFullscreen; } return result; } /** * 清除延迟 */ clearBufferDelay() { this.debug.log('JbPro', 'clearBufferDelay()'); if (this.player) { this.player.clearBufferDelay(); } else { this.debug.warn('JbPro', 'clearBufferDelay() player is not init'); } } setNetworkDelayTime(time) { this.debug.log('JbPro', `setNetworkDelayTime() ${time}`); time = Number(time); if (time < 1) { this.debug.warn('JbPro', `setNetworkDelayTime() network delay is ${time} second, is too small`); } time = clamp(time, 1, 100); const networkDelay = time * 1000; this._opt.networkDelay = networkDelay; if (this.player) { // s -> ms this.player.updateOption({ networkDelay: networkDelay }, true); } else { this.debug.warn('JbPro', 'setNetworkDelayTime() player is null'); } } /** * 获取解码方式 */ getDecodeType() { let result = ''; if (this.player) { result = this.player.getDecodeType(); } return result; } getRenderType() { let result = ''; if (this.player) { result = this.player.getRenderType(); } return result; } getAudioEngineType() { let result = ''; if (this.player) { result = this.player.getAudioEngineType(); } return result; } /** * get playing timestamp * @returns {number} */ getPlayingTimestamp() { let result = 0; if (this.player) { result = this.player.getPlayingTimestamp(); } return result; } /** * get player now status * @returns {string} */ getStatus() { let result = PLAYER_STATUS.destroy; if (this.player) { if (this.player.loading) { result = PLAYER_STATUS.loading; } else { if (this.player.playing) { result = PLAYER_STATUS.playing; } else { result = PLAYER_STATUS.paused; } } } return result; } getPlayType() { return this.player ? 
this.player._opt.playType : PLAY_TYPE.player; } togglePerformancePanel(flag) { this.debug.log('JbPro', `togglePerformancePanel() ${flag}`); if (!this.player) { this.debug.warn('JbPro', 'togglePerformancePanel() failed, this.player is not init'); return; } const prev = this.player._opt.showPerformance; let toggleResult = !prev; if (isBoolean(flag)) { toggleResult = flag; } if (toggleResult === prev) { this.debug.warn('JbPro', `togglePerformancePanel() failed, showPerformance is prev: ${prev} === now: ${toggleResult}`); return; } this.player.togglePerformancePanel(toggleResult); } /** * */ openZoom() { this.debug.log('JbPro', 'openZoom()'); if (this.player) { this.player.zooming = true; } else { this.debug.warn('JbPro', 'openZoom() failed, this.player is not init'); } } /** * */ closeZoom() { this.debug.log('JbPro', 'closeZoom()'); if (this.player) { this.player.zooming = false; } else { this.debug.warn('JbPro', 'closeZoom() failed, this.player is not init'); } } /** * * @returns {boolean} */ isZoomOpen() { let result = false; if (this.player) { result = this.player.zooming; } return result; } /** * * @param flag */ toggleZoom(flag) { this.debug.log('JbPro', `toggleZoom() ${flag}`); if (!isBoolean(flag)) { flag = !this.isZoomOpen(); } if (isTrue(flag)) { this.openZoom(); } else if (isFalse(flag)) { this.closeZoom(); } } /** * */ expandZoom() { this.debug.log('JbPro', 'expandZoom()'); if (this.player && this.player.zoom && this.player.zooming) { this.player.zoom.expandPrecision(); } else { this.debug.warn('JbPro', 'expandZoom() failed, zoom is not open or not init'); } } /** * */ narrowZoom() { this.debug.log('JbPro', 'narrowZoom()'); if (this.player && this.player.zoom && this.player.zooming) { this.player.zoom.narrowPrecision(); } else { this.debug.warn('JbPro', 'narrowZoom() failed, zoom is not open or not init'); } } /** * * @returns {number} */ getCurrentZoomIndex() { let result = 1; if (this.player && this.player.zoom) { result = this.player.zoom.currentZoom; } return result; } //--------------------------------------------------------------- talk start ---------------------------------------------
/** * * @param wsUrl * @param options * @returns {Promise<unknown>} */ startTalk(wsUrl) { let options = arguments.length > 1 && arguments[1] !== undefined ?
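// Two-way audio ("talk") helpers below: startTalk(wsUrl, options) initialises the talk pipeline
// and resolves once the talk websocket stream is up, stopTalk() tears it down, and
// setTalkVolume() expects a 0-100 value. Hedged usage sketch (assumes `player` is an instance
// created from this bundle; the wss url is illustrative only):
//
//   player.startTalk('wss://example.com/talk')
//     .then(() => player.setTalkVolume(80))
//     .catch((e) => console.error('startTalk failed', e));
//   // later: player.stopTalk();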
  //--------------------------------------------------------------- talk start ---------------------------------------------

  /**
   *
   * @param wsUrl
   * @param options
   * @returns {Promise<unknown>}
   */
  startTalk(wsUrl) {
    let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
    return new Promise((resolve, reject) => {
      this.debug.log('JbPro', 'startTalk()', wsUrl, options);
      this._initTalk(options);
      this.talk.startTalk(wsUrl).then(() => {
        resolve();
        this.talk.once(EVENTS.talkStreamClose, () => {
          this.debug.warn('JbPro', 'talk stream close');
          this.stopTalk().catch(e => {});
        });
        this.talk.once(EVENTS.talkStreamError, e => {
          this.debug.warn('JbPro', 'talk stream error', e);
          this.stopTalk().catch(e => {});
        });
        this.talk.once(EVENTS.talkStreamInactive, () => {
          this.debug.warn('JbPro', 'talk stream inactive');
          this.stopTalk().catch(e => {});
        });
      }).catch(e => {
        reject(e);
      });
    });
  }

  /**
   *
   * @returns {Promise<unknown>}
   */
  stopTalk() {
    return new Promise((resolve, reject) => {
      this.debug.log('JbPro', 'stopTalk()');
      if (!this.talk) {
        return reject('stopTalk() talk is not init');
      }
      this.talk.destroy();
      resolve();
    });
  }

  /**
   *
   * @returns {Promise<unknown>}
   */
  getTalkVolume() {
    return new Promise((resolve, reject) => {
      if (!this.talk) {
        return reject('getTalkVolume() talk is not init');
      }
      let result = this.talk.volume;
      resolve(result);
    });
  }

  /**
   *
   * @param volume
   * @returns {Promise<unknown>}
   */
  setTalkVolume(volume) {
    return new Promise((resolve, reject) => {
      this.debug.log('JbPro', 'setTalkVolume()', volume);
      if (!this.talk) {
        return reject('setTalkVolume() talk is not init');
      }
      this.talk.setVolume(volume / 100);
      resolve();
    });
  }

  //--------------------------------------------------------------- talk end ---------------------------------------------
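
  // --- Usage sketch (comment only) --------------------------------------------
  // Two-way audio ("talk") is started against a websocket URL and stopped
  // explicitly. The wss URL below is a placeholder, and `jessibuca` is an
  // instance assumed to exist; setTalkVolume() takes 0-100 and is scaled to
  // 0-1 internally (see setVolume(volume / 100) above).
  //
  //   jessibuca.startTalk('wss://example.com/talk', {})
  //     .then(() => jessibuca.setTalkVolume(80))
  //     .catch((e) => console.error('startTalk failed', e));
  //   // later:
  //   jessibuca.stopTalk().catch(() => {});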
  setNakedFlowFps(fps) {
    return new Promise((resolve, reject) => {
      this.debug.log('JbPro', 'setNakedFlowFps()', fps);
      if (isEmpty(fps)) {
        return reject('setNakedFlowFps() fps is empty');
      }
      let _fps = Number(fps);
      _fps = clamp(_fps, 1, 100);
      this._opt.nakedFlowFps = _fps;
      if (this.player) {
        this.player.updateOption({ nakedFlowFps: _fps });
      } else {
        this.debug.warn('JbPro', 'setNakedFlowFps() player is null');
      }
      resolve();
    });
  }

  getCrashLog(type) {
    let error = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
    if (!this.player) {
      return;
    }
    const statsData = this.player.getAllStatsData();
    const player = this.player;
    let result = {
      url: this._opt.url,
      playType: player.isPlayback() ? 'playback' : 'live',
      demuxType: player.getDemuxType(),
      decoderType: player.getDecodeType(),
      renderType: player.getRenderType(),
      videoInfo: { encType: '', width: '', height: '' },
      audioInfo: { encType: '', sampleRate: '', channels: '' },
      audioEngine: player.getAudioEngineType(),
      allTimes: statsData.pTs,
      timestamp: now$2(), // current timestamp
      type: type,
      error: errorToString(error) || type
    };
    if (player.video) {
      const videoInfo = player.video.videoInfo || {};
      result.videoInfo = {
        encType: videoInfo.encType || '',
        width: videoInfo.width || '',
        height: videoInfo.height || ''
      };
    }
    if (player.audio) {
      const audioInfo = player.audio.audioInfo || {};
      result.audioInfo = {
        encType: audioInfo.encType || '',
        sampleRate: audioInfo.sampleRate || '',
        channels: audioInfo.channels || ''
      };
    }
    return result;
  }

  updateDebugLevel(level) {
    this.debug.log('JbPro', 'updateDebugLevel()', level);
    if (!(level === DEBUG_LEVEL.debug || level === DEBUG_LEVEL.warn)) {
      this.debug.warn('JbPro', `updateDebugLevel() level is not valid, level: ${level}`);
      return;
    }
    if (this.player && level === this.player._opt.debugLevel) {
      this.debug.warn('JbPro', `updateDebugLevel() level is same, level: ${level}`);
      return;
    }
    this._opt.debugLevel = level;
    if (this.player) {
      this.player.updateOption({ debugLevel: level }, true);
    } else {
      this.debug.warn('JbPro', 'updateDebugLevel() player is null');
    }
  }

  updateWatermark(config) {
    this.debug.log('JbPro', 'updateWatermark()', config);
    if (this.player) {
      this.player.updateWatermark(config);
    } else {
      this.debug.warn('JbPro', 'updateWatermark() player is not init');
    }
  }

  removeWatermark() {
    this.debug.log('JbPro', 'removeWatermark()');
    if (this.player) {
      this.player.removeWatermark();
    } else {
      this.debug.warn('JbPro', 'removeWatermark() player is not init');
    }
  }

  updateFullscreenWatermark(config) {
    this.debug.log('JbPro', 'updateFullscreenWatermark()', config);
    if (isNotEmptyObject(config)) {
      // update config
      this._opt.fullscreenWatermarkConfig = config;
      const watermarkConfig = formatFullscreenWatermarkOptions(this.$container, config);
      if (!watermarkConfig.watermark_txt) {
        this.debug.warn('JbPro', 'fullscreenWatermarkConfig text is empty');
        return;
      }
      this.watermark.load(watermarkConfig);
    } else {
      this.debug.warn('JbPro', `updateFullscreenWatermark() config is not valid, config: ${config}`);
    }
  }

  removeFullscreenWatermark() {
    this.debug.log('JbPro', 'removeFullscreenWatermark()');
    if (this.watermark) {
      this.watermark.remove();
    } else {
      this.debug.warn('JbPro', 'removeFullscreenWatermark() watermark is not init');
    }
  }

  faceDetectOpen() {
    this.debug.log('JbPro', 'faceDetectOpen()');
    if (this.player) {
      this.player.faceDetect(true);
    } else {
      this.debug.warn('JbPro', 'faceDetectOpen() player is not init');
    }
  }

  faceDetectClose() {
    this.debug.log('JbPro', 'faceDetectClose()');
    if (this.player) {
      this.player.faceDetect(false);
    } else {
      this.debug.warn('JbPro', 'faceDetectClose() player is not init');
    }
  }

  objectDetectOpen() {
    this.debug.log('JbPro', 'objectDetectOpen()');
    if (this.player) {
      this.player.objectDetect(true);
    } else {
      this.debug.warn('JbPro', 'objectDetectOpen() player is not init');
    }
  }

  objectDetectClose() {
    this.debug.log('JbPro', 'objectDetectClose()');
    if (this.player) {
      this.player.objectDetect(false);
    } else {
      this.debug.warn('JbPro', 'objectDetectClose() player is not init');
    }
  }

  sendWebsocketMessage(msg) {
    this.debug.log('JbPro', 'sendWebsocketMessage()', msg);
    if (this.player) {
      this.player.sendWebsocketMessage(msg);
    } else {
      this.debug.warn('JbPro', 'sendWebsocketMessage() player is not init');
    }
  }

  //
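  // --- Usage sketch (comment only) --------------------------------------------
  // The watermark and AI-detection switches above are plain toggles. The config
  // shape passed to updateWatermark() below (a simple text watermark) is an
  // assumption for illustration; the real watermarkConfig schema may differ.
  //
  //   jessibuca.updateWatermark({ text: { content: 'camera-01' } });
  //   jessibuca.faceDetectOpen();
  //   // ...
  //   jessibuca.faceDetectClose();
  //   jessibuca.removeWatermark();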
  addContentToCanvas(contentList) {
    this.debug.log('JbPro', 'addContentToCanvas()');
    if (this.player) {
      this.player.addContentToCanvas(contentList);
    } else {
      this.debug.warn('JbPro', 'addContentToCanvas() player is not init');
    }
  }

  clearContentToCanvas() {
    this.debug.log('JbPro', 'clearContentToCanvas()');
    if (this.player) {
      this.player.addContentToCanvas([]);
    } else {
      this.debug.warn('JbPro', 'clearContentToCanvas() player is not init');
    }
  }

  //
  addContentToContainer(contentList) {
    this.debug.log('JbPro', 'addContentToContainer()');
    if (this.player) {
      this.player.addContentToContainer(contentList);
    } else {
      this.debug.warn('JbPro', 'addContentToContainer() player is not init');
    }
  }

  //
  clearContentToContainer() {
    this.debug.log('JbPro', 'clearContentToContainer()');
    if (this.player) {
      this.player.addContentToContainer([]);
    } else {
      this.debug.warn('JbPro', 'clearContentToContainer() player is not init');
    }
  }

  setControlHtml(html) {
    this.debug.log('JbPro', 'setControlHtml()', html);
    if (this.player) {
      this.player.setControlHtml(html);
    } else {
      this.debug.warn('JbPro', 'setControlHtml() player is not init');
    }
  }

  clearControlHtml() {
    this.debug.log('JbPro', 'clearControlHtml()');
    if (this.player) {
      this.player.clearControlHtml();
    } else {
      this.debug.warn('JbPro', 'clearControlHtml() player is not init');
    }
  }

  getVideoInfo() {
    let result = null;
    if (this.player) {
      result = this.player.getVideoInfo();
    }
    return result;
  }

  getAudioInfo() {
    let result = null;
    if (this.player) {
      result = this.player.getAudioInfo();
    }
    return result;
  }

  setSm4CryptoKey(key) {
    this.debug.log('JbPro', 'setSm4CryptoKey()', key);
    key = '' + key;
    if (key.length !== 32) {
      this.debug.warn('JbPro', `setSm4CryptoKey() key is invalid and length is ${key.length} !== 32`);
      return;
    }
    this._opt.sm4CryptoKey = key;
    if (this.player) {
      this.player.updateOption({ sm4CryptoKey: key }, true);
    } else {
      this.debug.warn('JbPro', 'setSm4CryptoKey() player is null');
    }
  }

  setM7sCryptoKey(key) {
    this.debug.log('JbPro', 'setM7sCryptoKey()', key);
    key = '' + key;
    this._opt.m7sCryptoKey = key;
    if (this.player) {
      this.player.updateOption({ m7sCryptoKey: key }, true);
    } else {
      this.debug.warn('JbPro', 'setM7sCryptoKey() player is null');
    }
  }

  setXorCryptoKey(key) {
    this.debug.log('JbPro', 'setXorCryptoKey()', key);
    key = '' + key;
    this._opt.xorCryptoKey = key;
    if (this.player) {
      this.player.updateOption({ xorCryptoKey: key }, true);
    } else {
      this.debug.warn('JbPro', 'setXorCryptoKey() player is null');
    }
  }

  updateLoadingText(text) {
    this.debug.log('JbPro', 'updateLoadingText()', text);
    if (this.player) {
      this.player.updateLoadingText(text);
    } else {
      this.debug.warn('JbPro', 'updateLoadingText() player is null');
    }
  }

  updateIsEmitSEI(isEmitSEI) {
    this.debug.log('JbPro', 'updateIsEmitSEI()', isEmitSEI);
    this._opt.isEmitSEI = isEmitSEI;
    if (this.player) {
      this.player.updateOption({ isEmitSEI: isEmitSEI }, true);
    } else {
      this.debug.warn('JbPro', 'updateIsEmitSEI() player is null');
    }
  }

  /**
   *
   * @param ptz ptz type
   * @param speed 0-9
   * @returns {string}
   */
  getPTZCmd(ptz, speed) {
    this.debug.log('JbPro', 'getPTZCmd()', ptz);
    if (!ptz) {
      this.debug.warn('JbPro', 'getPTZCmd() ptz is null');
      return;
    }
    if (this.player) {
      return getPTZCmd({ type: ptz, index: 0, speed });
    } else {
      this.debug.warn('JbPro', 'getPTZCmd() player is null');
    }
  }
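
  // --- Usage sketch (comment only) --------------------------------------------
  // setSm4CryptoKey() requires a 32-character key (see the length check above).
  // The key value and the PTZ type string 'up' are placeholders, and sending the
  // generated PTZ command over the signalling websocket is an assumed pattern,
  // not something this build mandates.
  //
  //   jessibuca.setSm4CryptoKey('00112233445566778899aabbccddeeff');
  //   const cmd = jessibuca.getPTZCmd('up', 5); // speed 0-9 per the JSDoc above
  //   if (cmd) {
  //     jessibuca.sendWebsocketMessage(cmd);
  //   }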
  // for testing only
  downloadTempNakedFlowFile() {
    return new Promise((resolve, reject) => {
      if (this.player) {
        this.player.downloadNakedFlowFile();
        resolve();
      } else {
        reject('player is not init');
      }
    });
  }

  // for testing only
  downloadTempFmp4File() {
    return new Promise((resolve, reject) => {
      if (this.player) {
        this.player.downloadFmp4File();
        resolve();
      } else {
        reject('player is not init');
      }
    });
  }

  // for testing only
  downloadTempMpeg4File() {
    return new Promise((resolve, reject) => {
      if (this.player) {
        this.player.downloadMpeg4File();
        resolve();
      } else {
        reject('player is not init');
      }
    });
  }

  // for testing only
  downloadTempRtpFile() {
    return new Promise((resolve, reject) => {
      if (this.talk) {
        this.talk.downloadRtpFile();
        resolve();
      } else {
        reject('talk is not init');
      }
    });
  }

  //
  downloadMemoryLog() {
    if (this.memoryLogger) {
      this.memoryLogger.download();
    }
  }

  _getVideoLastIframeInfo() {
    const videoInfo = this.getVideoInfo() || {};
    const lastImage = this.screenshot('', 'png', 0.92, 'base64');
    const tempInfo = {
      loadingBackground: lastImage,
      loadingBackgroundWidth: videoInfo.width || 0,
      loadingBackgroundHeight: videoInfo.height || 0
    };
    // update temp info
    if (tempInfo.loadingBackground && tempInfo.loadingBackgroundWidth && tempInfo.loadingBackgroundHeight) {
      this._tempVideoLastIframeInfo = tempInfo;
    }
    // return temp info
    return this._tempVideoLastIframeInfo || {};
  }

  getExtendBtnList() {
    this.debug.log('JbPro', 'getExtendBtnList()');
    let result = [];
    if (this.player) {
      result = this.player.getExtendBtnList();
    } else {
      this.debug.warn('JbPro', 'getExtendBtnList() player is null');
    }
    return result;
  }

  getFlvMetaData() {
    this.debug.log('JbPro', 'getFlvMetaData()');
    let result = null;
    if (this.player) {
      result = this.player.getMetaData();
    } else {
      this.debug.warn('JbPro', 'getFlvMetaData() player is null');
    }
    return result;
  }

  /**
   *
   * @param interval
   */
  updateAiFaceDetectInterval(interval) {
    this.debug.log('JbPro', 'updateAiFaceDetectInterval()', interval);
    interval = Number(interval);
    const aiFaceDetectInterval = interval * 1000;
    this._opt.aiFaceDetectInterval = aiFaceDetectInterval;
    if (this.player) {
      // s -> ms
      this.player.updateOption({ aiFaceDetectInterval: aiFaceDetectInterval });
    } else {
      this.debug.warn('JbPro', 'updateAiFaceDetectInterval() player is null');
    }
  }

  /**
   *
   * @param level
   */
  updateAiFaceDetectLevel(level) {
    this.debug.log('JbPro', 'updateAiFaceDetectLevel()', level);
    if (!AI_FACE_DETECTOR_LEVEL[level]) {
      this.debug.warn('JbPro', `updateAiFaceDetectLevel() level ${level} is invalid`);
      return;
    }
    const width = AI_FACE_DETECTOR_LEVEL[level];
    this._opt.aiFaceDetectWidth = width;
    if (this.player) {
      this.player.updateOption({ aiFaceDetectWidth: width });
      if (this.player.ai) {
        this.player.ai.updateFaceDetectorConfig({ detectWidth: width });
      }
    } else {
      this.debug.warn('JbPro', 'updateAiFaceDetectLevel() player is null');
    }
  }

  /**
   *
   * @param interval
   */
  updateAiObjectDetectInterval(interval) {
    this.debug.log('JbPro', 'updateAiObjectDetectInterval()', interval);
    interval = Number(interval);
    const aiObjectDetectInterval = interval * 1000;
    this._opt.aiObjectDetectInterval = aiObjectDetectInterval;
    if (this.player) {
      // s -> ms
      this.player.updateOption({ aiObjectDetectInterval: aiObjectDetectInterval });
    } else {
      this.debug.warn('JbPro', 'updateAiObjectDetectInterval() player is null');
    }
  }

  /**
   *
   * @param level
   */
  updateAiObjectDetectLevel(level) {
    this.debug.log('JbPro', 'updateAiObjectDetectLevel()', level);
    if (!AI_OBJECT_DETECTOR_LEVEL[level]) {
      this.debug.warn('JbPro', `updateAiObjectDetectLevel() level ${level} is invalid`);
      return;
    }
    const width = AI_OBJECT_DETECTOR_LEVEL[level];
    this._opt.aiObjectDetectWidth = width;
    if (this.player) {
      this.player.updateOption({ aiObjectDetectWidth: width });
      if (this.player.ai) {
        this.player.ai.updateObjectDetectorConfig({ detectWidth: width });
      }
    } else {
      this.debug.warn('JbPro', 'updateAiObjectDetectLevel() player is null');
    }
  }
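
  // --- Usage sketch (comment only) --------------------------------------------
  // Detection cost can be tuned with the interval setters (seconds, converted
  // to ms above) and a level key looked up in AI_FACE_DETECTOR_LEVEL /
  // AI_OBJECT_DETECTOR_LEVEL. The 'middle' key below is an assumed example;
  // valid keys depend on the build.
  //
  //   jessibuca.updateAiFaceDetectInterval(2);      // run face detection every 2s
  //   jessibuca.updateAiFaceDetectLevel('middle');  // maps to a detect width
  //   jessibuca.updateAiObjectDetectInterval(5);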
  setCryptoKeyUrl(url) {
    this.debug.log('JbPro', 'setCryptoKeyUrl()', url);
    if (!url) {
      return;
    }
    this._opt.cryptoKeyUrl = url;
  }

  showErrorMessageTips(content) {
    this.debug.log('JbPro', 'showErrorMessageTips()', content);
    if (!content) {
      return;
    }
    if (this.player) {
      this.player.showTipsMessageByContent(content);
    } else {
      this.debug.warn('JbPro', 'showErrorMessageTips() player is null');
    }
  }

  setPtzPosition(position) {
    this.debug.log('JbPro', 'setPtzPosition()', position);
    if (!position || isEmptyObject(position)) {
      return;
    }
    if (this.player) {
      this.player.setPtzPosition(position);
    } else {
      this.debug.warn('JbPro', 'setPtzPosition() player is null');
    }
  }

  hideErrorMessageTips() {
    this.debug.log('JbPro', 'hideErrorMessageTips()');
    if (this.player) {
      this.player.hideTipsMessage();
    } else {
      this.debug.warn('JbPro', 'hideErrorMessageTips() player is null');
    }
  }

  // check is meaningful retry
  _checkIsMeaningfulRetry(type) {
    let result = true;
    let retryLog = [];
    if (type === RETRY_TYPE.ws1006) {
      retryLog = this._websocket1006ErrorRetryLog;
    } else if (type === RETRY_TYPE.mseDecodeError) {
      retryLog = this._mseDecodeErrorRetryLog;
    } else if (type === RETRY_TYPE.wcsDecodeError) {
      retryLog = this._wcsDecodeErrorRetryLog;
    }
    // more than 5
    if (retryLog.length >= 5) {
      const start = retryLog[0];
      const end = retryLog[retryLog.length - 1];
      const diff = end - start;
      // less than 10s
      if (diff <= 10 * 1000) {
        this.debug.warn(this.TAG_NAME, `retry type is ${type}, and retry length is ${retryLog.length}, and start is ${start} and end is ${end} and diff is ${diff}`);
        result = false;
      }
    }
    return result;
  }
}

// JessibucaPro.ERROR = EVENTS_ERROR;
// JessibucaPro.EVENTS = JESSIBUCA_EVENTS;

window.JessibucaPro = JessibucaPro;
window.JbPro = JessibucaPro;
window.WebPlayerPro = JessibucaPro;
return JessibucaPro;
}));
//# sourceMappingURL=jessibuca-pro.js.map