-
-
current lang:
-
+
-
-
-
+
diff --git a/fixtures/xgplayer/index.js b/fixtures/xgplayer/index.js
index 112f784b3..3b6e6f876 100644
--- a/fixtures/xgplayer/index.js
+++ b/fixtures/xgplayer/index.js
@@ -41,7 +41,7 @@ function init(index = 0, config = {}) {
url: url
}
},
- url: "",
+ url: "https://lf3-static.bytednsdoc.com/obj/eden-cn/7221eh7pxhfuloj/wangziyi5.23.mp4",
DynamicBg: {
disable: false
},
diff --git a/packages/xgplayer-mp4-loader/package.json b/packages/xgplayer-mp4-loader/package.json
index f34f50cc9..14ab79eda 100755
--- a/packages/xgplayer-mp4-loader/package.json
+++ b/packages/xgplayer-mp4-loader/package.json
@@ -1,6 +1,6 @@
{
"name": "xgplayer-mp4-loader",
- "version": "3.0.11-alpha.10",
+ "version": "3.0.9-rc.20",
"main": "dist/index.min.js",
"module": "es/index.js",
"typings": "es/index.d.ts",
@@ -15,15 +15,15 @@
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public",
- "tag": "alpha"
+ "tag": "rc"
},
"license": "MIT",
"unpkgFiles": [
"dist"
],
"dependencies": {
- "xgplayer-transmuxer": "3.0.11-alpha.10",
- "xgplayer-streaming-shared": "3.0.11-alpha.10",
+ "xgplayer-transmuxer": "3.0.9-rc.12",
+ "xgplayer-streaming-shared": "3.0.11-rc.3",
"eventemitter3": "^4.0.7"
},
"peerDependencies": {
diff --git a/packages/xgplayer-mp4-loader/src/config.js b/packages/xgplayer-mp4-loader/src/config.js
index 3dbf36a27..5ba2321d0 100644
--- a/packages/xgplayer-mp4-loader/src/config.js
+++ b/packages/xgplayer-mp4-loader/src/config.js
@@ -1,10 +1,14 @@
export function getConfig (cfg) {
+ if (typeof cfg.fixEditListOffset !== 'boolean') {
+ delete cfg.fixEditListOffset
+ }
return {
vid: '',
moovEnd: 80000,
segmentDuration: 2,
maxDownloadInfoSize: 30,
responseType: 'arraybuffer',
+ fixEditListOffset: true,
cache: null,
// ...xgplayer-streaming-shared/src/net/config
...cfg
diff --git a/packages/xgplayer-mp4-loader/src/gopItem.js b/packages/xgplayer-mp4-loader/src/gopItem.js
new file mode 100644
index 000000000..75e3ef126
--- /dev/null
+++ b/packages/xgplayer-mp4-loader/src/gopItem.js
@@ -0,0 +1,21 @@
+export default class GopItem {
+ constructor () {
+ this.frames = []
+ this.dur = 0
+ this.minPts = Number.MAX_VALUE
+ this.maxPts = 0
+ }
+ appendFrame (frame) {
+ this.frames.push(frame)
+ this.dur += frame.duration
+ }
+ calMinMaxPts (frame) {
+ if (frame.pts < this.minPts){
+ this.minPts = frame.pts
+ }
+ const pts = frame.pts + frame.duration
+ if (pts > this.maxPts){
+ this.maxPts = pts
+ }
+ }
+}
diff --git a/packages/xgplayer-mp4-loader/src/loader.js b/packages/xgplayer-mp4-loader/src/loader.js
index 14358b217..aada4d3a2 100644
--- a/packages/xgplayer-mp4-loader/src/loader.js
+++ b/packages/xgplayer-mp4-loader/src/loader.js
@@ -1,9 +1,9 @@
-import { NetLoader, concatUint8Array, Logger, EVENT } from 'xgplayer-streaming-shared'
+import { NetLoader, newUint8Array, concatUint8Array, Logger, EVENT } from 'xgplayer-streaming-shared'
import { MP4Parser } from 'xgplayer-transmuxer'
import { getConfig } from './config'
import { MediaError } from './error'
import { Cache } from './cache'
-import { isNumber, moovToMeta, moovToSegments } from './utils'
+import { isNumber, moovToMeta, moovToSegments, isSegmentsOk } from './utils'
import EventEmitter from 'eventemitter3'
export class MP4Loader extends EventEmitter {
@@ -17,6 +17,7 @@ export class MP4Loader extends EventEmitter {
_currentSegmentIndex = -1
_currentLoadingSegmentIndex = -1
buffer
+ bufferDataLen = 0
_error
constructor (config) {
@@ -76,10 +77,32 @@ export class MP4Loader extends EventEmitter {
return this.meta
}
- async loadMetaProcess (cache, [moovStart, moovEnd], onProgress, config) {
+ newBufferArray (size, isExp) {
+ if (!this.buffer) {
+ this.buffer = newUint8Array(size)
+ this.bufferDataLen = 0
+ } else if (isExp || this.buffer?.byteLength < size) {
+ const data = this.buffer.subarray(0,this.bufferDataLen)
+ const temp = newUint8Array((this.buffer?.byteLength || 0) + size)
+ temp.set(data,0)
+ delete this.buffer
+ this.buffer = temp
+ }
+ }
+
+ async loadMetaProcess (cache, [moovStart, moovEnd], onProgress, config = {}) {
this._error = false
this.logger.debug('[loadMetaProcess start], range,', [moovStart, moovEnd])
- const OnProgressHandle = async (data, state, options) => {
+ if (this._config?.memoryOpt) {
+ if (!this.buffer || config?.isExp) {
+ try {
+ this.newBufferArray(moovEnd - moovStart + 1, config?.isExp)
+ } catch (e) {
+ onProgress(null, true, {}, new MediaError(e?.message), {})
+ }
+ }
+ }
+ const OnProgressHandle = async (data, state, options, response) => {
if (this.meta && options?.range && options.range.length > 0 && options.range[1] >= moovEnd) {
state = true
this.logger.debug('[loadMetaProcess],data done,setstate true,[', moovStart, moovEnd, ']')
@@ -89,17 +112,30 @@ export class MP4Loader extends EventEmitter {
this.logger.debug('[loadMetaProcess],data not done,setstate false,[', moovStart, moovEnd, ']')
}
this.logger.debug('[loadMetaProcess],task,[', moovStart, moovEnd, '], range,', options.range, ',dataLen,', (data ? data.byteLength : undefined), ', state,', state, ',err,',this._error)
- !this._error && data && data.byteLength > 0 && onProgress(data, state, options)
+ !this._error && data && data.byteLength > 0 && onProgress(data, state, options, null, response)
if (this.meta.moov || this._error) return
+ let parseData
if (data && data.byteLength > 0) {
- this.buffer = concatUint8Array(this.buffer, data)
- let moov = MP4Parser.findBox(this.buffer, ['moov'])[0]
+ try {
+ if (this._config?.memoryOpt) {
+ this.buffer.set(data, this.bufferDataLen)
+ this.bufferDataLen += data?.byteLength || 0
+ parseData = this.buffer.subarray(0, this.bufferDataLen)
+ } else {
+ this.buffer = concatUint8Array(this.buffer, data)
+ parseData = this.buffer
+ }
+ } catch (e) {
+ onProgress(null, state, options, new MediaError(e?.message), response)
+ return
+ }
+ let moov = MP4Parser.findBox(parseData, ['moov'])[0]
if (!moov) {
- const mdat = MP4Parser.findBox(this.buffer, ['mdat'])[0]
+ const mdat = MP4Parser.findBox(parseData, ['mdat'])[0]
if (state) {
if (!mdat) {
this._error = true
- onProgress(null, state, options, {err:'cannot find moov or mdat box'})
+ onProgress(null, state, options, new MediaError('cannot find moov or mdat box'), response)
return
// throw new MediaError('cannot find moov or mdat box')
} else {
@@ -112,22 +148,22 @@ export class MP4Loader extends EventEmitter {
}
}
if (moov && state && moov.size > moov.data.length) {
- this.logger.debug('[loadMetaProcess],moov not all, range,', options.range[1], ',dataLen,', this.buffer.byteLength, ', state,', state)
- await this.loadMetaProcess(cache, [options.range[1], moov.start + moov.size - 1], onProgress)
+ this.logger.debug('[loadMetaProcess],moov not all, range,', options.range[1], ',dataLen,', this.bufferDataLen, ', state,', state)
+ await this.loadMetaProcess(cache, [options.range[1], moov.start + moov.size - 1], onProgress, {isExp:true})
}
if (moov && moov.size <= moov.data.length && !this.meta.moov) {
const parsedMoov = MP4Parser.moov(moov)
if (!parsedMoov) {
this._error = true
- onProgress(null, state, options, {err:'cannot parse moov box'})
+ onProgress(null, state, options, new MediaError('cannot parse moov box'), response)
return
// throw new MediaError('cannot parse moov box', moov.data)
}
- const segments = moovToSegments(parsedMoov, this._config.segmentDuration)
- if (!segments) {
+ const segments = moovToSegments(parsedMoov, this._config)
+ if (!isSegmentsOk(segments)) {
this._error = true
- onProgress(null, state, options, {err:'cannot parse segments'})
+ onProgress(null, state, options, new MediaError('cannot parse segments'), response)
return
// throw new MediaError('cannot parse segments', moov.data)
}
@@ -136,6 +172,10 @@ export class MP4Loader extends EventEmitter {
const { videoSegments, audioSegments } = segments
this.videoSegments = videoSegments
this.audioSegments = audioSegments
+ if (this._config?.memoryOpt) {
+ delete this.buffer
+ this.bufferDataLen = 0
+ }
this.logger.debug('[loadMetaProcess] moov ok')
onProgress(undefined, state, {
meta: {
@@ -143,14 +183,14 @@ export class MP4Loader extends EventEmitter {
videoSegments,
audioSegments
}
- })
+ }, null, response)
}
}
}
await this.loadData([moovStart, moovEnd || this._config.moovEnd], cache, { onProgress: OnProgressHandle, ...config})
}
- async loadMeta (cache, moovEnd, config) {
+ async loadMeta (cache, moovEnd, config = {}) {
const responses = []
this.logger.debug('[loadMeta start]')
let res = await this.loadData([0, moovEnd || this._config.moovEnd], cache, config)
@@ -181,9 +221,8 @@ export class MP4Loader extends EventEmitter {
if (!parsedMoov) {
throw new MediaError('cannot parse moov box', moov.data)
}
-
- const segments = moovToSegments(parsedMoov, this._config.segmentDuration)
- if (!segments) {
+ const segments = moovToSegments(parsedMoov, this._config)
+ if (!isSegmentsOk(segments)) {
throw new MediaError('cannot parse segments', moov.data)
}
@@ -191,6 +230,8 @@ export class MP4Loader extends EventEmitter {
const { videoSegments, audioSegments } = segments
this.videoSegments = videoSegments
this.audioSegments = audioSegments
+ delete this.buffer
+ this.bufferDataLen = 0
this.logger.debug('[load moov end!!!!!]')
return {
meta: this.meta,
@@ -202,7 +243,7 @@ export class MP4Loader extends EventEmitter {
loadCacheMeta (meta, segmentIndex){
const { moov } = meta
- const segments = moovToSegments(moov, this._config.segmentDuration)
+ const segments = moovToSegments(moov, this._config)
const { videoSegments, audioSegments } = segments
this.videoSegments = videoSegments
this.audioSegments = audioSegments
@@ -282,6 +323,10 @@ export class MP4Loader extends EventEmitter {
this.audioSegments = []
this._currentSegmentIndex = -1
this._currentLoadingSegmentIndex = -1
+ if (this._config?.memoryOpt) {
+ delete this.buffer
+ this.bufferDataLen = 0
+ }
}
async destroy () {
@@ -315,12 +360,12 @@ export class MP4Loader extends EventEmitter {
return res
}
- async loadData (range, cache, config) {
+ async loadData (range, cache, config = {}) {
const cacheKey = this._getCacheKey(range)
const data = await this.cache.get(cacheKey)
let res
if (!data) {
- const url = config && config.url ? config.url : this.url
+ const url = config?.url ? config.url : this.url
res = await this._loader.load(url, { range, vid: this.vid, ...config })
} else {
res = { data, state: true, options: { fromCache: true, range, vid: this.vid } }
diff --git a/packages/xgplayer-mp4-loader/src/utils.js b/packages/xgplayer-mp4-loader/src/utils.js
index 3fe988a60..42d62f3a7 100644
--- a/packages/xgplayer-mp4-loader/src/utils.js
+++ b/packages/xgplayer-mp4-loader/src/utils.js
@@ -1,5 +1,21 @@
+import GopItem from './gopItem'
+function isEdtsApplicable () {
+ let flag = true
+ const userAgent = navigator.userAgent || ''
+ const isChrome = /Chrome/gi.test(userAgent) && !/Edge\//gi.test(userAgent)
-export function moovToSegments (moov, duration) {
+ // M75+ 开始支持负的 dts
+ // https://bugs.chromium.org/p/chromium/issues/detail?id=398141
+ if (isChrome) {
+ const result = userAgent.match(/Chrome\/(\d+)/i)
+ const chromeVersion = result ? parseInt(result[1], 10) : 0
+ flag = !!chromeVersion && chromeVersion >= 75
+ }
+ return flag
+}
+
+export function moovToSegments (moov, config) {
+ const { segmentDuration } = config
const tracks = moov.trak
if (!tracks || !tracks.length) return
const videoTrack = tracks.find(t => t.mdia?.hdlr?.handlerType === 'vide')
@@ -11,95 +27,204 @@ export function moovToSegments (moov, duration) {
let segmentDurations
if (videoTrack) {
- const videoStbl = videoTrack.mdia?.minf?.stbl
- if (!videoStbl) return
- const timescale = videoTrack.mdia.mdhd?.timescale
- const { stts, stsc, stsz, stco, stss, ctts } = videoStbl
- if (!timescale || !stts || !stsc || !stsz || !stco || !stss) return
- videoSegments = getSegments(duration, timescale, stts, stsc, stsz, stco, stss, ctts)
+ videoSegments = getSegments('video', videoTrack, segmentDuration, config)
segmentDurations = videoSegments.map(x => x.duration)
}
if (audioTrack) {
- const audioStbl = audioTrack.mdia?.minf?.stbl
- if (!audioStbl) return
- const timescale = audioTrack.mdia.mdhd?.timescale
- const { stts, stsc, stsz, stco } = audioStbl
- if (!timescale || !stts || !stsc || !stsz || !stco) return
- audioSegments = getSegments(duration, timescale, stts, stsc, stsz, stco, null, null, segmentDurations)
+ audioSegments = getSegments(
+ 'audio',
+ audioTrack,
+ segmentDuration,
+ config,
+ segmentDurations,
+ videoSegments
+ )
}
-
return {
videoSegments,
audioSegments
}
}
-function getSegments (segDuration, timescale, stts, stsc, stsz, stco, stss, ctts, segmentDurations) {
+function getSegments (
+ type,
+ track,
+ segDuration,
+ config,
+ segmentDurations = [],
+ videoSegments
+) {
+ const { fixEditListOffset, fixEditListOffsetThreshold, audioGroupingStrategy, memoryOpt } = config
+ const stbl = track.mdia?.minf?.stbl
+ if (!stbl) {
+ return []
+ }
+
+ const timescale = track.mdia.mdhd?.timescale
+ const { stts, stsc, stsz, stco, stss, ctts } = stbl
+ if (!timescale || !stts || !stsc || !stsz || !stco || (type === 'video' && !stss)) {
+ return []
+ }
+
+ // chrome等浏览器内核为了修复B帧引入的CTS偏移时间,对于edts->elst box中的media_time进行了参考
+ // 目前chrome仅读取media_time,不支持编辑列表的其他用途,因为它们不常见并且由更高级的协议提供更好的服务。
+ // 如果不参考editList信息,一些视频会有音画不同步问题
+ let editListOffset = 0
+ const editList = track.edts?.elst?.entries
+ if (
+ fixEditListOffset &&
+ isEdtsApplicable() &&
+ Array.isArray(editList) &&
+ editList.length > 0
+ ) {
+ const media_time = editList[0].media_time
+ const maxAllowedTime = fixEditListOffsetThreshold
+ ? fixEditListOffsetThreshold * timescale
+ : 5 * timescale
+ if (media_time > 0 && media_time < maxAllowedTime) {
+ editListOffset = media_time
+ }
+ }
+
const frames = []
const gop = []
const gopDuration = []
+ let gopMinPtsArr = [] // 记录每个gop中最小的pts,用于计算每个gop的startTime
+ let gopMaxPtsFrameIdxArr = [] // 记录每个gop中最大的pts,用于计算每个gop的endTime
const stscEntries = stsc.entries
const stcoEntries = stco.entries
const stszEntrySizes = stsz.entrySizes
const stssEntries = stss?.entries
const cttsEntries = ctts?.entries
- let cttsArr
- if (cttsEntries) {
- cttsArr = []
- cttsEntries.forEach(({ count, offset }) => {
- for (let i = 0; i < count; i++) {
- cttsArr.push(offset)
- }
- })
- }
- let keyframeMap
- if (stssEntries) {
- keyframeMap = {}
- stssEntries.forEach(x => { keyframeMap[x - 1] = true })
+ const cttsArr = []
+ const keyframeMap = {}
+ if (!memoryOpt) {
+ if (cttsEntries) {
+ cttsEntries.forEach(({ count, offset }) => {
+ for (let i = 0; i < count; i++) {
+ cttsArr.push(offset)
+ }
+ })
+ }
+ if (stssEntries) {
+ stssEntries.forEach(x => {
+ keyframeMap[x - 1] = true
+ })
+ }
}
let frame
let duration
- let startTime = 0
+ // let startTime = 0
let pos = 0
let chunkIndex = 0
let chunkRunIndex = 0
let offsetInChunk = 0
- let lastSampleInChunk = stscEntries[0].samplesPerChunk
- let lastChunkInRun = stscEntries[1] ? stscEntries[1].firstChunk - 1 : Infinity
+ let lastSampleInChunk = stscEntries.length > 0 ? stscEntries[0].samplesPerChunk : 0
+ let lastChunkInRun = stscEntries.length > 1 && stscEntries[1] ? stscEntries[1].firstChunk - 1 : Infinity
let dts = 0
let gopId = -1
+ let editListApplied = false
+ const beforeCttsInfo = {}
+
+ if (cttsEntries?.length > 0 && editListOffset > 0) {
+ // 参考chromium原生播放时,ffmpeg_demuxer处理edts后的逻辑:
+ // FFmpeg将所有AVPacket dts值根据editListOffset进行偏移,以确保解码器有足够的解码时间(即保持cts不变,dts从负值开始)
+ // FFmpeg对于音频的AVPacket dts/pts虽然也进行了偏移,但在chromium中最后给到decoder时又将其偏移修正回0
+ // 因此,这里的逻辑是为了触发baseMediaDecodeTime变化,并且只修正视频,不处理音频
+ dts -= editListOffset
+ editListApplied = true
+ }
+
+ track.editListApplied = editListApplied
+ let curSyncSampleNum
+ if (memoryOpt) {
+ curSyncSampleNum = stssEntries?.shift()
+ }
stts.entries.forEach(({ count, delta }) => {
- duration = delta // / timescale
+ duration = delta // in timescale
for (let i = 0; i < count; i++) {
frame = {
dts,
- startTime,
duration,
size: stszEntrySizes[pos] || stsz.sampleSize,
offset: stcoEntries[chunkIndex] + offsetInChunk,
index: pos
}
+ // 计算pts
+ if (cttsEntries) {
+ if (memoryOpt) {
+ getCTTSOffset(cttsEntries, pos, beforeCttsInfo)
+ frame.pts = dts + (beforeCttsInfo?.offset || 0)
+ } else {
+ if (cttsArr && pos < cttsArr.length) {
+ frame.pts = dts + cttsArr[pos]
+ }
+ }
+ }
+ if (editListOffset === 0 && pos === 0) {
+ frame.pts = 0
+ }
+ // 补足音频的pts
+ if (frame.pts === undefined) {
+ frame.pts = frame.dts
+ }
if (stssEntries) {
- frame.keyframe = keyframeMap[pos]
+ if (memoryOpt) {
+ if (pos + 1 === curSyncSampleNum) {
+ frame.keyframe = true
+ // Because the stss table is arranged in strictly increasing order of sample number,
+ // Therefore use array.shift to get the next sync sample number
+ curSyncSampleNum = stssEntries.shift()
+ }
+ } else {
+ frame.keyframe = keyframeMap[pos]
+ }
if (frame.keyframe) {
gopId++
- gop.push([frame])
- gopDuration.push(frame.duration)
+ if (!memoryOpt) {
+ gop.push([frame])
+ gopDuration.push(frame.duration)
+ } else {
+ const gopItem = new GopItem()
+ gopItem.appendFrame(frame)
+ gop.push(gopItem)
+ }
} else {
- gop[gop.length - 1].push(frame)
- gopDuration[gop.length - 1] += frame.duration
+ if (!memoryOpt) {
+ gop[gop.length - 1].push(frame)
+ gopDuration[gop.length - 1] += frame.duration
+ } else {
+ gop[gop.length - 1].appendFrame(frame)
+ }
}
frame.gopId = gopId
+ if (memoryOpt) {
+ gop[gop.length - 1].calMinMaxPts(frame)
+ }
}
- if (cttsArr && pos < cttsArr.length) {
- frame.pts = dts + cttsArr[pos]
- }
- if (pos === 0) {
- frame.pts = 0
+ if (!memoryOpt) {
+ // 更新当前gop中最小的pts
+ if (frame.keyframe) {
+ gopMinPtsArr[gopMinPtsArr.length] = frame.pts
+ // gopMinPtsArr.push(frame.pts)
+ } else {
+ if (frame.pts < gopMinPtsArr[gop.length - 1]) {
+ gopMinPtsArr[gop.length - 1] = frame.pts
+ }
+ }
+ // 更新当前gop中最大的pts
+ if (frame.keyframe) {
+ gopMaxPtsFrameIdxArr[gopMaxPtsFrameIdxArr.length] = frame.index
+ // gopMaxPtsFrameIdxArr.push(frame.index)
+ } else if (gop.length > 0 && gopMaxPtsFrameIdxArr[gop.length - 1] !== undefined) {
+ const curMaxPts = frames[gopMaxPtsFrameIdxArr[gop.length - 1]]?.pts
+ if (curMaxPts !== undefined && frame.pts > curMaxPts) {
+ gopMaxPtsFrameIdxArr[gop.length - 1] = frame.index
+ }
+ }
}
- frames.push(frame)
- startTime += duration
+ frames[frames.length] = frame
dts += delta
pos++
@@ -110,7 +235,9 @@ function getSegments (segDuration, timescale, stts, stsc, stsz, stco, stss, ctts
offsetInChunk = 0
if (chunkIndex >= lastChunkInRun) {
chunkRunIndex++
- lastChunkInRun = stscEntries[chunkRunIndex + 1] ? stscEntries[chunkRunIndex + 1].firstChunk - 1 : Infinity
+ lastChunkInRun = stscEntries[chunkRunIndex + 1]
+ ? stscEntries[chunkRunIndex + 1].firstChunk - 1
+ : Infinity
}
lastSampleInChunk += stscEntries[chunkRunIndex].samplesPerChunk
}
@@ -118,51 +245,136 @@ function getSegments (segDuration, timescale, stts, stsc, stsz, stco, stss, ctts
})
const l = frames.length
- if (!l || (stss && !frames[0].keyframe)) return
+ if (!l || (stss && !frames[0].keyframe)) {
+ return []
+ }
const segments = []
let segFrames = []
let time = 0
- let lastFrame
let adjust = 0
- const pushSegment = (duration) => {
- lastFrame = segFrames[segFrames.length - 1]
+ let segMinPts = 0
+ let segMaxPts = 0
+ let segLastFrames
+ let segMaxPtsFrame
+ const pushSegment = (duration, startGopIdx, endGopIdx) => {
+ segLastFrames = segFrames[segFrames.length - 1]
+ if (memoryOpt) {
+ segMinPts = gop?.length > 0 ? gop[startGopIdx].minPts : segFrames[0].pts
+ segMaxPts = gop?.length > 0 ? gop[endGopIdx].maxPts : (segLastFrames.pts + segLastFrames.duration)
+ } else {
+ segMinPts = gopMinPtsArr[startGopIdx]
+ segMaxPtsFrame = frames[gopMaxPtsFrameIdxArr[endGopIdx]]
+ segMaxPts = segMaxPtsFrame.pts + segMaxPtsFrame.duration
+ }
+ // 因为强制把视频第一帧的pts改为0 ,所以第一个gop的时长可能和endTime - startTime对应不上
+ // 需要修正下,不然音频根据视频gop时长截取的第一个关键帧起始的误差较大
+ if (segments.length === 0) {
+ const diff = segMaxPts - segMinPts
+ duration = diff / timescale
+ }
segments.push({
index: segments.length,
- startTime: (segments[segments.length - 1]?.endTime || segFrames[0].startTime / timescale),
- endTime: (lastFrame.startTime + lastFrame.duration) / timescale,
+ startTime: segMinPts / timescale, // (segments[segments.length - 1]?.endTime || segFrames[0].startTime / timescale),
+ endTime: segMaxPts / timescale,
duration: duration,
- range: [segFrames[0].offset, lastFrame.offset + lastFrame.size],
+ range: [segFrames[0].offset, segLastFrames.offset + segLastFrames.size - 1],
frames: segFrames
})
- time = 0
+
+ if (audioGroupingStrategy !== 1) {
+ time = 0
+ }
+
segFrames = []
}
+ let segGopStartIdx = 0
if (stss) {
const duration = segDuration * timescale
for (let i = 0, l = gop.length; i < l; i++) {
- time += gopDuration[i]
- segFrames.push(...gop[i])
+ if (memoryOpt) {
+ time += gop[i].dur
+ segFrames.push(...gop[i].frames)
+ } else {
+ time += gopDuration[i]
+ segFrames.push(...gop[i])
+ }
if (i + 1 < l) {
if (i === 0 || time > duration) {
- pushSegment(time / timescale)
+ pushSegment(time / timescale, segGopStartIdx, i)
+ time = 0
+ segGopStartIdx = i + 1
}
} else {
- pushSegment(time / timescale)
+ pushSegment(time / timescale, segGopStartIdx, i)
+ time = 0
+ segGopStartIdx = i + 1
}
}
} else {
- segmentDurations = segmentDurations || []
+ if (!memoryOpt) {
+ gopMinPtsArr = []
+ gopMaxPtsFrameIdxArr = []
+ }
let duration = segmentDurations[0] || segDuration
- for (let i = 0; i < l; i++) {
- segFrames.push(frames[i])
- time += frames[i].duration
- const curTime = time / timescale
- if (i + 1 >= l || curTime + adjust >= duration) {
- adjust += curTime - duration
- pushSegment(curTime)
- duration = segmentDurations[segments.length] || segDuration
+
+ if (audioGroupingStrategy === 1) {
+ for (let i = 0, nextEndTime; i < l; i++) {
+ const curFrame = frames[i]
+ const nextFrame = frames[i + 1]
+ const isFinalFrame = i === l - 1
+ segFrames.push(curFrame)
+ time += curFrame.duration
+ const curEndTime = nextEndTime || time / timescale
+ // 这里使用下一帧的目的是将每个分组的起始音频帧应该覆盖或包含GOP的开始时间,
+ // MSE在remove buffer时会将gop结束时间点的那个音频帧删掉,这个策略就是为了
+ // 防止之后再添加新的Coded Frame Group时由于缺少了一帧音频容易产生Buffer gap
+ nextEndTime = (nextFrame ? time + nextFrame.duration : 0) / timescale
+ if (
+ isFinalFrame ||
+ (
+ videoSegments[segments.length]
+ ? nextEndTime > videoSegments[segments.length].endTime /* 有视频帧,使用GOP时间戳进行分割 */
+ : nextEndTime - segFrames[0].pts / timescale >= duration /* 无视频帧(包含音频帧大于视频时长的剩余音频帧分组的场景),使用配置的切片时间或最后一个GOP时长进行分割 */
+ )
+ ) {
+ if (!memoryOpt) {
+ gopMinPtsArr.push(segFrames[0].pts)
+ gopMaxPtsFrameIdxArr.push(segFrames[segFrames.length - 1].index)
+ }
+ pushSegment(curEndTime, segments.length, segments.length)
+ duration = segmentDurations[segments.length] || segDuration
+ }
+ }
+ } else {
+ for (let i = 0, nextEndTime; i < l; i++) {
+ const curFrame = frames[i]
+ const nextFrame = frames[i + 1]
+ const isFinalFrame = i === l - 1
+ segFrames.push(curFrame)
+ time += curFrame.duration
+ const curEndTime = nextEndTime || time / timescale
+ nextEndTime = (nextFrame ? time + nextFrame.duration : 0) / timescale
+ if (
+ isFinalFrame ||
+ // 这里使用下一帧的目的是将每个分组的起始音频帧应该覆盖或包含GOP的开始时间,
+ // MSE在remove buffer时会将gop结束时间点的那个音频帧删掉,这个策略就是为了
+ // 防止之后再添加新的Coded Frame Group时由于缺少了一帧音频容易产生Buffer gap
+ nextEndTime + adjust >= duration
+ ) {
+ if (audioGroupingStrategy === 2) {
+ adjust += time / timescale - duration
+ } else {
+ adjust += nextEndTime - duration
+ }
+ if (!memoryOpt) {
+ gopMinPtsArr.push(segFrames[0].pts)
+ gopMaxPtsFrameIdxArr.push(segFrames[segFrames.length - 1].index)
+ }
+ pushSegment(curEndTime, segments.length, segments.length)
+ duration = segmentDurations[segments.length] || segDuration
+ }
}
}
}
@@ -170,6 +382,36 @@ function getSegments (segDuration, timescale, stts, stsc, stsz, stco, stss, ctts
return segments
}
+function getCTTSOffset (cttsEntries, frameIndex, beforeCttsInfo) {
+ // const ret = {}
+ beforeCttsInfo.offset = 0
+ const beforeFrameNum = beforeCttsInfo?.beforeFrameNum || 0
+ let currentCttsIdx = beforeCttsInfo?.usedCttsIdx || 0
+ if (!cttsEntries || cttsEntries?.length <= 0 || beforeCttsInfo?.usedCttsIdx >= cttsEntries.length) {
+ beforeCttsInfo.offset = 0
+ beforeCttsInfo.usedCttsIdx = currentCttsIdx
+ // curUsedCttsIdx前的count的累计值
+ beforeCttsInfo.beforeFrameNum = beforeFrameNum
+ } else {
+    const currentCTTS = cttsEntries[currentCttsIdx]
+    const count = currentCTTS?.count || 1
+    if (frameIndex < beforeFrameNum + count) {
+      beforeCttsInfo.offset = currentCTTS?.offset || 0
+    } else {
+      currentCttsIdx ++
+      const newCTTS = cttsEntries[currentCttsIdx]
+      if (!newCTTS) {
+        beforeCttsInfo.offset = 0
+        beforeCttsInfo.beforeFrameNum = beforeFrameNum + 1
+      } else {
+        beforeCttsInfo.offset = newCTTS.offset || 0
+        beforeCttsInfo.beforeFrameNum = beforeFrameNum + currentCTTS.count
+ }
+ beforeCttsInfo.usedCttsIdx = currentCttsIdx
+ }
+ }
+}
+
export function moovToMeta (moov) {
let videoCodec = ''
let audioCodec = ''
@@ -197,7 +439,7 @@ export function moovToMeta (moov) {
width = e1.width
height = e1.height
videoTimescale = videoTrack.mdia?.mdhd?.timescale
- videoCodec = (e1.avcC || e1.hvcC)?.codec
+ videoCodec = (e1.avcC || e1.hvcC || e1.vvcC)?.codec
if (e1.type === 'encv') {
defaultKID = e1.sinf?.schi?.tenc.default_KID
}
@@ -234,3 +476,17 @@ export function moovToMeta (moov) {
export function isNumber (n) {
return typeof n === 'number' && !Number.isNaN(n)
}
+
+
+export function isSegmentsOk (segments) {
+ if (!segments) {
+ return false
+ }
+ const {audioSegments , videoSegments} = segments
+ const v = !videoSegments || videoSegments.length === 0
+ const a = !audioSegments || audioSegments.length === 0
+ if (v && a) {
+ return false
+ }
+ return true
+}
diff --git a/packages/xgplayer-mp4/src/error.js b/packages/xgplayer-mp4/src/error.js
index 2b39500e2..4ae9c4588 100644
--- a/packages/xgplayer-mp4/src/error.js
+++ b/packages/xgplayer-mp4/src/error.js
@@ -20,7 +20,7 @@ const ERROR_CODES = {
other: ERR_CODE[ERR.OTHER], // -499989,
waitTimeout: ERR_CODE[ERR.RUNTIME][ERR.SUB_TYPES.BUFFERBREAK_ERROR],// -499791,
waitTimeoutWithHidden : ERR_CODE[ERR.RUNTIME][ERR.SUB_TYPES.WAITING_TIMEOUT_ERROR],
- drm: ERR_CODE[ERR.DRM][ERR.SUB_TYPES.LICENSE],
+ drm: ERR_CODE[ERR.DRM][ERR.SUB_TYPES.LICENSE]
}
const ERROR_TYPES = ERR
@@ -54,8 +54,8 @@ class NetWorkError {
ext: context,
mediaError: {
code: _errCode,
- message: context?.httpText || context?.message,
- },
+ message: context?.httpText || context?.message
+ }
}
}
}
@@ -71,8 +71,8 @@ class ParserError {
ext,
mediaError: {
code: errorCode,
- message: ext.msg,
- },
+ message: ext.msg
+ }
}
}
}
@@ -83,5 +83,5 @@ export {
ParserError,
ERROR_CODES,
ERROR_TYPES,
- getErrorCodeByHttpCode,
+ getErrorCodeByHttpCode
}
diff --git a/packages/xgplayer-mp4/src/mp4.js b/packages/xgplayer-mp4/src/mp4.js
index 72b48d124..7774c7054 100644
--- a/packages/xgplayer-mp4/src/mp4.js
+++ b/packages/xgplayer-mp4/src/mp4.js
@@ -66,6 +66,7 @@ class MP4 extends EventEmitter {
retryDelay: this.options.retryDelay,
timeout: this.options.timeout,
...options.reqOptions,
+ memoryOpt: this.options.memoryOpt,
openLog: checkOpenLog()
})
this.MP4Demuxer = null
@@ -432,7 +433,7 @@ class MP4 extends EventEmitter {
} else {
try {
if (!this.MP4Demuxer) {
- this.MP4Demuxer = new MP4Demuxer(this.videoTrak, this.audioTrak, null,{openLog: checkOpenLog()})
+ this.MP4Demuxer = new MP4Demuxer(this.videoTrak, this.audioTrak, null,{openLog: checkOpenLog(), memoryOpt: this.options.memoryOpt})
}
const demuxRet = this.MP4Demuxer.demuxPart(buffer, start, videoIndexRange, audioIndexRange, this.meta.moov, this.useEME, this.kidValue)
if (!this.FMP4Remuxer && (!this.checkCodecH265() || this.options.supportHevc)) {
diff --git a/packages/xgplayer-mp4/src/mp4Plugin.js b/packages/xgplayer-mp4/src/mp4Plugin.js
index 27ed3dbd5..4f06b4594 100644
--- a/packages/xgplayer-mp4/src/mp4Plugin.js
+++ b/packages/xgplayer-mp4/src/mp4Plugin.js
@@ -403,7 +403,7 @@ export default class Mp4Plugin extends BasePlugin {
/**
* 销毁MSE对象 // 在重用MSE的时候,如果降级到video原生播放,单实例复用时,需要重新绑定url.所以降级到video原生的需要删除mse对象
*/
- async destroyMSE() {
+ async destroyMSE () {
await this.mse?.unbindMedia()
this.mse = null
}
diff --git a/packages/xgplayer-streaming-shared/package.json b/packages/xgplayer-streaming-shared/package.json
index 76991e38e..01097420e 100755
--- a/packages/xgplayer-streaming-shared/package.json
+++ b/packages/xgplayer-streaming-shared/package.json
@@ -1,6 +1,6 @@
{
"name": "xgplayer-streaming-shared",
- "version": "3.0.11-alpha.10",
+ "version": "3.0.11-rc.3",
"main": "dist/index.min.js",
"module": "es/index.js",
"typings": "es/index.d.ts",
@@ -15,7 +15,7 @@
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public",
- "tag": "alpha"
+ "tag": "rc"
},
"license": "MIT",
"dependencies": {
diff --git a/packages/xgplayer-streaming-shared/src/net/config.js b/packages/xgplayer-streaming-shared/src/net/config.js
index 90a57f865..d96619330 100644
--- a/packages/xgplayer-streaming-shared/src/net/config.js
+++ b/packages/xgplayer-streaming-shared/src/net/config.js
@@ -28,6 +28,7 @@ export function getConfig (cfg) {
referrerPolicy: undefined,
integrity: undefined,
onProcessMinLen: 0,
+ processMaxGapTime: Infinity, // process流式获取数据时,在请求没有结束前,两次读取到数据的最大超时时间
...cfg
}
}
diff --git a/packages/xgplayer-streaming-shared/src/net/fetch.js b/packages/xgplayer-streaming-shared/src/net/fetch.js
index 00b37e02d..df335d2e3 100644
--- a/packages/xgplayer-streaming-shared/src/net/fetch.js
+++ b/packages/xgplayer-streaming-shared/src/net/fetch.js
@@ -19,6 +19,7 @@ export class FetchLoader extends EventEmitter {
_onProcessMinLen = 0
_onCancel = null
_priOptions = null // 比较私有化的参数传递,回调时候透传
+ _processMaxGapTime = Infinity
constructor () {
super()
@@ -49,6 +50,7 @@ export class FetchLoader extends EventEmitter {
referrer,
referrerPolicy,
onProcessMinLen,
+ processMaxGapTime,
priOptions
}) {
this._logger = logger
@@ -57,10 +59,12 @@ export class FetchLoader extends EventEmitter {
this._onCancel = onCancel
this._abortController = typeof AbortController !== 'undefined' && new AbortController()
this._running = true
+ this._receivedLength = 0
this._index = index
this._range = range || [0, 0]
this._vid = vid || url
this._priOptions = priOptions || {}
+ this._processMaxGapTime = processMaxGapTime
const init = {
method,
headers,
@@ -94,9 +98,9 @@ export class FetchLoader extends EventEmitter {
}
if (timeout) {
- this._timeoutTimer = setTimeout(() => {
+ this._timeoutTimer = setTimeout(async () => {
isTimeout = true
- this.cancel()
+ await this.cancel()
if (onTimeout) {
const error = new NetError(url, init, null, 'timeout')
error.isTimeout = true
@@ -216,6 +220,7 @@ export class FetchLoader extends EventEmitter {
let startTime
let endTime
+ let lastReadDataTime = Date.now()
const pump = async () => {
startTime = Date.now()
try {
@@ -240,12 +245,12 @@ export class FetchLoader extends EventEmitter {
}
const curLen = data.value ? data.value.byteLength : 0
this._receivedLength += curLen
- this._logger.debug('【fetchLoader,onProgress call】,task,', this._range, ', start,', startByte, ', end,', startRange + this._receivedLength, ', done,', data.done)
+ // this._logger.debug('【fetchLoader,onProgress call】,task,', this._range, ', start,', startByte, ', end,', startRange + this._receivedLength, ', done,', data.done)
let retData
if (this._onProcessMinLen > 0) {
if (this._writeIdx + curLen >= this._onProcessMinLen || data.done) {
retData = new Uint8Array(this._writeIdx + curLen)
- retData.set(this._cache.slice(0, this._writeIdx), 0)
+ retData.set(this._cache.subarray(0, this._writeIdx), 0)
curLen > 0 && retData.set(data.value, this._writeIdx)
this._writeIdx = 0
this._logger.debug('【fetchLoader,onProgress enough】,done,', data.done, ',len,', retData.byteLength, ', writeIdx,', this._writeIdx)
@@ -257,7 +262,7 @@ export class FetchLoader extends EventEmitter {
} else if (curLen > 0) {
const temp = new Uint8Array(this._writeIdx + curLen + 2048)
this._logger.debug('【fetchLoader,onProgress extra start】,size,', this._writeIdx + curLen + 2048, ', datalen,', curLen, ', writeIdx,', this._writeIdx)
- temp.set(this._cache.slice(0, this._writeIdx), 0)
+ temp.set(this._cache.subarray(0, this._writeIdx), 0)
curLen > 0 && temp.set(data.value, this._writeIdx)
this._writeIdx += curLen
delete this._cache
@@ -269,6 +274,7 @@ export class FetchLoader extends EventEmitter {
retData = data.value
}
if (retData && retData.byteLength > 0 || data.done) {
+ this._logger.debug('【fetchLoader,onProgress call】,task,', this._range, ', start,', startByte, ', end,', startRange + this._receivedLength, ', done,', data.done)
onProgress(retData, data.done, {
range: [this._range[0] + this._receivedLength - (retData ? retData.byteLength : 0), this._range[0] + this._receivedLength],
vid: this._vid,
@@ -279,6 +285,15 @@ export class FetchLoader extends EventEmitter {
firstByteTime,
priOptions:this._priOptions
}, response)
+ lastReadDataTime = Date.now()
+ } else if (Date.now() - lastReadDataTime >= this._processMaxGapTime) {
+ this._logger.debug(`[onProgress timeout],task: ${JSON.stringify(this._range)} done: ${data.done} processMaxGapTime: ${this._processMaxGapTime}`)
+ const error = new NetError(response.url, null, response, 'process timeout')
+ error.options = {index: this._index, range: this._range, vid: this._vid, priOptions: this._priOptions}
+ this._running = false
+ await this.cancel()
+ this.reject(error)
+ return
}
if (!data.done) {
pump()
diff --git a/packages/xgplayer-streaming-shared/src/streaming-helper.js b/packages/xgplayer-streaming-shared/src/streaming-helper.js
index 8f1d488d2..943aa43ec 100644
--- a/packages/xgplayer-streaming-shared/src/streaming-helper.js
+++ b/packages/xgplayer-streaming-shared/src/streaming-helper.js
@@ -1,3 +1,4 @@
+export const NEW_ARRAY_MAX_CNT = 10
export function isMediaPlaying (media) {
return media && !media.paused && !media.ended && media.playbackRate !== 0 && media.readyState !== 0
}
@@ -27,7 +28,8 @@ export function getVideoPlaybackQuality (video) {
export function concatUint8Array (...arr) {
arr = arr.filter(Boolean)
if (arr.length < 2) return arr[0]
- const data = new Uint8Array(arr.reduce((p, c) => p + c.byteLength, 0))
+ const size = arr.reduce((p, c) => p + c.byteLength, 0)
+ const data = newUint8Array(size)
let prevLen = 0
arr.forEach((d) => {
data.set(d, prevLen)
@@ -36,6 +38,28 @@ export function concatUint8Array (...arr) {
return data
}
+export function newUint8Array (size) {
+ let cnt = 0
+ let array
+ while (cnt < NEW_ARRAY_MAX_CNT) {
+ try {
+ array = new Uint8Array(size)
+ if (array && array.byteLength > 0) {
+ break
+ } else {
+ cnt++
+ }
+ } catch (e) {
+ if (cnt < NEW_ARRAY_MAX_CNT - 1) {
+ cnt++
+ } else {
+ throw new Error(`new array failed final,${e?.message}`)
+ }
+ }
+ }
+ return array
+}
+
export function sleep (t = 0) {
return new Promise((resolve) => setTimeout(resolve, t))
}
diff --git a/packages/xgplayer-transmuxer/package.json b/packages/xgplayer-transmuxer/package.json
index 485875cfb..2d81f6bf6 100755
--- a/packages/xgplayer-transmuxer/package.json
+++ b/packages/xgplayer-transmuxer/package.json
@@ -1,6 +1,6 @@
{
"name": "xgplayer-transmuxer",
- "version": "3.0.11-alpha.10",
+ "version": "3.0.9-rc.12",
"main": "dist/index.min.js",
"module": "es/index.js",
"typings": "es/index.d.ts",
@@ -15,7 +15,7 @@
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public",
- "tag": "alpha"
+ "tag": "rc"
},
"license": "MIT",
"unpkgFiles": [
diff --git a/packages/xgplayer-transmuxer/src/codec/ExpGolomb.js b/packages/xgplayer-transmuxer/src/codec/ExpGolomb.js
new file mode 100644
index 000000000..8660f0646
--- /dev/null
+++ b/packages/xgplayer-transmuxer/src/codec/ExpGolomb.js
@@ -0,0 +1,131 @@
+export default class ExpGolomb {
+ _bytesAvailable
+
+ _bitsAvailable = 0
+
+ _word = 0
+
+ constructor (data) {
+ if (!data) throw new Error('ExpGolomb data params is required')
+ this._data = data
+ this._bytesAvailable = data.byteLength
+ if (this._bytesAvailable) this._loadWord()
+ }
+
+ _loadWord () {
+ const position = this._data.byteLength - this._bytesAvailable
+ const availableBytes = Math.min(4, this._bytesAvailable)
+ if (availableBytes === 0) throw new Error('No bytes available')
+
+ const workingBytes = new Uint8Array(4)
+ workingBytes.set(this._data.subarray(position, position + availableBytes))
+
+ this._word = new DataView(workingBytes.buffer).getUint32(0)
+ this._bitsAvailable = availableBytes * 8
+ this._bytesAvailable -= availableBytes
+ }
+
+ bitsPos () {
+ return this._bytesAvailable * 8 - this._bitsAvailable
+ }
+
+ bitsLeft () {
+ return this._data.length * 8 - this.bitsPos()
+ }
+
+ byteAligned () {
+ return this.bitsPos() === 0 || (this.bitsPos() % 8 === 0)
+ }
+
+ skipBits (count) {
+ if (this._bitsAvailable > count) {
+ this._word <<= count
+ this._bitsAvailable -= count
+ } else {
+ count -= this._bitsAvailable
+ const skipBytes = Math.floor(count / 8)
+ count -= (skipBytes * 8)
+ this._bytesAvailable -= skipBytes
+ this._loadWord()
+ this._word <<= count
+ this._bitsAvailable -= count
+ }
+ }
+
+ readBits (size) {
+ if (size > 32) {
+ throw new Error('Cannot read more than 32 bits')
+ }
+
+ let bits = Math.min(this._bitsAvailable, size)
+ const val = this._word >>> (32 - bits)
+
+ this._bitsAvailable -= bits
+ if (this._bitsAvailable > 0) {
+ this._word <<= bits
+ } else if (this._bytesAvailable > 0) {
+ this._loadWord()
+ }
+
+ bits = size - bits
+ if (bits > 0 && this._bitsAvailable) {
+ return (val << bits) | this.readBits(bits)
+ }
+ return val
+ }
+
+ skipLZ () {
+ let leadingZeroCount
+ for (
+ leadingZeroCount = 0;
+ leadingZeroCount < this._bitsAvailable;
+ ++leadingZeroCount
+ ) {
+ if ((this._word & (0x80000000 >>> leadingZeroCount)) !== 0) {
+ this._word <<= leadingZeroCount
+ this._bitsAvailable -= leadingZeroCount
+ return leadingZeroCount
+ }
+ }
+ this._loadWord()
+ return leadingZeroCount + this.skipLZ()
+ }
+
+ skipUEG () {
+ this.skipBits(1 + this.skipLZ())
+ }
+
+ readUEG () {
+ const clz = this.skipLZ()
+ return this.readBits(clz + 1) - 1
+ }
+
+ readEG () {
+ const val = this.readUEG()
+ if (1 & val) {
+ return (1 + val) >>> 1
+ }
+ return -1 * (val >>> 1)
+ }
+
+ readBool () {
+ return this.readBits(1) === 1
+ }
+
+ readUByte () {
+ return this.readBits(8)
+ }
+
+ skipScalingList (count) {
+ let lastScale = 8
+ let nextScale = 8
+ let deltaScale
+ for (let j = 0; j < count; j++) {
+ if (nextScale !== 0) {
+ deltaScale = this.readEG()
+ nextScale = (lastScale + deltaScale + 256) % 256
+ }
+ lastScale = nextScale === 0 ? lastScale : nextScale
+ }
+ }
+}
diff --git a/packages/xgplayer-transmuxer/src/codec/index.js b/packages/xgplayer-transmuxer/src/codec/index.js
index fb690872c..a7e6070e8 100644
--- a/packages/xgplayer-transmuxer/src/codec/index.js
+++ b/packages/xgplayer-transmuxer/src/codec/index.js
@@ -1,4 +1,5 @@
export { AVC } from './avc'
export { AAC } from './aac'
+export { VVC } from './vvc'
export { HEVC } from './hevc'
export { NALu } from './nalu'
diff --git a/packages/xgplayer-transmuxer/src/codec/vvc.js b/packages/xgplayer-transmuxer/src/codec/vvc.js
new file mode 100644
index 000000000..51eb538fe
--- /dev/null
+++ b/packages/xgplayer-transmuxer/src/codec/vvc.js
@@ -0,0 +1,484 @@
+
+// import { avc } from 'xgplayer-helper-codec'
+import ExpGolomb from './ExpGolomb'
+
+
+// bvc2结构体定义
+// aligned(8) class VvcDecoderConfigurationRecord {
+// unsigned int(8) configurationVersion = 1;
+// bit(5) reserved = '0'b;
+// unsigned int(2) lengthSizeMinusOne;
+// unsigned int(1) ptl_present_flag;
+// if (ptl_present_flag) {
+// unsigned int(2) chroma_format_idc;
+// unsigned int(3) bit_depth_minus8;
+// unsigned int(3) numTemporalLayers;
+// unsigned int(2) constantFrameRate;
+// bit(6) reserved = '0'b;
+// VvcPTLRecord(numTemporalLayers) track_ptl;
+// unsigned int(16) output_layer_set_idx;
+// unsigned_int(16) picture_width;
+// unsigned_int(16) picture_height;
+// unsigned int(16) avgFrameRate;
+// }
+// unsigned int(8) numOfArrays;
+// for (j=0; j < numOfArrays; j++) {
+// unsigned int(1) array_completeness;
+// bit(2) reserved = 0;
+// unsigned int(5) NAL_unit_type;
+// unsigned int(16) numNalus;
+// for (i=0; i< numNalus; i++) {
+// unsigned int(16) nalUnitLength;
+// bit(8*nalUnitLength) nalUnit;
+// }
+// }
+// }
+
+// aligned(8) class VvcPTLRecord(num_sublayers) {
+// bit(2) reserved = 0;
+// unsigned int(6) num_bytes_constraint_info;
+// unsigned int(7) general_profile_idc;
+// unsigned int(1) general_tier_flag;
+// unsigned int(8) general_level_idc;
+// unsigned int(1) ptl_frame_only_constraint_flag;
+// unsigned int(1) ptl_multilayer_enabled_flag;
+// unsigned int(8*num_bytes_constraint_info - 2) general_constraint_info;
+// for (i=num_sublayers - 2; i >= 0; i--)
+// unsigned int(1) ptl_sublayer_level_present_flag[i];
+// for (j=num_sublayers; j<=8 && num_sublayers > 1; j++)
+// bit(1) ptl_reserved_zero_bit = 0;
+// for (i=num_sublayers-2; i >= 0; i--)
+// if (ptl_sublayer_level_present[i])
+// unsigned int(8) sublayer_level_idc[i];
+// unsigned int(8) num_sub_profiles;
+// for (j=0; j < num_sub_profiles; j++)
+// unsigned int(32) general_sub_profile_idc[j];
+// }
+
+
+class StreamReader {
+
+ constructor (uint8Arr) {
+ this._buffer = uint8Arr
+ this._offset = 0
+ this._heldBits = 0
+ this._numHeldBits = 0
+ }
+
+ readUint8 () {
+ return this._buffer[this._offset++]
+ }
+
+ readUint16 () {
+ return (this._buffer[this._offset++] << 8) | this._buffer[this._offset++]
+ }
+
+ readUint32 () {
+ return (this._buffer[this._offset++] << 24) |
+ (this._buffer[this._offset++] << 16) |
+ (this._buffer[this._offset++] << 8) |
+ (this._buffer[this._offset++])
+ }
+
+ readUint8Array (len) {
+ const ret = this._buffer.slice(this._offset, this._offset + len)
+ this._offset += len
+ return ret
+ }
+
+ streamRead1Bytes () {
+ this._heldBits = this.readUint8()
+ this._numHeldBits = 1 * 8
+ }
+
+ streamRead2Bytes () {
+ this._heldBits = this.readUint16()
+ this._numHeldBits = 2 * 8
+ }
+
+ extractBits (numBits) {
+ const ret = (this._heldBits >> (this._numHeldBits - numBits)) & ((1 << numBits) - 1)
+ this._numHeldBits -= numBits
+ return ret
+ }
+
+}
+
+export class VVC {
+ static parseVVCDecoderConfigurationRecord (data) {
+
+ const reader = new StreamReader(data)
+ const configurationVersion = reader.readUint8()
+ // VvcDecoderConfigurationRecord
+ reader.streamRead1Bytes()
+ reader.extractBits(5)
+
+ const lengthSizeMinusOne = reader.extractBits(2) + 1
+ const ptlPresentFlag = reader.extractBits(1)
+
+ let olsIdx
+ let numSublayers
+ let constantFrameRate
+ let chromaFormatIdc
+ let bitDepthLumaMinus8
+ let ptlRecord = {}
+ let maxPictrueWidth
+ let maxPictureHeight
+ let avgFrameRate
+
+
+ if (ptlPresentFlag) {
+ reader.streamRead2Bytes()
+
+ chromaFormatIdc = reader.extractBits(2)
+ bitDepthLumaMinus8 = reader.extractBits(3)
+ numSublayers = reader.extractBits(3)
+ constantFrameRate = reader.extractBits(2)
+ reader.extractBits(6) // reserved
+
+ ptlRecord = VVC.parseVVCPTLRecord(reader, numSublayers)
+ olsIdx = reader.readUint16()
+ maxPictrueWidth = reader.readUint16()
+ maxPictureHeight = reader.readUint16()
+ avgFrameRate = reader.readUint16()
+
+ } // end if
+
+ const VVC_NALU_OPI = 12
+ const VVC_NALU_DEC_PARAM = 13
+
+ // const naluArrays= []
+ const numOfArrays = reader.readUint8()
+
+ const vpsArr = []
+ const spsArr = []
+ const ppsArr = []
+ let spsParsed = null
+
+ for (let i = 0; i < numOfArrays; i++) {
+ reader.streamRead1Bytes()
+ reader.extractBits(1)
+
+ reader.extractBits(2)
+ const naluType = reader.extractBits(5)
+
+ let numNalus = 1
+ if (naluType !== VVC_NALU_DEC_PARAM && naluType !== VVC_NALU_OPI) {
+ numNalus = reader.readUint16()
+ }
+
+ for (let j = 0; j < numNalus; j++) {
+ const len = reader.readUint16()
+
+ switch (naluType) {
+ case 14: {
+ vpsArr.push(reader.readUint8Array(len))
+ break
+ }
+ case 15: {
+ const sps = reader.readUint8Array(len)
+ if (!spsParsed) {
+ spsParsed = VVC.parseSPS(VVC.removeEPB(sps))
+ }
+ spsArr.push(sps)
+ break
+ }
+ case 16: {
+ ppsArr.push(reader.readUint8Array(len))
+ break
+ }
+ default:
+ }
+ }
+ }
+
+ const ret = {
+ data,
+ configurationVersion,
+ codec: 'bvc2.1.6.L93.B0',
+ nalUnitSize: lengthSizeMinusOne,
+ ptlPresentFlag,
+ olsIdx,
+ numSublayers,
+ constantFrameRate,
+ chromaFormatIdc,
+ bitDepthLumaMinus8,
+ ptlRecord,
+ width:maxPictrueWidth,
+ height:maxPictureHeight,
+ sampleRate:avgFrameRate,
+ numOfArrays,
+ vps:vpsArr,
+ sps:spsArr,
+ pps:ppsArr,
+ spsParsed
+ }
+
+ // console.log('parseVVCDecoderConfigurationRecord:', data)
+ // console.log(ret)
+
+ return ret
+ }
+
+ static parseVVCPTLRecord (reader, numSublayers) {
+ reader.streamRead2Bytes()
+ reader.extractBits(2)
+ const numBytesConstraintInfo = reader.extractBits(6)
+ const generalProfileIdc = reader.extractBits(7)
+ const generalTierFlag = reader.extractBits(1)
+ const generalLevelIdc = reader.readUint8()
+
+ reader.streamRead1Bytes()
+ const ptlFrameOnlyConstraintFlag = reader.extractBits(1)
+ const ptlMultilayerEnabledFlag = reader.extractBits(1)
+ const generalConstraintInfo = new Uint8Array(numBytesConstraintInfo)
+ if (numBytesConstraintInfo) {
+ for (let i = 0; i < numBytesConstraintInfo - 1; i++) {
+ const cnstr1 = reader.extractBits(6)
+ reader.streamRead1Bytes()
+ const cnstr2 = reader.extractBits(2)
+ generalConstraintInfo[i] = ((cnstr1 << 2) | cnstr2)
+ }
+ generalConstraintInfo[numBytesConstraintInfo - 1] = reader.extractBits(6)
+ } else {
+ reader.extractBits(6)
+ }
+
+ const subLayerLevelIdc = []
+ if (numSublayers > 1) {
+ reader.streamRead1Bytes()
+ let ptlSublayerPresentMask = 0
+
+ for (let j = numSublayers - 2; j >= 0; --j) {
+ const val = reader.extractBits(1)
+ ptlSublayerPresentMask |= val << j
+ }
+
+ for (let j = numSublayers; j <= 8 && numSublayers > 1; ++j) {
+ reader.extractBits(1)
+ }
+
+ for (let j = numSublayers - 2; j >= 0; --j) {
+ if (ptlSublayerPresentMask & (1 << j)) {
+ subLayerLevelIdc[j] = reader.readUint8()
+ }
+ }
+ }
+
+ const ptlNumSubProfiles = reader.readUint8()
+ const generalSubProfileIdc = []
+ if (ptlNumSubProfiles) {
+ for (let i = 0; i < ptlNumSubProfiles; i++) {
+ generalSubProfileIdc.push(reader.readUint32())
+ }
+ }
+
+ return {
+ generalProfileIdc,
+ generalTierFlag,
+ generalLevelIdc,
+ ptlFrameOnlyConstraintFlag,
+ ptlMultilayerEnabledFlag,
+ generalConstraintInfo,
+ subLayerLevelIdc,
+ generalSubProfileIdc,
+ ptlNumSubProfiles,
+ numBytesConstraintInfo
+ }
+
+ }
+
+ static getAvccNals (buffer) {
+ const nals = []
+ while (buffer.position < buffer.length - 4) {
+ const length = buffer.dataview.getInt32(buffer.dataview.position)
+ if (buffer.length - buffer.position >= length) {
+ const header = buffer.buffer.slice(buffer.position, buffer.position + 4)
+ buffer.skip(4)
+ const body = new Uint8Array(buffer.buffer.slice(buffer.position, buffer.position + length))
+ buffer.skip(length)
+ nals.push({header, body})
+ continue
+ }
+ break
+ }
+ return nals
+ }
+
+ static analyseNal (unit) {
+ const type = (unit.body[1] & 0xf8) >> 3
+ unit.type = type
+ switch (type) {
+ case 23:
+ case 24:
+ // try {
+ // unit.sei = SEIParser.parse(unit.body.slice(1))
+ // } catch (e) {}
+ break
+ case 7:
+ case 8:
+ unit.key = true
+ break
+ case 14:
+ unit.vps = true
+ break
+ case 15:
+ unit.sps = true
+ // todo: parse sps
+ break
+ case 16:
+ unit.pps = true
+ break
+ case 17:
+ unit.aps = true
+ break
+ default:
+ }
+ }
+
+ static removeEPB (uint) {
+ const length = uint.byteLength
+ const emulationPreventionBytesPositions = []
+ let i = 1
+
+ while (i < length - 2) {
+ if (uint[i] === 0 && uint[i + 1] === 0 && uint[i + 2] === 0x03) {
+ emulationPreventionBytesPositions.push(i + 2)
+ i += 2
+ } else {
+ i++
+ }
+ }
+
+ if (!emulationPreventionBytesPositions.length) return uint
+
+ const newLength = length - emulationPreventionBytesPositions.length
+ const newData = new Uint8Array(newLength)
+
+ let sourceIndex = 0
+ for (i = 0; i < newLength; sourceIndex++, i++) {
+ if (sourceIndex === emulationPreventionBytesPositions[0]) {
+ sourceIndex++
+ emulationPreventionBytesPositions.shift()
+ }
+ newData[i] = uint[sourceIndex]
+ }
+
+ return newData
+ }
+
+ static parseVps () {
+
+ }
+
+ static parseSPS (sps) {
+ // console.log(sps)
+ const eg = new ExpGolomb(sps)
+
+ eg.readUByte()
+ eg.readUByte()
+
+ eg.skipBits(4)
+
+ const spsVideoParameterSetId = eg.readBits(4)
+ const spsMaxSubLayerMinus1 = eg.readBits(3)
+ const chromaFormatIdc = eg.readBits(2)
+ let chromaFormat = 420
+ if (chromaFormatIdc <= 3) chromaFormat = [0, 420, 422, 444][chromaFormatIdc]
+
+ eg.readBits(2)
+ eg.readBits(1)
+
+
+ const ptlInfo = VVC._parseProfileTierLevel(eg, 1, spsMaxSubLayerMinus1)
+
+ eg.readBits(1)
+ if (eg.readBits(1)) {
+ eg.readBits(1)
+ }
+
+ const width = eg.readUEG()
+ const height = eg.readUEG()
+
+ return {
+ width,
+ height,
+ spsMaxSubLayerMinus1,
+ spsVideoParameterSetId,
+ chromaFormatIdc,
+ chromaFormat,
+ ptlInfo
+ }
+ }
+
+ static _parseProfileTierLevel (eg, ptPresentFlag, spsMaxSubLayerMinus1) {
+ const generalProfileIdc = eg.readBits(7)
+ const generalTierFlag = eg.readBits(1)
+ const generalLevelIdc = eg.readBits(8)
+ const ptlFrameOnlyConstraintFlag = eg.readBits(1)
+ const ptlMultilayerEnabledFlag = eg.readBits(1)
+ let gciInfo
+ if (ptPresentFlag) {
+ gciInfo = VVC._parseGeneralConstraintsInfo(eg)
+ }
+
+ const loop = spsMaxSubLayerMinus1 - 1
+ const ptlSublayerLevelPresentFlags = []
+ const ptlSublayerLevelIdcs = []
+ const ptlSubProfileIdcs = []
+
+ for (let i = loop; i >= 0; --i) {
+ ptlSublayerLevelPresentFlags[i] = eg.readBits(1)
+ }
+
+ while (!eg.byteAligned()) {
+ eg.readBits(1)
+ }
+
+ for (let i = loop; i >= 0; --i) {
+ if (ptlSublayerLevelPresentFlags[i]) {
+ ptlSublayerLevelIdcs[i] = eg.readUByte()
+ }
+ }
+
+ if (ptPresentFlag) {
+ const ptlNumSubProfiles = eg.readUByte()
+ for (let i = 0; i < ptlNumSubProfiles; i++) {
+ ptlSubProfileIdcs[i] = eg.readBits(32)
+ }
+ }
+
+ return {
+ generalProfileIdc,
+ generalTierFlag,
+ generalLevelIdc,
+ ptlFrameOnlyConstraintFlag,
+ ptlMultilayerEnabledFlag,
+ ptlSublayerLevelPresentFlags,
+ ptlSublayerLevelIdcs,
+ ptlSubProfileIdcs,
+ gciInfo
+ }
+
+ }
+
+ static _parseGeneralConstraintsInfo (eg) {
+ const gciPresentFlag = eg.readBits(1)
+
+ if (gciPresentFlag) {
+ eg.skipBits(71)
+ const gciNumReservedBits = eg.readBits(8)
+ if (gciNumReservedBits) {
+ eg.skipBits(gciNumReservedBits)
+ }
+ }
+
+ const zeroBits = 8 - eg.bitsPos() % 8
+ eg.skipBits(zeroBits)
+
+ return {
+ gciPresentFlag
+ }
+ }
+}
+
diff --git a/packages/xgplayer-transmuxer/src/model/types.js b/packages/xgplayer-transmuxer/src/model/types.js
index 48492f2d1..0cf0e0520 100644
--- a/packages/xgplayer-transmuxer/src/model/types.js
+++ b/packages/xgplayer-transmuxer/src/model/types.js
@@ -8,7 +8,8 @@ export const TrackType = {
/** @enum {string} */
export const VideoCodecType = {
AVC: 'avc',
- HEVC: 'hevc'
+ HEVC: 'hevc',
+ VVCC: 'vvcC'
}
/** @enum {string} */
diff --git a/packages/xgplayer-transmuxer/src/mp4/fmp4-demuxer.js b/packages/xgplayer-transmuxer/src/mp4/fmp4-demuxer.js
index 15921b45a..1193e8fdd 100644
--- a/packages/xgplayer-transmuxer/src/mp4/fmp4-demuxer.js
+++ b/packages/xgplayer-transmuxer/src/mp4/fmp4-demuxer.js
@@ -55,7 +55,7 @@ export class FMP4Demuxer {
if (videoTrack.id == k) {
tracks[k].map(x => {
x.offset += baseOffset
- const sample = new VideoSample((x.pts || x.dts) + videoBaseMediaDecodeTime, x.dts + videoBaseMediaDecodeTime)
+ const sample = new VideoSample((typeof x.pts === 'number' ? x.pts : x.dts) + videoBaseMediaDecodeTime, x.dts + videoBaseMediaDecodeTime)
sample.duration = x.duration
sample.gopId = x.gopId
if (x.keyframe) sample.setToKeyframe()
diff --git a/packages/xgplayer-transmuxer/src/mp4/mp4-demuxer.js b/packages/xgplayer-transmuxer/src/mp4/mp4-demuxer.js
index 49d168610..b1f514083 100644
--- a/packages/xgplayer-transmuxer/src/mp4/mp4-demuxer.js
+++ b/packages/xgplayer-transmuxer/src/mp4/mp4-demuxer.js
@@ -4,24 +4,39 @@ import { MP4Parser } from './mp4-parser'
import { Logger } from './logger'
import Crypto from './crypto/crypto'
const NEW_ARRAY_MAX_CNT = 20
+const DELETE_BOX_LIST = ['stts','stsc','stsz','stco','co64','stss', 'ctts']
export class MP4Demuxer {
_videoSamples = []
_audioSamples = []
_lastRemainBuffer = []
_lastRemainBufferStartPos = 0
+ videoMaxFrameIdx = -1
+ audioMaxFrameIdx = -1
constructor (videoSegmnents, audioSegmnents, metadataTrack, options) {
this.videoTrack = new VideoTrack()
this.audioTrack = new AudioTrack()
this.metadataTrack = metadataTrack || new MetadataTrack()
+ this.videoSegmnents = videoSegmnents
+ this.audioSegmnents = audioSegmnents
this.log = new Logger('MP4Demuxer', options && options.openLog ? !options.openLog : true)
-
- videoSegmnents && videoSegmnents.forEach(item => {
- this._videoSamples.push(...item.frames)
- })
- audioSegmnents && audioSegmnents.forEach(item => {
- this._audioSamples.push(...item.frames)
- })
+ this.memoryOpt = options?.memoryOpt
+ if (!this.memoryOpt) {
+ videoSegmnents && videoSegmnents.forEach(item => {
+ this._videoSamples.push(...item.frames)
+ })
+ audioSegmnents && audioSegmnents.forEach(item => {
+ this._audioSamples.push(...item.frames)
+ })
+ }
+ if (this.videoSegmnents?.length > 0) {
+ const lastVideoSegFrames = this.videoSegmnents[this.videoSegmnents.length - 1].frames
+ this.videoMaxFrameIdx = lastVideoSegFrames[lastVideoSegFrames.length - 1].index
+ }
+ if (this.audioSegmnents?.length > 0) {
+ const lastAudioSegFrames = this.audioSegmnents[this.audioSegmnents.length - 1].frames
+ this.audioMaxFrameIdx = lastAudioSegFrames[lastAudioSegFrames.length - 1].index
+ }
}
parseSamples (moov) {
@@ -32,12 +47,8 @@ export class MP4Demuxer {
MP4Parser.moovToTrack(moov, this.videoTrack, this.audioTrack)
this.videoSenc = this.videoTrack.videoSenc
this.audioSenc = this.audioTrack.audioSenc
- }
- if (!this._audioSamples.length && !this._videoSamples.length) {
- const ret = MP4Parser.moovToSamples(moov)
- if (!ret) throw new Error('cannot parse samples from moov box')
- this._videoSamples = ret.videoSamples || []
- this._audioSamples = ret.audioSamples || []
+ // 把不用的释放掉,减少内存占用
+ this.memoryOpt && this.clearBoxEntries(moov)
}
}
@@ -53,17 +64,32 @@ export class MP4Demuxer {
let sample
let sampleData
let startByte
+ let findRes = {}
if (videoIndexRange) {
let frame
let nalSize = 0
+ if (this.memoryOpt && this.videoSegmnents) {
+ findRes = this.getFramePosByIdx('video', videoIndexRange[0])
+ if (!findRes) {
+ throw new Error(`cannot found video frame #${videoIndexRange[0]}`)
+ }
+ }
+ let { frameIdx, segmentIdx} = findRes
for (let i = videoIndexRange[0], l = videoIndexRange[1]; i <= l; i++) {
- sample = this._videoSamples[i]
+ if (!this._videoSamples.length && this.videoSegmnents) {
+ const ret = this.getFrameInfo('video', segmentIdx, frameIdx)
+ sample = ret.sample
+ segmentIdx = ret.segmentIdx
+ frameIdx = ret.frameIdx
+ } else {
+ sample = this._videoSamples[i]
+ }
if (!sample) {
throw new Error(`cannot found video frame #${i}`)
}
startByte = sample.offset - dataStart
sampleData = data.subarray(startByte, startByte + sample.size)
- frame = new VideoSample(sample.pts || sample.dts, sample.dts)
+ frame = new VideoSample(typeof sample.pts === 'number' ? sample.pts : sample.dts, sample.dts)
frame.duration = sample.duration
frame.gopId = sample.gopId
if (sample.keyframe) frame.setToKeyframe()
@@ -80,9 +106,24 @@ export class MP4Demuxer {
videoTrack.baseMediaDecodeTime = videoTrack.samples[0].dts
}
+ findRes = {}
if (audioIndexRange) {
+ if (this.memoryOpt && this.audioSegmnents) {
+ findRes = this.getFramePosByIdx('audio', audioIndexRange[0])
+ if (!findRes) {
+ throw new Error(`cannot found audio frame #${audioIndexRange[0]}`)
+ }
+ }
+ let { frameIdx , segmentIdx} = findRes
for (let i = audioIndexRange[0], l = audioIndexRange[1]; i <= l; i++) {
- sample = this._audioSamples[i]
+ if (!this._audioSamples.length && this.audioSegmnents) {
+ const ret = this.getFrameInfo('audio', segmentIdx, frameIdx)
+ sample = ret.sample
+ segmentIdx = ret.segmentIdx
+ frameIdx = ret.frameIdx
+ } else {
+ sample = this._audioSamples[i]
+ }
if (!sample) {
throw new Error(`cannot found video frame #${i}`)
}
@@ -105,7 +146,6 @@ export class MP4Demuxer {
this.videoTrack.useEME = useEME
this.audioTrack.useEME = useEME
- // this.log.debug('[demuxPart start],dataStart,', dataStart, ',dataLen,', data.byteLength, ', lastRemain,', this._lastRemainBuffer ? this._lastRemainBuffer.byteLength : 0)
if (this._lastRemainBuffer && this._lastRemainBuffer.byteLength > 0 && dataStart > this._lastRemainBufferStartPos && dataStart <= this._lastRemainBufferStartPos + this._lastRemainBuffer.byteLength) {
let tryCnt = 0
while (tryCnt < NEW_ARRAY_MAX_CNT) {
@@ -118,7 +158,6 @@ export class MP4Demuxer {
dataStart -= buffer.byteLength
this._lastRemainBuffer = null
this._lastRemainBufferStartPos = 0
- // this.log.debug('[demuxPart add lastRemain],dataStart,', dataStart, ',dataLen,', data.byteLength)
break
} catch (e) {
if (tryCnt < NEW_ARRAY_MAX_CNT) {
@@ -137,33 +176,55 @@ export class MP4Demuxer {
videoTrack.videoSenc = null
audioTrack.audioSenc = null
+ if (videoIndexRange?.[1] > this.videoMaxFrameIdx && this.videoMaxFrameIdx > 0) {
+ videoIndexRange[1] = this.videoMaxFrameIdx
+ }
+ if (audioIndexRange?.[1] > this.audioMaxFrameIdx && this.audioMaxFrameIdx > 0) {
+ audioIndexRange[1] = this.audioMaxFrameIdx
+ }
+
let sample
let sampleData
let startByte
let videoEndByte = 0
let audioEndByte = 0
- if (this._videoSamples.length > 0 && videoIndexRange.length > 0) {
+ let findRes = {}
+ const end = data.byteLength + dataStart
+ if (videoIndexRange.length > 0) {
let frame
- const end = data.byteLength + dataStart
+ if (this.memoryOpt && this.videoSegmnents) {
+ findRes = this.getFramePosByIdx('video', videoIndexRange[0])
+ if (!findRes) {
+ throw new Error(`cannot found video frame #${videoIndexRange[0]}`)
+ }
+ }
+ let { frameIdx, segmentIdx} = findRes
for (let i = videoIndexRange[0]; i <= videoIndexRange[1]; i++) {
- sample = this._videoSamples[i]
+ if (!this._videoSamples.length && this.videoSegmnents) {
+ const ret = this.getFrameInfo('video', segmentIdx, frameIdx)
+ sample = ret.sample
+ segmentIdx = ret.segmentIdx
+ frameIdx = ret.frameIdx
+ } else {
+ sample = this._videoSamples[i]
+ }
if (!sample) {
throw new Error(`cannot found video frame #${i}`)
}
+ if (this.memoryOpt && sample.offset + sample.size > end) {
+ break
+ }
if (sample.offset >= dataStart && sample.offset + sample.size <= end) {
startByte = sample.offset - dataStart
videoEndByte = startByte + sample.size
sampleData = data.subarray(startByte, videoEndByte)
- frame = new VideoSample(sample.pts || sample.dts, sample.dts)
+ frame = new VideoSample(typeof sample.pts === 'number' ? sample.pts : sample.dts, sample.dts)
frame.duration = sample.duration
- // this.log.debug('[video !!!!!!!!],frame,index,', sample.index, ',segmentIdx', segmentIdx, ', dataStart,', dataStart, ',dataEnd', end, ',Samplestart,', sample.offset, ', SampleEnd,', sample.offset + sample.size, ',size,', sample.size, 'dts,', sample.dts, ',pts,', sample.pts, ', keyframe', sample.keyframe)
frame.gopId = sample.gopId
frame.sampleOffset = sample.index
- // frame.timeOffset = sample.timeOffset || sample.timeOffset === 0 ? sample.timeOffset : null
if (sample.keyframe) frame.setToKeyframe()
frame.data = sampleData
frame.size = sample.size
- // this.log.debug('[demux video frame],frame,index,', sample.index, ', size,', sampleData.byteLength, ', hash ', hashVal(sampleData.toString()))
videoTrack.samples.push(frame)
}
}
@@ -172,26 +233,41 @@ export class MP4Demuxer {
videoTrack.baseMediaDecodeTime = videoTrack.samples[0].dts
videoTrack.startPts = videoTrack.samples[0].pts / videoTrack.timescale
videoTrack.endPts = videoTrack.samples[videoTrack.samples.length - 1].pts / videoTrack.timescale
- // this.log.debug('[demux video],frame,startPts,', videoTrack.startPts, ', endPts,', videoTrack.endPts)
+ videoTrack.startDts = videoTrack.samples[0].dts / videoTrack.timescale
+ videoTrack.endDts = videoTrack.samples[videoTrack.samples.length - 1].dts / videoTrack.timescale
if (this.videoSenc) {
videoTrack.videoSenc = this.videoSenc.slice(videoTrack.samples[0].sampleOffset, videoTrack.samples[0].sampleOffset + videoTrack.samples.length)
videoTrack.kidValue = kidValue
}
}
}
- if (this._audioSamples.length > 0 && audioIndexRange.length > 0) {
+ if (audioIndexRange.length > 0) {
+ if (this.memoryOpt && this.audioSegmnents) {
+ findRes = this.getFramePosByIdx('audio', audioIndexRange[0])
+ if (!findRes) {
+ throw new Error(`cannot found audio frame #${audioIndexRange[0]}`)
+ }
+ }
+ let { frameIdx, segmentIdx} = findRes
for (let i = audioIndexRange[0]; i <= audioIndexRange[1]; i++) {
- sample = this._audioSamples[i]
+ if (!this._audioSamples.length && this.audioSegmnents) {
+ const ret = this.getFrameInfo('audio', segmentIdx, frameIdx)
+ sample = ret.sample
+ segmentIdx = ret.segmentIdx
+ frameIdx = ret.frameIdx
+ } else {
+ sample = this._audioSamples[i]
+ }
if (!sample) {
throw new Error(`cannot found video frame #${i}`)
}
- if (sample.offset >= dataStart && sample.offset + sample.size <= data.byteLength + dataStart) {
+ if (this.memoryOpt && sample.offset + sample.size > end) {
+ break
+ }
+ if (sample.offset >= dataStart && sample.offset + sample.size <= end) {
startByte = sample.offset - dataStart
audioEndByte = startByte + sample.size
sampleData = data.subarray(startByte, audioEndByte)
- // this.log.debug('[audio !!!!!!!!],audio frame,index,', sample.index, ',segmentIdx', segmentIdx, ', Samplestart,', sample.offset, ', SampleEnd,', sample.offset + sample.size, ',size,', sample.size, 'dts,', sample.dts, ',pts,', sample.pts || sample.dts)
- // frame.gopId = sample.gopId
- // this.log.debug('[demux audio frame],index ,', sample.index, ', size,', sampleData.byteLength, ', hash,', hashVal(sampleData.toString()))
audioTrack.samples.push(new AudioSample(sample.dts, sampleData, sample.duration, sample.index))
}
}
@@ -200,7 +276,6 @@ export class MP4Demuxer {
audioTrack.baseMediaDecodeTime = audioTrack.samples[0].dts
audioTrack.startPts = audioTrack.samples[0].pts / audioTrack.timescale
audioTrack.endPts = audioTrack.samples[audioTrack.samples.length - 1].pts / audioTrack.timescale
- // this.log.debug('[demux audio],frame,startPts,', audioTrack.startPts, ', endPts,', audioTrack.endPts)
if (this.audioSenc) {
audioTrack.audioSenc = this.audioSenc.slice(audioTrack.samples[0].sampleOffset, audioTrack.samples[0].sampleOffset + audioTrack.samples.length)
audioTrack.kidValue = kidValue
@@ -220,7 +295,8 @@ export class MP4Demuxer {
start += nalSize
}
}
- this._lastRemainBuffer = data.subarray(Math.max(videoEndByte, audioEndByte))
+ const usedPos = Math.max(videoEndByte, audioEndByte)
+ this._lastRemainBuffer = data.subarray(usedPos)
if (this._lastRemainBuffer.byteLength > 0) {
this._lastRemainBufferStartPos = dataStart + data.byteLength - this._lastRemainBuffer.byteLength
} else {
@@ -229,13 +305,21 @@ export class MP4Demuxer {
return {
videoTrack,
audioTrack,
- metadataTrack: this.metadataTrack
+ metadataTrack: this.metadataTrack,
+ lastRemainBufferInfo:{
+ data: this._lastRemainBuffer,
+ pos: this._lastRemainBufferStartPos
+ }
}
}
+ setLastRemainBufferInfo (data, startPos) {
+ this._lastRemainBuffer = data
+ this._lastRemainBufferStartPos = startPos
+ }
reset () {
- this._videoSamples = []
- this._audioSamples = []
+ // this._videoSamples = []
+ // this._audioSamples = []
this._lastRemainBuffer = null
this._lastRemainBufferStartPos = 0
this.videoTrack.reset()
@@ -251,4 +335,60 @@ export class MP4Demuxer {
static probe (data) {
return MP4Parser.probe(data)
}
+
+ // 根据帧的index找出起始帧
+ getFramePosByIdx (type, frameIdx) {
+ const trak = type === 'video' ? this.videoSegmnents : this.audioSegmnents
+ if (!trak || !trak?.length) return null
+ let segmentIdx = 0
+ let frames
+ for (let idx = 0; idx < trak.length; idx++) {
+ frames = trak[idx].frames
+ if (frameIdx <= trak[idx].frames?.[frames?.length - 1].index) {
+ segmentIdx = idx
+ break
+ }
+ }
+
+ const findFrameIdx = frames.findIndex(frame => frame.index === frameIdx)
+ return {
+ frameIdx: findFrameIdx,
+ segmentIdx: segmentIdx
+ }
+ }
+
+ getFrameInfo (type, segmentIdx, frameIdx) {
+ const trak = type === 'video' ? this.videoSegmnents : this.audioSegmnents
+ if (!trak) return {}
+ const curSegmentFrameLen = trak[segmentIdx]?.frames?.length
+ if (frameIdx < curSegmentFrameLen) {
+ return {
+ sample:trak[segmentIdx]?.frames[frameIdx],
+ segmentIdx,
+ frameIdx: frameIdx + 1
+ }
+ } else {
+ return {
+ sample:trak[segmentIdx + 1]?.frames[0],
+ segmentIdx: segmentIdx + 1,
+ frameIdx: 1
+ }
+ }
+ }
+
+ clearBoxEntries (moov){
+ // 把不用的释放掉,减少内存占用
+ if (this.memoryOpt) {
+ moov.trak.forEach(trak => {
+ DELETE_BOX_LIST.forEach(type => {
+ const box = trak.mdia.minf.stbl[type]
+ if (box) {
+ box.entries && (box.entries = null)
+ box.entrySizes && (box.entrySizes = null)
+ }
+ })
+ })
+ }
+ }
+
}
diff --git a/packages/xgplayer-transmuxer/src/mp4/mp4-parser.js b/packages/xgplayer-transmuxer/src/mp4/mp4-parser.js
index 9475d0371..176fbc5c0 100644
--- a/packages/xgplayer-transmuxer/src/mp4/mp4-parser.js
+++ b/packages/xgplayer-transmuxer/src/mp4/mp4-parser.js
@@ -1,6 +1,7 @@
+import { AAC, VVC } from '../codec'
import { AudioCodecType, VideoCodecType } from '../model'
-import { getAvcCodec, readBig16, readBig24, readBig32, readBig64 } from '../utils'
-import { AAC } from '../codec'
+import { getAvcCodec, readBig16, readBig24, readBig32, readBig64, readInt32, readInt64 } from '../utils'
+
export class MP4Parser {
static findBox (data, names, start = 0) {
const ret = []
@@ -142,6 +143,7 @@ export class MP4Parser {
return parseBox(box, false, (ret, data, start) => {
ret.tkhd = MP4Parser.tkhd(MP4Parser.findBox(data, ['tkhd'], start)[0])
ret.mdia = MP4Parser.mdia(MP4Parser.findBox(data, ['mdia'], start)[0])
+ ret.edts = MP4Parser.edts(MP4Parser.findBox(data, ['edts'], start)[0])
})
}
@@ -170,6 +172,41 @@ export class MP4Parser {
})
}
+ static edts (box) {
+ return parseBox(box, false, (ret, data, start) => {
+ ret.elst = MP4Parser.elst(MP4Parser.findBox(data, ['elst'], start)[0])
+ })
+ }
+
+ static elst (box) {
+ return parseBox(box, true, (ret, data, start) => {
+ ret.entries = []
+ ret.entriesData = data
+ let offset = 0
+ const entry_count = readBig32(data, offset)
+ offset += 4
+ for (let i = 0; i < entry_count; i++) {
+ const entry = {}
+ ret.entries.push(entry)
+ if (ret.version === 1) {
+ entry.segment_duration = readBig64(data, offset)
+ offset += 8
+ entry.media_time = readInt64(data, offset)
+ offset += 8
+ } else {
+ entry.segment_duration = readBig32(data, offset)
+ offset += 4
+ entry.media_time = readInt32(data, offset)
+ offset += 4
+ }
+ entry.media_rate_integer = readBig16(data, offset)
+ offset += 2
+ entry.media_rate_fraction = readBig16(data, offset)
+ offset += 2
+ }
+ })
+ }
+
static mdhd (box) {
return parseBox(box, true, (ret, data) => {
let start = 0
@@ -294,6 +331,28 @@ export class MP4Parser {
})
}
+
+ static bvc2 (box) {
+ return parseBox(box, false, (ret, data, start) => {
+ const bodyStart = parseVisualSampleEntry(ret, data)
+ const bodyData = data.subarray(bodyStart)
+ start += bodyStart
+ ret.vvcC = MP4Parser.bv2C(MP4Parser.findBox(bodyData, ['bv2C'], start)[0])
+ ret.pasp = MP4Parser.pasp(MP4Parser.findBox(bodyData, ['pasp'], start)[0])
+ })
+ }
+
+ static bv2C (box) {
+ return parseBox(box, false, (ret, data, start) => {
+ const record = VVC.parseVVCDecoderConfigurationRecord(data)
+ for (const key in record) {
+ if (Object.prototype.hasOwnProperty.call(record, key)) {
+ ret[key] = record[key]
+ }
+ }
+ })
+ }
+
static stsd (box) {
return parseBox(box, true, (ret, data, start) => {
ret.entryCount = readBig32(data)
@@ -307,6 +366,9 @@ export class MP4Parser {
case 'hvc1':
case 'hev1':
return MP4Parser.hvc1(b)
+ // 266
+ case 'bvc2':
+ return MP4Parser.bvc2(b)
case 'mp4a':
return MP4Parser.mp4a(b)
case 'alaw':
@@ -585,14 +647,14 @@ export class MP4Parser {
static stsc (box) {
return parseBox(box, true, (ret, data) => {
const entryCount = readBig32(data)
- const entries = []
+ const entries = new Array(entryCount)
let start = 4
for (let i = 0; i < entryCount; i++) {
- entries.push({
+ entries[i] = {
firstChunk: readBig32(data, start),
samplesPerChunk: readBig32(data, start + 4),
sampleDescriptionIndex: readBig32(data, start + 8)
- })
+ }
start += 12
}
ret.entryCount = entryCount
@@ -604,11 +666,11 @@ export class MP4Parser {
return parseBox(box, true, (ret, data) => {
const sampleSize = readBig32(data)
const sampleCount = readBig32(data, 4)
- const entrySizes = []
+ const entrySizes = new Array(sampleCount)
if (!sampleSize) {
let start = 8
for (let i = 0; i < sampleCount; i++) {
- entrySizes.push(readBig32(data, start))
+ entrySizes[i] = (readBig32(data, start))
start += 4
}
}
@@ -621,10 +683,10 @@ export class MP4Parser {
static stco (box) {
return parseBox(box, true, (ret, data) => {
const entryCount = readBig32(data)
- const entries = []
+ const entries = new Array(entryCount)
let start = 4
for (let i = 0; i < entryCount; i++) {
- entries.push(readBig32(data, start))
+ entries[i] = (readBig32(data, start))
start += 4
}
ret.entryCount = entryCount
@@ -635,10 +697,11 @@ export class MP4Parser {
static co64 (box) {
return parseBox(box, true, (ret, data) => {
const entryCount = readBig32(data)
- const entries = []
+ const entries = new Array(entryCount)
let start = 4
for (let i = 0; i < entryCount; i++) {
- entries.push(readBig64(data, start))
+ // entries.push(readBig64(data, start))
+ entries[i] = readBig64(data, start)
start += 8
}
ret.entryCount = entryCount
@@ -649,10 +712,10 @@ export class MP4Parser {
static stss (box) {
return parseBox(box, true, (ret, data) => {
const entryCount = readBig32(data)
- const entries = []
+ const entries = new Array(entryCount)
let start = 4
for (let i = 0; i < entryCount; i++) {
- entries.push(readBig32(data, start))
+ entries[i] = readBig32(data, start)
start += 4
}
ret.entryCount = entryCount
@@ -695,8 +758,8 @@ export class MP4Parser {
ret.firstSampleFlags = readBig32(data, offset)
offset += 4
}
- ret.samples = []
if (dataLen > offset) {
+ ret.samples = new Array(sampleCount)
let sample
for (let i = 0; i < sampleCount; i++) {
sample = {}
@@ -720,7 +783,7 @@ export class MP4Parser {
}
offset += 4
}
- ret.samples.push(sample)
+ ret.samples[i] = sample
}
}
})
@@ -766,6 +829,10 @@ export class MP4Parser {
v.mvhdTimecale = moov.mvhd.timescale
v.timescale = v.formatTimescale = vTrack.mdia.mdhd.timescale
v.duration = vTrack.mdia.mdhd.duration || (v.mvhdDurtion / v.mvhdTimecale * v.timescale)
+ if (vTrack.edts?.elst) {
+ v.editList = vTrack.edts.elst
+ v.editListApplied = vTrack.editListApplied
+ }
const e1 = vTrack.mdia.minf.stbl.stsd.entries[0]
v.width = e1.width
v.height = e1.height
@@ -783,6 +850,13 @@ export class MP4Parser {
v.codec = e1.avcC.codec
v.sps = e1.avcC.sps
v.pps = e1.avcC.pps
+ } else if (e1.vvcC) {
+ v.codecType = VideoCodecType.VVCC
+ v.codec = e1.vvcC.codec
+ v.sps = e1.vvcC.sps
+ v.pps = e1.vvcC.pps
+ v.vps = e1.vvcC.vps
+ v.vvcC = e1.vvcC.data
} else {
throw new Error('unknown video stsd entry')
}
@@ -814,6 +888,10 @@ export class MP4Parser {
a.mvhdTimecale = moov.mvhd.timescale
a.timescale = a.formatTimescale = aTrack.mdia.mdhd.timescale
a.duration = aTrack.mdia.mdhd.duration || (a.mvhdDurtion / a.mvhdTimecale * a.timescale)
+ if (aTrack.edts?.elst) {
+ a.editList = aTrack.edts.elst
+ a.editListApplied = aTrack.editListApplied
+ }
const e1 = aTrack.mdia.minf.stbl.stsd.entries[0]
a.sampleSize = e1.sampleSize
a.sampleRate = e1.sampleRate
@@ -916,9 +994,9 @@ export class MP4Parser {
let dts = 0
let gopId = -1
if (!trun.samples.length && trun.sampleCount) {
- ret[tfhd.trackId] = []
+ ret[tfhd.trackId] = new Array(trun.sampleCount)
for (let i = 0; i < trun.sampleCount; i++) {
- ret[tfhd.trackId].push({
+ ret[tfhd.trackId][i] = ({
offset,
dts,
duration: defaultDuration,
diff --git a/packages/xgplayer-transmuxer/src/mp4/mp4.js b/packages/xgplayer-transmuxer/src/mp4/mp4.js
index 89e6780ff..03a70d0bb 100644
--- a/packages/xgplayer-transmuxer/src/mp4/mp4.js
+++ b/packages/xgplayer-transmuxer/src/mp4/mp4.js
@@ -12,6 +12,8 @@ export class MP4 {
'hvcC',
'dinf',
'dref',
+ 'edts',
+ 'elst',
'esds',
'ftyp',
'hdlr',
@@ -60,7 +62,9 @@ export class MP4 {
'schi',
'mehd',
'fiel',
- 'sdtp'
+ 'sdtp',
+ 'bvc2',
+ 'bv2C'
].reduce((p, c) => {
p[c] = [c.charCodeAt(0), c.charCodeAt(1), c.charCodeAt(2), c.charCodeAt(3)]
return p
@@ -165,6 +169,15 @@ export class MP4 {
return ret
}
+ static FullBox (type, version, flags, ...payload) {
+ return MP4.box(type, new Uint8Array([
+ version,
+ (flags >> 16) & 0xff,
+ (flags >> 8) & 0xff,
+ flags & 0xff
+ ]), ...payload)
+ }
+
static ftyp (tracks) {
const isHevc = tracks.find(t => t.type === TrackType.VIDEO && t.codecType === VideoCodecType.HEVC)
return isHevc ? MP4.FTYPHEV1 : MP4.FTYPAVC1
@@ -259,6 +272,7 @@ export class MP4 {
const trak = MP4.box(
MP4.types.trak,
MP4.tkhd(track.id, track.tkhdDuration || 0, track.width, track.height),
+ // track.editList ? MP4.edts(track.editList) : undefined,
MP4.mdia(track)
)
// console.log('[remux],trak, len,', trak.byteLength, track.id, hashVal(trak.toString()))
@@ -295,6 +309,14 @@ export class MP4 {
return tkhd
}
+ static edts (elstData) {
+ return MP4.box(MP4.types.edts, MP4.elst(elstData))
+ }
+
+ static elst ({entries, entriesData, version}) {
+ return MP4.FullBox(MP4.types.elst, version, 0, entriesData)
+ }
+
static mdia (track) {
const mdia = MP4.box(MP4.types.mdia, MP4.mdhd(track.duration, track.timescale), MP4.hdlr(track.type), MP4.minf(track))
// console.log('[remux],mdia, len,', mdia.byteLength, hashVal(mdia.toString()))
@@ -331,7 +353,7 @@ export class MP4 {
static stbl (track) {
const extBox = []
if (track && track.ext) {
- track.ext.stss && extBox.push(MP4.stss(track.ext.stss.entries))
+ // track.ext.stss && extBox.push(MP4.stss(track.ext.stss.entries))
// track.ext.stss && extBox.push(MP4.ctts(track.ext.stss.entries))
}
const stbl = MP4.box(MP4.types.stbl, MP4.stsd(track), MP4.STTS, extBox[0], MP4.STSC, MP4.STSZ, MP4.STCO)
@@ -353,7 +375,7 @@ export class MP4 {
content = MP4.encv(track)
// console.log('[remux],encv, len,', content.byteLength, track.type, hashVal(content.toString()))
} else {
- content = MP4.avc1hev1(track)
+ content = MP4.avc1hev1vvc1(track)
// console.log('[remux],avc1hev1, len,', content.byteLength, track.type, hashVal(content.toString()))
}
const stsd = MP4.box(MP4.types.stsd, new Uint8Array([
@@ -493,10 +515,22 @@ export class MP4 {
return MP4.box(MP4.types.sinf, content, MP4.box(MP4.types.frma, frma), MP4.box(MP4.types.schm, schm), schi)
}
- static avc1hev1 (track) {
- const isHevc = track.codecType === VideoCodecType.HEVC
- const typ = isHevc ? MP4.types.hvc1 : MP4.types.avc1
- const config = isHevc ? MP4.hvcC(track) : MP4.avcC(track)
+ static avc1hev1vvc1 (track) {
+ let config
+ let typ
+ if (track.codecType === VideoCodecType.HEVC) {
+ config = MP4.hvcC(track)
+ typ = MP4.types.hvc1
+ } else if (track.codecType === VideoCodecType.VVCC){
+ config = MP4.vvcC(track)
+ typ = MP4.types.bvc2
+ } else {
+ config = MP4.avcC(track)
+ typ = MP4.types.avc1
+ }
+ // const isHevc = track.codecType === VideoCodecType.HEVC
+ // const typ = isHevc ? MP4.types.hvc1 : MP4.types.avc1
+ // const config = isHevc ? MP4.hvcC(track) : MP4.avcC(track)
const boxes = [
new Uint8Array([
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
@@ -524,7 +558,7 @@ export class MP4 {
]
// console.log('[remux],avc1hev1_0, len,', boxes[0].byteLength, hashVal(boxes[0].toString()))
// console.log('[remux],avc1hev1_1, len,', boxes[1].byteLength, hashVal(boxes[1].toString()))
- if (isHevc) {
+ if (track.codecType === VideoCodecType.HEVC) {
boxes.push(MP4.box(MP4.types.fiel, new Uint8Array([0x01, 0x00])))
// console.log('[remux],fiel, len,', boxes[2].byteLength, hashVal(boxes[2].toString()))
} else if (track.sarRatio && track.sarRatio.length > 1) {
@@ -565,6 +599,11 @@ export class MP4 {
.concat(...pps)))
}
+ static vvcC (track) {
+ const vvcC = track.vvcC
+ return MP4.box(MP4.types.bv2C, new Uint8Array(vvcC))
+ }
+
static hvcC (track) {
const hvcC = track.hvcC
if (hvcC instanceof ArrayBuffer || hvcC instanceof Uint8Array) return hvcC
@@ -767,11 +806,10 @@ export class MP4 {
static traf (track) {
const tfhd = MP4.tfhd(track.id)
- // console.log('[remux],tfhd, len,', tfhd.byteLength, hashVal(tfhd.toString()), ', trackid = ', track.id)
- // console.log('[remux],tfdt,baseMediaDecodeTime,', track.baseMediaDecodeTime)
- const tfdt = MP4.tfdt(track, track.baseMediaDecodeTime)
+ const tfdt = MP4.tfdt(track.baseMediaDecodeTime)
let sencLength = 0
let samples
+
if (track.isVideo && track.videoSenc) {
samples = track.videoSenc
samples.forEach(function (item) {
@@ -783,11 +821,8 @@ export class MP4 {
})
}
track.videoSencLength = sencLength
- // console.log('[remux],tfdt, len,', tfdt.toString().length)
- // console.log('[remux],tfdt, len,', tfdt.byteLength, hashVal(tfdt.toString()))
if (!track.useEME || (!track.isVideoEncryption && !track.isAudioEncryption)) {
const sdtp = MP4.sdtp(track)
- // console.log('[remux],sdtp, len,', sdtp.byteLength, hashVal(sdtp.toString()))
const offset = 16 + // tfhd
20 + // tfdt
8 + // traf header
@@ -836,7 +871,6 @@ export class MP4 {
const senc = MP4.senc(track)
const trun = MP4.trun1(track)
const traf = MP4.box(MP4.types.traf, tfhd, tfdt, sbgp, saiz, saio, senc, trun)
- // console.log('[remux],trex, len,', traf.byteLength, hashVal(traf.toString()))
return traf
}
}
@@ -855,41 +889,36 @@ export class MP4 {
// const ceil = id === 1 ? 12 : 4
const buffer = new Buffer()
const sampleCount = Buffer.writeUint32(data.samples.length)
- let offset = null
- if (data.isVideo) {
- const sencLength = data.videoSencLength
- /*
- 16 + // mfhd
+ const baseOffset =
16 + // tfhd
20 + // tfdt
- 17 + //saiz
- 24 + //saio
- data.samples.length*16
- 4(offset) + 4(sampleCount) + 12(header) //trun
- 12(header) + sencLength //senc
8 + // traf header
+ 16 + // mfhd
8 + // moof header
8 // mdat header
- = 149+data.samples.length * 16 + sencLength
+ let offset = null
+
+ if (data.isVideo) {
+ const sencLength = data.videoSencLength
+ /*
+ 17 + //saiz
+ 24 + //saio
+ data.samples.length*16
+ 4(offset) + 4(sampleCount) + 12(header) //trun
+ 12(header) + sencLength //senc
*/
- offset = Buffer.writeUint32(data.samples.length * 16 + sencLength + 149)
+ offset = Buffer.writeUint32(baseOffset + data.samples.length * 16 + sencLength + 77)
if (!data.isVideoEncryption && data.isAudioEncryption) {
- offset = Buffer.writeUint32(data.samples.length * 16 + 92)
+ offset = Buffer.writeUint32(baseOffset + data.samples.length * 16 + 20)
}
} else {
/*
- 16 + // mfhd
- 16 + // tfhd
- 20 + // tfdt
- 28 + //sbgp
- 4(offset) + 4(sampleCount) + 12(header) //trun
- 8 + // traf header
- 8 + // moof header
- 8 // mdat header
+ 28 + // sbgp
+ 4(offset) + 4(sampleCount) + 12(header) //trun
*/
- let len = data.samples.length * 12 + 124
+ let len = baseOffset + data.samples.length * 12 + 52
if (data.isAudioEncryption) {
- len = data.samples.length * 12 + 8 * data.audioSenc.length + 177
+ len = baseOffset + data.samples.length * 12 + 8 * data.audioSenc.length + 105
}
offset = Buffer.writeUint32(len)
}
@@ -997,32 +1026,21 @@ export class MP4 {
]))
}
- static tfdt (data, baseMediaDecodeTime) {
+ static tfdt (baseMediaDecodeTime) {
const upperWordBaseMediaDecodeTime = Math.floor(baseMediaDecodeTime / (UINT32_MAX + 1))
const lowerWordBaseMediaDecodeTime = Math.floor(baseMediaDecodeTime % (UINT32_MAX + 1))
- if (data.useEME && (data.isVideoEncryption || data.isAudioEncryption)) {
- return MP4.box(MP4.types.tfdt, new Uint8Array([
- 0x00, // version 0
- 0x00, 0x00, 0x00, // flags
- lowerWordBaseMediaDecodeTime >> 24,
- (lowerWordBaseMediaDecodeTime >> 16) & 0xff,
- (lowerWordBaseMediaDecodeTime >> 8) & 0xff,
- lowerWordBaseMediaDecodeTime & 0xff
- ]))
- } else {
- return MP4.box(MP4.types.tfdt, new Uint8Array([
- 0x01, // version 1
- 0x00, 0x00, 0x00, // flags
- upperWordBaseMediaDecodeTime >> 24,
- (upperWordBaseMediaDecodeTime >> 16) & 0xff,
- (upperWordBaseMediaDecodeTime >> 8) & 0xff,
- upperWordBaseMediaDecodeTime & 0xff,
- lowerWordBaseMediaDecodeTime >> 24,
- (lowerWordBaseMediaDecodeTime >> 16) & 0xff,
- (lowerWordBaseMediaDecodeTime >> 8) & 0xff,
- lowerWordBaseMediaDecodeTime & 0xff
- ]))
- }
+ return MP4.box(MP4.types.tfdt, new Uint8Array([
+ 0x01, // version 1
+ 0x00, 0x00, 0x00, // flags
+ upperWordBaseMediaDecodeTime >> 24,
+ (upperWordBaseMediaDecodeTime >> 16) & 0xff,
+ (upperWordBaseMediaDecodeTime >> 8) & 0xff,
+ upperWordBaseMediaDecodeTime & 0xff,
+ lowerWordBaseMediaDecodeTime >> 24,
+ (lowerWordBaseMediaDecodeTime >> 16) & 0xff,
+ (lowerWordBaseMediaDecodeTime >> 8) & 0xff,
+ lowerWordBaseMediaDecodeTime & 0xff
+ ]))
}
static trun (samples, offset) {
diff --git a/packages/xgplayer-transmuxer/src/utils/index.js b/packages/xgplayer-transmuxer/src/utils/index.js
index 70d16a3e6..c74660c7c 100644
--- a/packages/xgplayer-transmuxer/src/utils/index.js
+++ b/packages/xgplayer-transmuxer/src/utils/index.js
@@ -29,10 +29,20 @@ export function readBig32 (data, i = 0) {
return (data[i] << 24 >>> 0) + (data[i + 1] << 16) + (data[i + 2] << 8) + (data[i + 3] || 0)
}
+export function readInt32 (data, i = 0) {
+ const dv = new DataView(data.buffer, data.byteOffset, data.byteLength)
+ return dv.getInt32(i)
+}
+
export function readBig64 (data, i = 0) {
return readBig32(data, i) * MAX_SIZE + readBig32(data, i + 4)
}
+export function readInt64 (data, i = 0) {
+ const dv = new DataView(data.buffer, data.byteOffset, data.byteLength)
+ return dv.getInt32(i) * 4294967296 + dv.getUint32(i + 4)
+}
+
export function getAvcCodec (codecs) {
let codec = 'avc1.'
let h
diff --git a/packages/xgplayer/src/plugins/poster/index.js b/packages/xgplayer/src/plugins/poster/index.js
index 84bd1e5c9..6bde913cd 100644
--- a/packages/xgplayer/src/plugins/poster/index.js
+++ b/packages/xgplayer/src/plugins/poster/index.js
@@ -21,7 +21,7 @@ class Poster extends Plugin {
isEndedShow: true, // 是否在播放结束之后显示
hideCanplay: false, // cnaplay 时间大于1的时候才隐藏
poster: '', // 封面图地址
- fillMode: 'fixWidth', // fixWidth / fixHeight / cover / container
+ fillMode: 'fixWidth' // fixWidth / fixHeight / cover / container
}
}
diff --git a/packages/xgplayer/src/plugins/progress/miniProgress.js b/packages/xgplayer/src/plugins/progress/miniProgress.js
index 5c36c8e8f..fd786a8b4 100644
--- a/packages/xgplayer/src/plugins/progress/miniProgress.js
+++ b/packages/xgplayer/src/plugins/progress/miniProgress.js
@@ -36,7 +36,7 @@ class MiniProgress extends Plugin {
const _style = {
cached: getBgColor(commonStyle.cachedColor),
played: getBgColor(commonStyle.playedColor),
- progress: getBgColor(commonStyle.progressColor),
+ progress: getBgColor(commonStyle.progressColor)
}
return `
diff --git a/yarn.lock b/yarn.lock
index 90dea2656..17b54aeb5 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -6360,6 +6360,15 @@ xgplayer-mp4-loader@0.0.1:
xgplayer-streaming-shared "3.0.0-next.2-1"
xgplayer-transmuxer "3.0.0-next.2-1"
+xgplayer-mp4-loader@3.0.11-alpha.10:
+ version "3.0.11-alpha.10"
+ resolved "https://registry.npmjs.org/xgplayer-mp4-loader/-/xgplayer-mp4-loader-3.0.11-alpha.10.tgz#9d3e64d47a66f05c7b88b202241f662600fbb3c3"
+ integrity sha512-8PlqOmPEMoq3I92vTKRlO+jIQkPbcLIgNj0xLXcT1AiaiJIUQU5v0wTp0eueBonB/8g99wCBV2OGwBa0ECJMDg==
+ dependencies:
+ eventemitter3 "^4.0.7"
+ xgplayer-streaming-shared "3.0.11-alpha.10"
+ xgplayer-transmuxer "3.0.11-alpha.10"
+
xgplayer-streaming-shared@3.0.0-next.2-1:
version "3.0.0-next.2-1"
resolved "https://registry.yarnpkg.com/xgplayer-streaming-shared/-/xgplayer-streaming-shared-3.0.0-next.2-1.tgz#357cd2d160f3f12a21dacd8a8fadb7f598639166"
@@ -6376,6 +6385,13 @@ xgplayer-streaming-shared@3.0.0-next.8:
"@babel/runtime" "^7.15.3"
core-js "3"
+xgplayer-streaming-shared@3.0.11-alpha.10:
+ version "3.0.11-alpha.10"
+ resolved "https://registry.yarnpkg.com/xgplayer-streaming-shared/-/xgplayer-streaming-shared-3.0.11-alpha.10.tgz#dca27d03be56d049203fe90b7c692d86e04a6ec6"
+ integrity sha512-qRd6ELCMRGyImxKBeI4tY66CxD54SPeL3oJB3zgboutoAM2ME5toCXp7eg6dEsGUQBc5mIk0HOW6yUVEw3x9Sw==
+ dependencies:
+ eventemitter3 "^4.0.7"
+
xgplayer-transmuxer@3.0.0-next.2-1:
version "3.0.0-next.2-1"
resolved "https://registry.yarnpkg.com/xgplayer-transmuxer/-/xgplayer-transmuxer-3.0.0-next.2-1.tgz#9e2deb51b83d4252bad07b3ae47139df19d43864"
@@ -6392,6 +6408,15 @@ xgplayer-transmuxer@3.0.0-next.8:
"@babel/runtime" "^7.15.3"
core-js "3"
+xgplayer-transmuxer@3.0.11-alpha.10:
+ version "3.0.11-alpha.10"
+ resolved "https://registry.npmjs.org/xgplayer-transmuxer/-/xgplayer-transmuxer-3.0.11-alpha.10.tgz#8f4a461f672b11107d07e46d98b77fc2490227e8"
+ integrity sha512-QbUkF4m1KXt+JOeTwM0AO/VZN6g697thGZjYMPkoXjxIU4I0F8lybc46Ng+0bZn2aKci54gAE660xsyiHFEUkA==
+ dependencies:
+ "@babel/runtime" "^7.15.3"
+ concat-typed-array "^1.0.2"
+ crypto-es "^1.2.4"
+
xml-name-validator@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-4.0.0.tgz#79a006e2e63149a8600f15430f0a4725d1524835"