Browse Source

fix: Correctly use videoSegmentTimingInfo from the transmuxer (#601)

This was caused by a bad merge of https://github.com/videojs/http-streaming/pull/371 into the lhls/tba branch, which is now master.
pull/643/head
Brandon Casey 6 years ago
committed by GitHub
parent
commit
857eb4c2c7
No known key found for this signature in database. GPG Key ID: 4AEE18F83AFDEB23
  1. 16
      src/media-segment-request.js
  2. 16
      src/segment-loader.js
  3. 4
      src/segment-transmuxer.js
  4. 28
      src/source-updater.js
  5. 17
      src/transmuxer-worker.js
  6. 8
      test/segment-transmuxer.test.js
  7. 12
      test/source-updater.test.js
  8. 22
      test/videojs-http-streaming.test.js

16
src/media-segment-request.js

@ -238,6 +238,7 @@ const transmuxAndNotify = ({
isPartial,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -318,6 +319,9 @@ const transmuxAndNotify = ({
videoEndFn(videoTimingInfo.end);
}
},
onVideoSegmentTimingInfo: (videoSegmentTimingInfo) => {
videoSegmentTimingInfoFn(videoSegmentTimingInfo);
},
onId3: (id3Frames, dispatchType) => {
id3Fn(segment, id3Frames, dispatchType);
},
@ -343,6 +347,7 @@ const handleSegmentBytes = ({
captionParser,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -431,6 +436,7 @@ const handleSegmentBytes = ({
isPartial,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -456,6 +462,7 @@ const decryptSegment = ({
captionParser,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -479,6 +486,7 @@ const decryptSegment = ({
captionParser,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -534,6 +542,7 @@ const waitForCompletion = ({
captionParser,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -579,6 +588,7 @@ const waitForCompletion = ({
captionParser,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -593,6 +603,7 @@ const waitForCompletion = ({
captionParser,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -620,6 +631,7 @@ const handleProgress = ({
progressFn,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -645,6 +657,7 @@ const handleProgress = ({
isPartial: true,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn
@ -725,6 +738,7 @@ export const mediaSegmentRequest = ({
progressFn,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -738,6 +752,7 @@ export const mediaSegmentRequest = ({
captionParser,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,
@ -804,6 +819,7 @@ export const mediaSegmentRequest = ({
progressFn,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
id3Fn,
captionsFn,
dataFn,

16
src/segment-loader.js

@ -1555,10 +1555,7 @@ export default class SegmentLoader extends videojs.EventTarget {
segments
});
const videoSegmentTimingInfoCallback =
this.handleVideoSegmentTimingInfo_.bind(this, segmentInfo.requestId);
this.sourceUpdater_.appendBuffer({segmentInfo, type, bytes, videoSegmentTimingInfoCallback}, (error) => {
this.sourceUpdater_.appendBuffer({type, bytes}, (error) => {
if (error) {
this.error(`appenderror for ${type} append with ${bytes.length} bytes`);
// If an append errors, we can't recover.
@ -1570,7 +1567,7 @@ export default class SegmentLoader extends videojs.EventTarget {
});
}
handleVideoSegmentTimingInfo_(requestId, event) {
handleVideoSegmentTimingInfo_(requestId, videoSegmentTimingInfo) {
if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
return;
}
@ -1582,14 +1579,14 @@ export default class SegmentLoader extends videojs.EventTarget {
}
segment.videoTimingInfo.transmuxerPrependedSeconds =
event.videoSegmentTimingInfo.prependedContentDuration || 0;
videoSegmentTimingInfo.prependedContentDuration || 0;
segment.videoTimingInfo.transmuxedPresentationStart =
event.videoSegmentTimingInfo.start.presentation;
videoSegmentTimingInfo.start.presentation;
segment.videoTimingInfo.transmuxedPresentationEnd =
event.videoSegmentTimingInfo.end.presentation;
videoSegmentTimingInfo.end.presentation;
// mainly used as a reference for debugging
segment.videoTimingInfo.baseMediaDecodeTime =
event.videoSegmentTimingInfo.baseMediaDecodeTime;
videoSegmentTimingInfo.baseMediaDecodeTime;
}
appendData_(segmentInfo, result) {
@ -1658,6 +1655,7 @@ export default class SegmentLoader extends videojs.EventTarget {
progressFn: this.handleProgress_.bind(this),
trackInfoFn: this.handleTrackInfo_.bind(this),
timingInfoFn: this.handleTimingInfo_.bind(this),
videoSegmentTimingInfoFn: this.handleVideoSegmentTimingInfo_.bind(this, segmentInfo.requestId),
captionsFn: this.handleCaptions_.bind(this),
id3Fn: this.handleId3_.bind(this),

4
src/segment-transmuxer.js

@ -72,6 +72,7 @@ export const processTransmux = ({
onTrackInfo,
onAudioTimingInfo,
onVideoTimingInfo,
onVideoSegmentTimingInfo,
onId3,
onCaptions,
onDone
@ -102,6 +103,9 @@ export const processTransmux = ({
if (event.data.action === 'videoTimingInfo') {
onVideoTimingInfo(event.data.videoTimingInfo);
}
if (event.data.action === 'videoSegmentTimingInfo') {
onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
}
if (event.data.action === 'id3Frame') {
onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
}

28
src/source-updater.js

@ -272,30 +272,12 @@ export default class SourceUpdater extends videojs.EventTarget {
* @param {Function} done the function to call when done
* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
*/
appendBuffer({segmentInfo, type, bytes, videoSegmentTimingInfoCallback}, doneFn) {
this.processedAppend_ = true;
const originalAction = actions.appendBuffer(bytes, segmentInfo || {mediaIndex: -1});
const originalDoneFn = doneFn;
let action = originalAction;
if (videoSegmentTimingInfoCallback) {
action = (_type, sourceUpdater) => {
if (_type === 'video' && this.videoBuffer) {
this.videoBuffer.addEventListener('videoSegmentTimingInfo', videoSegmentTimingInfoCallback);
}
originalAction(type, sourceUpdater);
};
doneFn = (err) => {
if (this.videoBuffer) {
this.videoBuffer.removeEventListener('videoSegmentTimingInfo', videoSegmentTimingInfoCallback);
}
originalDoneFn(err);
};
}
appendBuffer(options, doneFn) {
const {segmentInfo, type, bytes} = options;
this.processedAppend_ = true;
if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
this.delayedAudioAppendQueue_.push([{type, bytes, videoSegmentTimingInfoCallback}, doneFn]);
this.delayedAudioAppendQueue_.push([options, doneFn]);
this.logger_(`delayed audio append of ${bytes.length} until video append`);
return;
}
@ -303,7 +285,7 @@ export default class SourceUpdater extends videojs.EventTarget {
pushQueue({
type,
sourceUpdater: this,
action,
action: actions.appendBuffer(bytes, segmentInfo || {mediaIndex: -1}),
doneFn,
name: 'appendBuffer'
});

17
src/transmuxer-worker.js

@ -71,7 +71,22 @@ const wireFullTransmuxerEvents = function(self, transmuxer) {
});
});
transmuxer.on('videoSegmentTimingInfo', function(videoSegmentTimingInfo) {
transmuxer.on('videoSegmentTimingInfo', function(timingInfo) {
const videoSegmentTimingInfo = {
start: {
decode: videoTsToSeconds(timingInfo.start.dts),
presentation: videoTsToSeconds(timingInfo.start.pts)
},
end: {
decode: videoTsToSeconds(timingInfo.end.dts),
presentation: videoTsToSeconds(timingInfo.end.pts)
},
baseMediaDecodeTime: videoTsToSeconds(timingInfo.baseMediaDecodeTime)
};
if (timingInfo.prependedContentDuration) {
videoSegmentTimingInfo.prependedContentDuration = videoTsToSeconds(timingInfo.prependedContentDuration);
}
self.postMessage({
action: 'videoSegmentTimingInfo',
videoSegmentTimingInfo

8
test/segment-transmuxer.test.js

@ -67,6 +67,7 @@ QUnit.test('transmux returns data for full appends', function(assert) {
const trackInfoFn = sinon.spy();
const audioTimingFn = sinon.spy();
const videoTimingFn = sinon.spy();
const videoSegmentTimingInfoFn = sinon.spy();
this.transmuxer = createTransmuxer(false);
@ -80,6 +81,7 @@ QUnit.test('transmux returns data for full appends', function(assert) {
onTrackInfo: trackInfoFn,
onAudioTimingInfo: audioTimingFn,
onVideoTimingInfo: videoTimingFn,
onVideoSegmentTimingInfo: videoSegmentTimingInfoFn,
onId3: noop,
onCaptions: noop,
onDone: () => {
@ -87,6 +89,7 @@ QUnit.test('transmux returns data for full appends', function(assert) {
assert.ok(trackInfoFn.callCount, 'got trackInfo events');
assert.ok(audioTimingFn.callCount, 'got audioTimingInfo events');
assert.ok(videoTimingFn.callCount, 'got videoTimingInfo events');
assert.ok(videoSegmentTimingInfoFn.callCount, 'got videoSegmentTimingInfo events');
done();
}
});
@ -109,6 +112,7 @@ QUnit.test('transmux returns captions for full appends', function(assert) {
onTrackInfo: noop,
onAudioTimingInfo: noop,
onVideoTimingInfo: noop,
onVideoSegmentTimingInfo: noop,
onId3: noop,
onCaptions: captionsFn,
onDone: () => {
@ -125,6 +129,7 @@ QUnit.test('transmux returns data for partial appends', function(assert) {
const trackInfoFn = sinon.spy();
const audioTimingFn = sinon.spy();
const videoTimingFn = sinon.spy();
const videoSegmentTimingInfoFn = sinon.spy();
this.transmuxer = createTransmuxer(true);
@ -136,6 +141,8 @@ QUnit.test('transmux returns data for partial appends', function(assert) {
isPartial: true,
onData: () => {
dataFn();
// TODO: partial appends don't currently fire this
// assert.ok(videoSegmentTimingInfoFn.callCount, 'got videoSegmentTimingInfoFn event');
assert.ok(trackInfoFn.callCount, 'got trackInfo event');
assert.ok(videoTimingFn.callCount, 'got videoTimingInfo event');
@ -147,6 +154,7 @@ QUnit.test('transmux returns data for partial appends', function(assert) {
onTrackInfo: trackInfoFn,
onAudioTimingInfo: audioTimingFn,
onVideoTimingInfo: videoTimingFn,
onVideoSegmentTimingInfo: videoSegmentTimingInfoFn,
onId3: noop,
onCaptions: noop,
// This will be called on partialdone events,

12
test/source-updater.test.js

@ -6,7 +6,7 @@ import SourceUpdater from '../src/source-updater';
import {mp4VideoInit, mp4AudioInit, mp4Video, mp4Audio} from './dist/test-segments';
import { timeRangesEqual } from './custom-assertions.js';
const checkIntialDuration = function({duration}) {
const checkInitialDuration = function({duration}) {
// ie sometimes sets duration to infinity earlier then expected
if (videojs.browser.IS_EDGE || videojs.browser.IE_VERSION) {
QUnit.assert.ok(Number.isNaN(duration) || !Number.isFinite(duration), 'starting duration as expected');
@ -753,7 +753,7 @@ QUnit.test(
video: 'avc1.4D001E'
});
checkIntialDuration(this.mediaSource);
checkInitialDuration(this.mediaSource);
this.sourceUpdater.setDuration(11);
assert.equal(this.mediaSource.duration, 11, 'set duration on media source');
}
@ -780,7 +780,7 @@ QUnit[testOrSkip]('setDuration waits for audio buffer to finish updating', funct
done();
});
checkIntialDuration(this.mediaSource);
checkInitialDuration(this.mediaSource);
assert.ok(this.sourceUpdater.updating(), 'updating during appends');
});
@ -806,7 +806,7 @@ QUnit.test('setDuration waits for video buffer to finish updating', function(ass
done();
});
checkIntialDuration(this.mediaSource);
checkInitialDuration(this.mediaSource);
assert.ok(this.sourceUpdater.updating(), 'updating during appends');
});
@ -848,7 +848,7 @@ QUnit.test(
assert.equal(this.mediaSource.duration, 11, 'set duration on media source');
});
checkIntialDuration(this.mediaSource);
checkInitialDuration(this.mediaSource);
assert.ok(this.sourceUpdater.updating(), 'updating during appends');
}
);
@ -893,7 +893,7 @@ QUnit.test(
done();
});
checkIntialDuration(this.mediaSource);
checkInitialDuration(this.mediaSource);
}
);

22
test/videojs-http-streaming.test.js

@ -4192,22 +4192,18 @@ QUnit.test('convertToProgramTime will return stream time if buffered', function(
this.standardXHRResponse(this.requests[1]);
const mpc = this.player.vhs.masterPlaylistController_;
const mainSegmentLoader_ = mpc.mainSegmentLoader_;
mpc.mainSegmentLoader_.one('appending', () => {
const videoBuffer = mpc.sourceUpdater_.videoBuffer;
mainSegmentLoader_.one('appending', () => {
// since we don't run through the transmuxer, we have to manually trigger the timing
// info callback
videoBuffer.trigger({
type: 'videoSegmentTimingInfo',
videoSegmentTimingInfo: {
prependedGopDuration: 0,
start: {
presentation: 0
},
end: {
presentation: 1
}
mainSegmentLoader_.handleVideoSegmentTimingInfo_(mainSegmentLoader_.pendingSegment_.requestId, {
prependedGopDuration: 0,
start: {
presentation: 0
},
end: {
presentation: 1
}
});
});

Loading…
Cancel
Save