diff --git a/index.d.ts b/index.d.ts
index 3638092773..cdbe0a2367 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -1043,6 +1043,7 @@ declare namespace dashjs {
},
text?: {
defaultEnabled?: boolean,
+ dispatchForManualRendering?: boolean,
extendSegmentedCues?: boolean,
imsc?: {
displayForcedOnlyMode?: boolean,
@@ -1533,6 +1534,8 @@ declare namespace dashjs {
CAPTION_RENDERED: 'captionRendered';
CAPTION_CONTAINER_RESIZE: 'captionContainerResize';
CONFORMANCE_VIOLATION: 'conformanceViolation';
+ CUE_ENTER: 'cueEnter';
+ CUE_EXIT: 'cueExit';
DVB_FONT_DOWNLOAD_ADDED: 'dvbFontDownloadAdded';
DVB_FONT_DOWNLOAD_COMPLETE: 'dvbFontDownloadComplete';
DVB_FONT_DOWNLOAD_FAILED: 'dvbFontDownloadFailed';
@@ -1982,6 +1985,19 @@ declare namespace dashjs {
content: object;
}
+ export interface CueEnterEvent extends Event {
+ type: MediaPlayerEvents['CUE_ENTER'];
+ cueID: string;
+ text: string;
+ startTime: number;
+ endTime: number;
+ }
+
+ export interface CueExitEvent extends Event {
+ type: MediaPlayerEvents['CUE_EXIT'];
+ cueID: string;
+ }
+
export interface AdaptationSetRemovedNoCapabilitiesEvent extends Event {
type: MediaPlayerEvents['ADAPTATION_SET_REMOVED_NO_CAPABILITIES'];
adaptationSet: object;
diff --git a/samples/captioning/events.html b/samples/captioning/events.html
new file mode 100644
index 0000000000..54abc4175b
--- /dev/null
+++ b/samples/captioning/events.html
@@ -0,0 +1,216 @@
+
+
+
+
+ Captions Event Sample
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Subtitle Event Handling
+
+ Example showing how to consume subtitle events raised by
+ dash.js. This way you can render the subtitles yourself.
+
+
+
+
Current Subtitle
+
+
+
+
+
+
+
+
+
+
+
+
+
+
00:00:00
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
00:00:00
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/samples/samples.json b/samples/samples.json
index 403fe7f30c..7f1beb7c7b 100644
--- a/samples/samples.json
+++ b/samples/samples.json
@@ -512,6 +512,19 @@
"Video",
"Audio"
]
+ },
+ {
+ "title": "Subtitle Event Handling",
+ "description": "Example showing how to consume subtitle events raised by dash.js. This way you can render the subtitles yourself.",
+ "href": "captioning/events.html",
+ "image": "lib/img/sintel-1.jpg",
+ "labels": [
+ "VoD",
+ "External caption",
+ "Video",
+ "Audio",
+ "Events"
+ ]
}
]
},
diff --git a/src/core/Settings.js b/src/core/Settings.js
index 59ad6982c7..7e4f66d6a7 100644
--- a/src/core/Settings.js
+++ b/src/core/Settings.js
@@ -142,6 +142,7 @@ import Events from './events/Events';
* },
* text: {
* defaultEnabled: true,
+ * dispatchForManualRendering: false,
* extendSegmentedCues: true,
* imsc: {
* displayForcedOnlyMode: false,
@@ -482,6 +483,8 @@ import Events from './events/Events';
* @typedef {Object} Text
* @property {boolean} [defaultEnabled=true]
* Enable/disable subtitle rendering by default.
+ * @property {boolean} [dispatchForManualRendering=false]
+ * Enable/disable firing of CueEnter/CueExit events. This will disable the native display of subtitles and should be used when you want to have full control over rendering them.
* @property {boolean} [extendSegmentedCues=true]
* Enable/disable patching of segmented cues in order to merge as a single cue by extending cue end time.
* @property {boolean} [imsc.displayForcedOnlyMode=false]
@@ -951,6 +954,7 @@ function Settings() {
},
text: {
defaultEnabled: true,
+ dispatchForManualRendering: false,
extendSegmentedCues: true,
imsc: {
displayForcedOnlyMode: false,
diff --git a/src/streaming/MediaPlayerEvents.js b/src/streaming/MediaPlayerEvents.js
index 0e9592ca15..49acb95f59 100644
--- a/src/streaming/MediaPlayerEvents.js
+++ b/src/streaming/MediaPlayerEvents.js
@@ -259,6 +259,18 @@ class MediaPlayerEvents extends EventsBase {
*/
this.TEXT_TRACK_ADDED = 'textTrackAdded';
+ /**
+ * Triggered when a caption cue should start being displayed (cue enter)
+ * @event MediaPlayerEvents#CUE_ENTER
+ */
+ this.CUE_ENTER = 'cueEnter';
+
+ /**
+ * Triggered when a caption cue should stop being displayed (cue exit)
+ * @event MediaPlayerEvents#CUE_EXIT
+ */
+ this.CUE_EXIT = 'cueExit';
+
/**
* Triggered when a throughput measurement based on the last segment request has been stored
* @event MediaPlayerEvents#THROUGHPUT_MEASUREMENT_STORED
diff --git a/src/streaming/text/TextController.js b/src/streaming/text/TextController.js
index 4c32ae61a6..1ed6669362 100644
--- a/src/streaming/text/TextController.js
+++ b/src/streaming/text/TextController.js
@@ -181,7 +181,7 @@ function TextController(config) {
/**
* Event that is triggered if a font download of a font described in an essential property descriptor
- * tag fails.
+ * tag fails.
* @param {FontInfo} font - font information
* @private
*/
@@ -194,9 +194,9 @@ function TextController(config) {
};
/**
- * Set a font with an essential property
+ * Set a font with an essential property
* @private
- */
+ */
function _onFontDownloadSuccess(font) {
logger.debug(`Successfully downloaded ${font.isEssential ? 'an essential' : 'a'} font - fontFamily: ${font.fontFamily}, url: ${font.url}`);
if (font.isEssential) {
@@ -353,13 +353,14 @@ function TextController(config) {
if (currentNativeTrackInfo && (currentNativeTrackInfo.mode !== Constants.TEXT_DISABLED)) {
textTracks[streamId].setModeForTrackIdx(oldTrackIdx, Constants.TEXT_HIDDEN);
}
-
+
textTracks[streamId].setCurrentTrackIdx(idx);
currentTrackInfo = textTracks[streamId].getCurrentTrackInfo();
- currentNativeTrackInfo = (currentTrackInfo) ? videoModel.getTextTrack(currentTrackInfo.kind, currentTrackInfo.id, currentTrackInfo.lang, currentTrackInfo.isTTML, currentTrackInfo.isEmbedded) : null;
- if (currentTrackInfo && (currentTrackInfo.mode !== Constants.TEXT_DISABLED)) {
+ const dispatchForManualRendering = settings.get().streaming.text.dispatchForManualRendering;
+
+ if (currentTrackInfo && !dispatchForManualRendering && (currentTrackInfo.mode !== Constants.TEXT_DISABLED)) {
textTracks[streamId].setModeForTrackIdx(idx, Constants.TEXT_SHOWING);
}
diff --git a/src/streaming/text/TextTracks.js b/src/streaming/text/TextTracks.js
index a87db21017..cc7ee393ec 100644
--- a/src/streaming/text/TextTracks.js
+++ b/src/streaming/text/TextTracks.js
@@ -80,7 +80,8 @@ function TextTracks(config) {
previousISDState,
topZIndex,
resizeObserver,
- hasRequestAnimationFrame;
+ hasRequestAnimationFrame,
+ currentCaptionEventCue;
function setup() {
logger = Debug(context).getInstance().getLogger(instance);
@@ -138,6 +139,7 @@ function TextTracks(config) {
}
function createTracks() {
+ const dispatchForManualRendering = settings.get().streaming.text.dispatchForManualRendering;
//Sort in same order as in manifest
textTrackQueue.sort(function (a, b) {
@@ -195,7 +197,7 @@ function TextTracks(config) {
for (let idx = 0; idx < textTrackQueue.length; idx++) {
const videoTextTrack = getTrackByIdx(idx);
if (videoTextTrack) {
- videoTextTrack.mode = (idx === defaultIndex) ? Constants.TEXT_SHOWING : Constants.TEXT_HIDDEN;
+ videoTextTrack.mode = (idx === defaultIndex && !dispatchForManualRendering) ? Constants.TEXT_SHOWING : Constants.TEXT_HIDDEN;
videoTextTrack.manualMode = (idx === defaultIndex) ? Constants.TEXT_SHOWING : Constants.TEXT_HIDDEN;
}
}
@@ -412,16 +414,16 @@ function TextTracks(config) {
function _renderCaption(cue) {
if (captionContainer) {
clearCaptionContainer.call(this);
-
+
const finalCue = document.createElement('div');
captionContainer.appendChild(finalCue);
-
+
previousISDState = renderHTML(
- cue.isd,
- finalCue,
- function (src) { return _resolveImageSrc(cue, src) },
- captionContainer.clientHeight,
- captionContainer.clientWidth,
+ cue.isd,
+ finalCue,
+ function (src) { return _resolveImageSrc(cue, src) },
+ captionContainer.clientHeight,
+ captionContainer.clientWidth,
settings.get().streaming.text.imsc.displayForcedOnlyMode,
function (err) { logger.info('renderCaption :', err) /*TODO: add ErrorHandler management*/ },
previousISDState,
@@ -434,7 +436,7 @@ function TextTracks(config) {
// Check that a new cue immediately follows the previous cue
function _areCuesAdjacent(cue, prevCue) {
- if (!prevCue) {
+ if (!prevCue) {
return false;
}
// Check previous cue endTime with current cue startTime
@@ -450,7 +452,7 @@ function TextTracks(config) {
if (!_cuesContentAreEqual(prevCue, cue, CUE_PROPS_TO_COMPARE)) {
return false;
- }
+ }
prevCue.endTime = Math.max(prevCue.endTime, cue.endTime);
return true;
@@ -483,6 +485,7 @@ function TextTracks(config) {
*/
function addCaptions(trackIdx, timeOffset, captionData) {
const track = getTrackByIdx(trackIdx);
+ const dispatchForManualRendering = settings.get().streaming.text.dispatchForManualRendering;
if (!track) {
return;
@@ -493,16 +496,22 @@ function TextTracks(config) {
}
for (let item = 0; item < captionData.length; item++) {
- let cue;
+ let cue = null;
const currentItem = captionData[item];
track.cellResolution = currentItem.cellResolution;
track.isFromCEA608 = currentItem.isFromCEA608;
if (!isNaN(currentItem.start) && !isNaN(currentItem.end)) {
- cue = currentItem.type === 'html' && captionContainer ? _handleHtmlCaption(currentItem, timeOffset, track)
- : currentItem.data ? _handleNonHtmlCaption(currentItem, timeOffset, track) : null;
+ if (dispatchForManualRendering) {
+ cue = _handleCaptionEvents(currentItem, timeOffset);
+ } else if (_isHTMLCue(currentItem) && captionContainer) {
+ cue = _handleHtmlCaption(currentItem, timeOffset, track);
+ } else if (currentItem.data) {
+ cue = _handleNonHtmlCaption(currentItem, timeOffset, track);
+ }
}
+
try {
if (cue) {
if (!cueInTrack(track, cue)) {
@@ -520,7 +529,7 @@ function TextTracks(config) {
if (_areCuesAdjacent(cue, prevCue)) {
if (!_extendLastCue(cue, prevCue)) {
- /* If cues are adjacent but not identical (extended), let the render function of the next cue
+ /* If cues are adjacent but not identical (extended), let the render function of the next cue
* clear up the captionsContainer so removal and appending are instantaneous.
* Only do this for imsc subs (where isd is present).
*/
@@ -557,35 +566,50 @@ function TextTracks(config) {
}
}
+ function _handleCaptionEvents(currentItem, timeOffset) {
+ let cue = _getCueInformation(currentItem, timeOffset);
+
+ cue.onenter = function () {
+ // HTML Tracks don't trigger the onexit event when a new cue is entered,
+ // we need to manually trigger it
+ if (_isHTMLCue(currentItem) && currentCaptionEventCue && currentCaptionEventCue.cueID !== cue.cueID) {
+ _triggerCueExit(currentCaptionEventCue);
+ }
+ currentCaptionEventCue = cue;
+ _triggerCueEnter(cue);
+ };
+
+ cue.onexit = function () {
+ _triggerCueExit(cue);
+ currentCaptionEventCue = null;
+ };
+
+ return cue;
+ }
+
+ function _triggerCueEnter(cue) {
+ eventBus.trigger(MediaPlayerEvents.CUE_ENTER, cue);
+ }
+
+ function _triggerCueExit(cue) {
+ eventBus.trigger(MediaPlayerEvents.CUE_EXIT, {
+ cueID: cue.cueID
+ });
+ }
+
function _handleHtmlCaption(currentItem, timeOffset, track) {
const self = this;
- let cue = new Cue(currentItem.start + timeOffset, currentItem.end + timeOffset, '');
- cue.cueHTMLElement = currentItem.cueHTMLElement;
- cue.isd = currentItem.isd;
- cue.images = currentItem.images;
- cue.embeddedImages = currentItem.embeddedImages;
- cue.cueID = currentItem.cueID;
- cue.scaleCue = _scaleCue.bind(self);
- //useful parameters for cea608 subtitles, not for TTML one.
- cue.cellResolution = currentItem.cellResolution;
- cue.lineHeight = currentItem.lineHeight;
- cue.linePadding = currentItem.linePadding;
- cue.fontSize = currentItem.fontSize;
+ let cue = _getCueInformation(currentItem, timeOffset);
captionContainer.style.left = actualVideoLeft + 'px';
captionContainer.style.top = actualVideoTop + 'px';
captionContainer.style.width = actualVideoWidth + 'px';
captionContainer.style.height = actualVideoHeight + 'px';
- // Resolve images sources
- if (cue.isd) {
- _resolveImagesInContents(cue, cue.isd.contents);
- }
-
cue.onenter = function () {
if (track.mode === Constants.TEXT_SHOWING) {
if (this.isd) {
- if (hasRequestAnimationFrame) {
+ if (hasRequestAnimationFrame) {
// Ensure everything in _renderCaption happens in the same frame
requestAnimationFrame(() => _renderCaption(this));
} else {
@@ -621,9 +645,7 @@ function TextTracks(config) {
}
function _handleNonHtmlCaption(currentItem, timeOffset, track) {
- let cue = new Cue(currentItem.start - timeOffset, currentItem.end - timeOffset, currentItem.data);
-
- cue.cueID = `${cue.startTime}_${cue.endTime}`;
+ let cue = _getCueInformation(currentItem, timeOffset);
cue.isActive = false;
if (currentItem.styles) {
@@ -657,6 +679,46 @@ function TextTracks(config) {
return cue;
}
+ function _isHTMLCue(cue) {
+ return (cue.type === 'html');
+ }
+
+ function _getCueInformation(currentItem, timeOffset) {
+ if (_isHTMLCue(currentItem)) {
+ return _getCueInformationForHtml(currentItem, timeOffset);
+ }
+
+ return _getCueInformationForNonHtml(currentItem, timeOffset);
+ }
+
+ function _getCueInformationForHtml(currentItem, timeOffset) {
+ let cue = new Cue(currentItem.start + timeOffset, currentItem.end + timeOffset, '');
+ cue.cueHTMLElement = currentItem.cueHTMLElement;
+ cue.isd = currentItem.isd;
+ cue.images = currentItem.images;
+ cue.embeddedImages = currentItem.embeddedImages;
+ cue.cueID = currentItem.cueID;
+ cue.scaleCue = _scaleCue.bind(self); // FIXME(review): 'const self = this' stayed behind in _handleHtmlCaption and 'self' is not in scope here — in browsers it resolves to window; verify the intended binding
+ //useful parameters for cea608 subtitles, not for TTML one.
+ cue.cellResolution = currentItem.cellResolution;
+ cue.lineHeight = currentItem.lineHeight;
+ cue.linePadding = currentItem.linePadding;
+ cue.fontSize = currentItem.fontSize;
+
+ // Resolve images sources
+ if (cue.isd) {
+ _resolveImagesInContents(cue, cue.isd.contents);
+ }
+
+ return cue;
+ }
+
+ function _getCueInformationForNonHtml(currentItem, timeOffset) {
+ let cue = new Cue(currentItem.start - timeOffset, currentItem.end - timeOffset, currentItem.data);
+ cue.cueID = `${cue.startTime}_${cue.endTime}`;
+ return cue;
+ }
+
function manualCueProcessing(time) {
const activeTracks = _getManualActiveTracks();
@@ -664,17 +726,24 @@ function TextTracks(config) {
const targetTrack = activeTracks[0];
const cues = targetTrack.manualCueList;
-
if (cues && cues.length > 0) {
cues.forEach((cue) => {
// Render cue if target time is reached and not in active state
if (cue.startTime <= time && cue.endTime >= time && !cue.isActive) {
cue.isActive = true;
- // eslint-disable-next-line no-undef
- WebVTT.processCues(window, [cue], vttCaptionContainer, cue.cueID);
+ if (settings.get().streaming.text.dispatchForManualRendering) {
+ _triggerCueEnter(cue);
+ } else {
+ // eslint-disable-next-line no-undef
+ WebVTT.processCues(window, [cue], vttCaptionContainer, cue.cueID);
+ }
} else if (cue.isActive && (cue.startTime > time || cue.endTime < time)) {
cue.isActive = false;
- _removeManualCue(cue);
+ if (settings.get().streaming.text.dispatchForManualRendering) {
+ _triggerCueExit(cue);
+ } else {
+ _removeManualCue(cue);
+ }
}
})
}
@@ -705,7 +774,9 @@ function TextTracks(config) {
cues.forEach((cue) => {
if (cue.isActive) {
cue.isActive = false;
- if (vttCaptionContainer) {
+ if (settings.get().streaming.text.dispatchForManualRendering) {
+ _triggerCueExit(cue);
+ } else if (vttCaptionContainer) {
const divs = vttCaptionContainer.childNodes;
for (let i = 0; i < divs.length; ++i) {
if (divs[i].id === cue.cueID) {