node_modules ignore

This commit is contained in:
2025-05-08 23:43:47 +02:00
parent e19d52f172
commit 4574544c9f
65041 changed files with 10593536 additions and 0 deletions

View File

@@ -0,0 +1,108 @@
/**
*
* MediaStore is a way to model media state (and changes to it) in a framework- and DOM-agnostic way. Like the difference between Redux
* (the core state manager) and the Redux react wrapper, MediaStore provides the primitive for aggregating media state together in one place.
*
* It receives events as media state change requests (like `mediaplayrequest`) and keeps an internal representation of the complete media
* state after they change, as opposed to querying the media state sources directly every time it needs to check what state something is in.
*
* It doesn't "know" how to update or query the StateOwners itself (like the media element). Rather, it relies on the StateMediator as an interface
* for getting and setting state and relies on the RequestMap as an interface for translating state change requests to state updates (typically also
* deferring to the StateMediator for setting state on the relevant StateOwners).
*
* Additionally, MediaStore state is not optimistically stored when a state change request is dispatched to it. It instead defers to the StateMediator,
* waiting for events from the StateOwners before checking if the state actually changed and only then committing it to its internal representation of MediaState.
*
* @module media-store
*/
import { StateMediator, EventOrAction } from './state-mediator.js';
import { RequestMap } from './request-map.js';
/**
 * MediaState is a full representation of all media-related state modeled by the MediaStore and its StateMediator.
 * Instead of checking the StateOwners' state directly or on the fly, MediaStore keeps a "snapshot" of the latest
 * state, which will be provided to any MediaStore subscribers whenever the state changes, and is arbitrarily retrievable
 * from the MediaStore using `getState()`.
 *
 * The mapped portion derives one state key per StateMediator entry, typed by that
 * entry's `get()` return type; the intersection adds preview state that is produced
 * by the RequestMap's preview handler rather than by the StateMediator.
 */
export type MediaState = Readonly<{
    [K in keyof StateMediator]: ReturnType<StateMediator[K]['get']>;
}> & {
    // Time (in seconds) being previewed, e.g. while hovering a seek bar.
    mediaPreviewTime: number;
    // URL (href) of the thumbnail image for the preview time.
    mediaPreviewImage: string;
    // Spatial media-fragment coords (#xywh) for the preview thumbnail sprite.
    // NOTE(review): the compiled request-map populates this via `split(',').map(s => +s)`,
    // which yields numbers — confirm whether this tuple should be number-typed.
    mediaPreviewCoords: [string, string, string, string];
};
/**
 * MediaStore is the primary abstraction for managing and monitoring media state and other state relevant to the media UI
 * (for example, fullscreen behavior or the availability of media-related functionality for a particular browser or runtime, such as volume control or Airplay). This includes:
 * - Keeping track of any state changes (examples: Is the media muted? Is the currently playing media live or on demand? What audio tracks are available for the current media?)
 * - Sharing the latest state with any MediaStore subscribers whenever it changes
 * - Receiving and responding to requests to change the media or related state (examples: I would like the media to be unmuted. I want to start casting now. I want to switch from English subtitles to Japanese.)
 * - Wiring up and managing the relationships between media state, media state change requests, and the stateful entities that “own” the majority of this state (examples: the current media element being used, the current root node, the current fullscreen element)
 * - Respecting and monitoring changes in certain optional behaviors that impact state or state change requests (examples: I want subtitles/closed captions to be on by default whenever media with them are loaded. I want to disable keeping track of the user's preferred volume level.)
 *
 * @example <caption>Basic Usage.</caption>
 * const mediaStore = createStore({
 *   media: myVideoElement,
 *   fullscreenElement: myMediaUIContainerElement,
 *   // documentElement: advancedRootNodeCase // Will default to `document`
 *   options: {
 *     defaultSubtitles: true // enable subtitles/captions by default
 *   },
 * });
 *
 * // NOTE: In a more realistic example, `myToggleMutedButton` and `mySeekForwardButton` would likely keep track of/"own" its current state. See, e.g. the `<mute-button>` Media Chrome Web Component.
 * const unsubscribe = mediaStore.subscribe(state => {
 *   myToggleMutedButton.textContent = state.muted ? 'Unmute' : 'Mute';
 * });
 *
 * myToggleMutedButton.addEventListener('click', () => {
 *   const type = mediaStore.getState().muted ? 'mediaunmuterequest' : 'mediamuterequest'
 *   mediaStore.dispatch({ type });
 * });
 *
 * mySeekForwardButton.addEventListener('click', () => {
 *   mediaStore.dispatch({
 *     type: 'mediaseekrequest',
 *     // NOTE: For all of our state change requests that require additional information, we rely on the `detail` property so we can conform to `CustomEvent`, making interop easier.
 *     detail: mediaStore.getState().mediaCurrentTime + 15,
 *   });
 * });
 *
 * // If your code has cases where it swaps out the media element being used
 * mediaStore.dispatch({
 *   type: 'mediaelementchangerequest',
 *   detail: myAudioElement,
 * });
 *
 * // ... Eventual teardown, when relevant. This is especially relevant for potential garbage collection/memory management considerations.
 * unsubscribe();
 *
 */
export type MediaStore = {
    /**
     * A method that expects an "Action" or "Event". Primarily used to make state change requests.
     */
    dispatch(eventOrAction: EventOrAction<any>): void;
    /**
     * A method to get the current state of the MediaStore.
     */
    getState(): Partial<MediaState>;
    /**
     * A method to "subscribe" to the MediaStore. A subscriber is just a callback function that is invoked with the current state whenever the MediaStore's state changes. The method returns an "unsubscribe" function, which should be used to tell the MediaStore to remove the corresponding subscriber.
     */
    subscribe(handler: (state: Partial<MediaState>) => void): () => void;
};
/**
 * Configuration accepted by `createMediaStore()`. All fields are optional;
 * `stateMediator` and `requestMap` fall back to the library defaults when omitted.
 */
type MediaStoreConfig = {
    // Initial media StateOwner (e.g. an HTMLVideoElement or compatible object).
    media?: any;
    // Element used as the target when entering fullscreen.
    fullscreenElement?: any;
    // Root node StateOwner for DOM-level concerns (usually `document`).
    documentElement?: any;
    // get/set interface between state names and StateOwners; defaults to the built-in StateMediator.
    stateMediator?: StateMediator;
    // Request-type -> side-effect map; defaults to the built-in RequestMap.
    requestMap?: RequestMap;
    // Behavioral options (e.g. defaultSubtitles); copied into the store, not held by reference.
    options?: any;
    // When true, StateOwner event listeners are only attached while at least one subscriber exists.
    monitorStateOwnersOnlyWithSubscriptions?: boolean;
};
/**
 * A factory for creating a `MediaStore` instance.
 * @param mediaStoreConfig - Configuration object for the `MediaStore`.
 * @returns A `MediaStore` wired to the provided StateOwners, StateMediator, and RequestMap.
 */
declare const createMediaStore: ({ media, fullscreenElement, documentElement, stateMediator, requestMap, options, monitorStateOwnersOnlyWithSubscriptions, }: MediaStoreConfig) => MediaStore;
export default createMediaStore;

View File

@@ -0,0 +1,280 @@
import {
stateMediator as defaultStateMediator,
prepareStateOwners
} from "./state-mediator.js";
import { areValuesEq } from "./util.js";
import { requestMap as defaultRequestMap } from "./request-map.js";
/**
 * Factory for a MediaStore: aggregates media state in one frozen snapshot,
 * attaches/detaches listeners on StateOwners (media element, its textTracks/
 * videoRenditions/audioTracks/remote, and the root node) according to the
 * StateMediator's declared event lists, and translates dispatched state change
 * requests via the RequestMap.
 *
 * @param {object} config
 * @param {any} [config.media] - initial media StateOwner
 * @param {any} [config.fullscreenElement] - fullscreen target element
 * @param {any} [config.documentElement] - root node StateOwner
 * @param {object} [config.stateMediator] - defaults to the library StateMediator
 * @param {object} [config.requestMap] - defaults to the library RequestMap
 * @param {object} [config.options] - behavioral options (copied, not referenced)
 * @param {boolean} [config.monitorStateOwnersOnlyWithSubscriptions] - when true
 *   (default), listeners on StateOwners exist only while there are subscribers.
 */
const createMediaStore = ({
  media,
  fullscreenElement,
  documentElement,
  stateMediator = defaultStateMediator,
  requestMap = defaultRequestMap,
  options = {},
  monitorStateOwnersOnlyWithSubscriptions = true
}) => {
  // Subscriber callbacks, invoked with the new state snapshot on every change.
  const callbacks = [];
  const stateOwners = {
    // Spreading options here since folks should not rely on holding onto references
    // for any app-level logic wrt options.
    options: { ...options }
  };
  // Current immutable state snapshot. Preview state is seeded explicitly since it
  // is produced by the RequestMap's preview handler, not by the StateMediator.
  let state = Object.freeze({
    mediaPreviewTime: void 0,
    mediaPreviewImage: void 0,
    mediaPreviewCoords: void 0,
    mediaPreviewChapter: void 0
  });
  // Merge a partial state delta into the snapshot; no-op (and no notification)
  // when the delta is nullish or value-equal to the current state.
  const updateState = (nextStateDelta) => {
    if (nextStateDelta == void 0)
      return;
    if (areValuesEq(nextStateDelta, state)) {
      return;
    }
    state = Object.freeze({
      ...state,
      ...nextStateDelta
    });
    callbacks.forEach((cb) => cb(state));
  };
  // Recompute every StateMediator-modeled value from the current StateOwners and
  // commit the result as one delta (single notification for subscribers).
  const updateStateFromFacade = () => {
    const nextState = Object.entries(stateMediator).reduce(
      (nextState2, [stateName, { get }]) => {
        nextState2[stateName] = get(stateOwners);
        return nextState2;
      },
      {}
    );
    updateState(nextState);
  };
  // Per-state bookkeeping of the listener currently attached for each event
  // source, so teardown can remove exactly what setup added.
  const stateUpdateHandlers = {};
  // Non-undefined while an updateStateOwners() call is in flight; concurrent
  // calls merge their deltas here and return early (see pendingUpdate below).
  let nextStateOwners = void 0;
  // Apply a StateOwners delta: decide per owner whether listeners must be torn
  // down and/or set up (owner changed, or subscriber count crossed 0<->1 when
  // monitorStateOwnersOnlyWithSubscriptions is on), rewire, then refresh state.
  // NOTE(review): when a call merges into a pending update, its
  // nextSubscriberCount argument is not carried over to the in-flight call —
  // confirm this is intended.
  const updateStateOwners = async (nextStateOwnersDelta, nextSubscriberCount) => {
    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
    const pendingUpdate = !!nextStateOwners;
    nextStateOwners = {
      ...stateOwners,
      ...nextStateOwners != null ? nextStateOwners : {},
      ...nextStateOwnersDelta
    };
    // An earlier call is still awaiting prepareStateOwners(); it will pick up
    // the merged delta once it resumes.
    if (pendingUpdate)
      return;
    await prepareStateOwners(...Object.values(nextStateOwnersDelta));
    // Teardown applies when we currently have listeners attached and either the
    // owner instance changed or the last subscriber is going away.
    const shouldTeardownFromSubscriberCount = callbacks.length > 0 && nextSubscriberCount === 0 && monitorStateOwnersOnlyWithSubscriptions;
    const mediaChanged = stateOwners.media !== nextStateOwners.media;
    const textTracksChanged = ((_a = stateOwners.media) == null ? void 0 : _a.textTracks) !== ((_b = nextStateOwners.media) == null ? void 0 : _b.textTracks);
    const videoRenditionsChanged = ((_c = stateOwners.media) == null ? void 0 : _c.videoRenditions) !== ((_d = nextStateOwners.media) == null ? void 0 : _d.videoRenditions);
    const audioTracksChanged = ((_e = stateOwners.media) == null ? void 0 : _e.audioTracks) !== ((_f = nextStateOwners.media) == null ? void 0 : _f.audioTracks);
    const remoteChanged = ((_g = stateOwners.media) == null ? void 0 : _g.remote) !== ((_h = nextStateOwners.media) == null ? void 0 : _h.remote);
    const rootNodeChanged = stateOwners.documentElement !== nextStateOwners.documentElement;
    const teardownMedia = !!stateOwners.media && (mediaChanged || shouldTeardownFromSubscriberCount);
    const teardownTextTracks = !!((_i = stateOwners.media) == null ? void 0 : _i.textTracks) && (textTracksChanged || shouldTeardownFromSubscriberCount);
    const teardownVideoRenditions = !!((_j = stateOwners.media) == null ? void 0 : _j.videoRenditions) && (videoRenditionsChanged || shouldTeardownFromSubscriberCount);
    const teardownAudioTracks = !!((_k = stateOwners.media) == null ? void 0 : _k.audioTracks) && (audioTracksChanged || shouldTeardownFromSubscriberCount);
    const teardownRemote = !!((_l = stateOwners.media) == null ? void 0 : _l.remote) && (remoteChanged || shouldTeardownFromSubscriberCount);
    const teardownRootNode = !!stateOwners.documentElement && (rootNodeChanged || shouldTeardownFromSubscriberCount);
    const teardownSomething = teardownMedia || teardownTextTracks || teardownVideoRenditions || teardownAudioTracks || teardownRemote || teardownRootNode;
    // Setup applies when the next owners exist and either the owner instance
    // changed or the first subscriber just arrived.
    const shouldSetupFromSubscriberCount = callbacks.length === 0 && nextSubscriberCount === 1 && monitorStateOwnersOnlyWithSubscriptions;
    const setupMedia = !!nextStateOwners.media && (mediaChanged || shouldSetupFromSubscriberCount);
    const setupTextTracks = !!((_m = nextStateOwners.media) == null ? void 0 : _m.textTracks) && (textTracksChanged || shouldSetupFromSubscriberCount);
    const setupVideoRenditions = !!((_n = nextStateOwners.media) == null ? void 0 : _n.videoRenditions) && (videoRenditionsChanged || shouldSetupFromSubscriberCount);
    const setupAudioTracks = !!((_o = nextStateOwners.media) == null ? void 0 : _o.audioTracks) && (audioTracksChanged || shouldSetupFromSubscriberCount);
    const setupRemote = !!((_p = nextStateOwners.media) == null ? void 0 : _p.remote) && (remoteChanged || shouldSetupFromSubscriberCount);
    const setupRootNode = !!nextStateOwners.documentElement && (rootNodeChanged || shouldSetupFromSubscriberCount);
    const setupSomething = setupMedia || setupTextTracks || setupVideoRenditions || setupAudioTracks || setupRemote || setupRootNode;
    const somethingToDo = teardownSomething || setupSomething;
    // Fast path: no listener rewiring needed — just commit the owners and refresh state.
    if (!somethingToDo) {
      Object.entries(nextStateOwners).forEach(
        ([stateOwnerName, stateOwner]) => {
          stateOwners[stateOwnerName] = stateOwner;
        }
      );
      updateStateFromFacade();
      nextStateOwners = void 0;
      return;
    }
    // For every modeled state, remove listeners from the old owners (teardown*)
    // and attach a fresh handler to the new owners (setup*), per event source.
    Object.entries(stateMediator).forEach(
      ([
        stateName,
        {
          get,
          mediaEvents = [],
          textTracksEvents = [],
          videoRenditionsEvents = [],
          audioTracksEvents = [],
          remoteEvents = [],
          rootEvents = [],
          stateOwnersUpdateHandlers = []
        }
      ]) => {
        if (!stateUpdateHandlers[stateName]) {
          stateUpdateHandlers[stateName] = {};
        }
        // One shared handler per state: re-read the state via the facade's get()
        // and commit it (updateState filters out no-op changes).
        const handler = (event) => {
          const nextValue = get(stateOwners, event);
          updateState({ [stateName]: nextValue });
        };
        let prevHandler;
        prevHandler = stateUpdateHandlers[stateName].mediaEvents;
        mediaEvents.forEach((eventType) => {
          if (prevHandler && teardownMedia) {
            stateOwners.media.removeEventListener(eventType, prevHandler);
            stateUpdateHandlers[stateName].mediaEvents = void 0;
          }
          if (setupMedia) {
            nextStateOwners.media.addEventListener(eventType, handler);
            stateUpdateHandlers[stateName].mediaEvents = handler;
          }
        });
        prevHandler = stateUpdateHandlers[stateName].textTracksEvents;
        textTracksEvents.forEach((eventType) => {
          var _a2, _b2;
          if (prevHandler && teardownTextTracks) {
            (_a2 = stateOwners.media.textTracks) == null ? void 0 : _a2.removeEventListener(
              eventType,
              prevHandler
            );
            stateUpdateHandlers[stateName].textTracksEvents = void 0;
          }
          if (setupTextTracks) {
            (_b2 = nextStateOwners.media.textTracks) == null ? void 0 : _b2.addEventListener(
              eventType,
              handler
            );
            stateUpdateHandlers[stateName].textTracksEvents = handler;
          }
        });
        prevHandler = stateUpdateHandlers[stateName].videoRenditionsEvents;
        videoRenditionsEvents.forEach((eventType) => {
          var _a2, _b2;
          if (prevHandler && teardownVideoRenditions) {
            (_a2 = stateOwners.media.videoRenditions) == null ? void 0 : _a2.removeEventListener(
              eventType,
              prevHandler
            );
            stateUpdateHandlers[stateName].videoRenditionsEvents = void 0;
          }
          if (setupVideoRenditions) {
            (_b2 = nextStateOwners.media.videoRenditions) == null ? void 0 : _b2.addEventListener(
              eventType,
              handler
            );
            stateUpdateHandlers[stateName].videoRenditionsEvents = handler;
          }
        });
        prevHandler = stateUpdateHandlers[stateName].audioTracksEvents;
        audioTracksEvents.forEach((eventType) => {
          var _a2, _b2;
          if (prevHandler && teardownAudioTracks) {
            (_a2 = stateOwners.media.audioTracks) == null ? void 0 : _a2.removeEventListener(
              eventType,
              prevHandler
            );
            stateUpdateHandlers[stateName].audioTracksEvents = void 0;
          }
          if (setupAudioTracks) {
            (_b2 = nextStateOwners.media.audioTracks) == null ? void 0 : _b2.addEventListener(
              eventType,
              handler
            );
            stateUpdateHandlers[stateName].audioTracksEvents = handler;
          }
        });
        prevHandler = stateUpdateHandlers[stateName].remoteEvents;
        remoteEvents.forEach((eventType) => {
          var _a2, _b2;
          if (prevHandler && teardownRemote) {
            (_a2 = stateOwners.media.remote) == null ? void 0 : _a2.removeEventListener(
              eventType,
              prevHandler
            );
            stateUpdateHandlers[stateName].remoteEvents = void 0;
          }
          if (setupRemote) {
            (_b2 = nextStateOwners.media.remote) == null ? void 0 : _b2.addEventListener(eventType, handler);
            stateUpdateHandlers[stateName].remoteEvents = handler;
          }
        });
        prevHandler = stateUpdateHandlers[stateName].rootEvents;
        rootEvents.forEach((eventType) => {
          if (prevHandler && teardownRootNode) {
            stateOwners.documentElement.removeEventListener(
              eventType,
              prevHandler
            );
            stateUpdateHandlers[stateName].rootEvents = void 0;
          }
          if (setupRootNode) {
            nextStateOwners.documentElement.addEventListener(
              eventType,
              handler
            );
            stateUpdateHandlers[stateName].rootEvents = handler;
          }
        });
        // Arbitrary monitoring hooks: the previously stored return value (if any)
        // is invoked as a teardown function; the new hook's return value is
        // stored for the next teardown. NOTE(review): with multiple hooks, only
        // the last hook's teardown is retained — confirm single-hook assumption.
        const prevHandlerTeardown = stateUpdateHandlers[stateName].stateOwnersUpdateHandlers;
        stateOwnersUpdateHandlers.forEach((fn) => {
          if (prevHandlerTeardown && teardownSomething) {
            prevHandlerTeardown();
          }
          if (setupSomething) {
            stateUpdateHandlers[stateName].stateOwnersUpdateHandlers = fn(
              handler,
              nextStateOwners
            );
          }
        });
      }
    );
    // Commit the new owners, refresh all state, and clear the pending marker.
    Object.entries(nextStateOwners).forEach(([stateOwnerName, stateOwner]) => {
      stateOwners[stateOwnerName] = stateOwner;
    });
    updateStateFromFacade();
    nextStateOwners = void 0;
  };
  // Initial wiring with the constructor-provided owners (no subscriber count,
  // so subscriber-driven setup/teardown conditions are false here).
  updateStateOwners({ media, fullscreenElement, documentElement, options });
  return {
    // note that none of these cases directly interact with the media element, root node, full screen element, etc.
    // note these "actions" could just be the events if we wanted, especially if we normalize on "detail" for
    // any payload-relevant values
    // This is roughly equivalent to what used to be in our state requests dictionary object, though much of the
    // "heavy lifting" is now moved into the facade `set()`
    dispatch(action) {
      const { type, detail } = action;
      // Known state change request: delegate to the RequestMap, committing any
      // returned state delta (most handlers return nothing and rely on events).
      if (requestMap[type]) {
        updateState(requestMap[type](stateMediator, stateOwners, action));
        return;
      }
      // StateOwner swap requests are handled by the store itself.
      if (type === "mediaelementchangerequest") {
        updateStateOwners({ media: detail });
      } else if (type === "fullscreenelementchangerequest") {
        updateStateOwners({ fullscreenElement: detail });
      } else if (type === "documentelementchangerequest") {
        updateStateOwners({ documentElement: detail });
      } else if (type === "optionschangerequest") {
        // Options are mutated in place; they do not trigger listener rewiring.
        Object.entries(detail != null ? detail : {}).forEach(([optionName, optionValue]) => {
          stateOwners.options[optionName] = optionValue;
        });
      }
    },
    getState() {
      return state;
    },
    subscribe(callback) {
      // Passing the incremented count lets updateStateOwners attach listeners
      // when this is the first subscriber (0 -> 1 transition).
      updateStateOwners({}, callbacks.length + 1);
      callbacks.push(callback);
      // Immediately emit the current snapshot to the new subscriber.
      callback(state);
      return () => {
        const idx = callbacks.indexOf(callback);
        if (idx >= 0) {
          // Decremented count lets updateStateOwners detach listeners when the
          // last subscriber leaves (1 -> 0 transition).
          updateStateOwners({}, callbacks.length - 1);
          callbacks.splice(idx, 1);
        }
      };
    }
  };
};
var media_store_default = createMediaStore;
export {
media_store_default as default
};

View File

@@ -0,0 +1,20 @@
import { MediaUIEvents } from '../constants.js';
import { StateMediator, StateOwners } from './state-mediator.js';
import { MediaState } from './media-store.js';
// Union of all media UI event type strings declared on the MediaUIEvents constant.
export type MediaUIEventsType = typeof MediaUIEvents[keyof typeof MediaUIEvents];
// Subset of MediaUIEventsType that are actual state change requests (excludes
// state-receiver registration and the show/hide text-tracks events).
export type MediaRequestTypes = Exclude<MediaUIEventsType, 'registermediastatereceiver' | 'unregistermediastatereceiver' | 'mediashowtexttracksrequest' | 'mediahidetexttracksrequest'>;
/** @TODO Make this definition more precise (CJP) */
/**
 *
 * RequestMap provides a stateless, well-defined API for translating state change requests to related side effects to attempt to fulfill said request and
 * any other appropriate state changes that should occur as a result. Most often (but not always), those will simply rely on the StateMediator's `set()`
 * method for the corresponding state to update the StateOwners state. RequestMap is designed to be used by a MediaStore, which owns all of the wiring up
 * and persistence of e.g. StateOwners, MediaState, StateMediator, and the RequestMap.
 *
 * For any modeled state change request, the RequestMap defines a key, K, which directly maps to the state change request type (e.g. `mediapauserequest`, `mediaseekrequest`, etc.),
 * whose value is a function that defines the appropriate side effects of the request that will, under normal circumstances, (eventually) result in actual state changes.
 *
 * A handler may return a partial MediaState delta to be committed immediately
 * (e.g. the preview request), or return nothing and rely on StateOwner events
 * to surface the eventual state change.
 */
export type RequestMap = {
    [K in MediaRequestTypes]: (stateMediator: StateMediator, stateOwners: StateOwners, action: Partial<Pick<CustomEvent<any>, 'type' | 'detail'>>) => Partial<MediaState> | undefined | void;
};
// The library's default RequestMap implementation.
export declare const requestMap: RequestMap;

View File

@@ -0,0 +1,204 @@
import { globalThis } from "../utils/server-safe-globals.js";
import {
MediaUIEvents,
StreamTypes,
TextTrackKinds,
TextTrackModes
} from "../constants.js";
import {
getTextTracksList,
parseTracks,
updateTracksModeTo
} from "../utils/captions.js";
import { getSubtitleTracks, toggleSubtitleTracks } from "./util.js";
/**
 * Default RequestMap: one handler per media state change request type. Each
 * handler performs the side effects needed to (eventually) fulfill the request,
 * usually by delegating to the StateMediator's `set()` for the corresponding
 * state. Only the preview handler returns a state delta directly.
 */
const requestMap = {
  /**
   * @TODO Consider adding state to `StateMediator` for e.g. `mediaThumbnailCues` and use that for derived state here (CJP)
   *
   * Resolve preview state (time, thumbnail image/coords, chapter title) for a
   * requested preview time (`detail`). Returns a MediaState delta.
   */
  [MediaUIEvents.MEDIA_PREVIEW_REQUEST](stateMediator, stateOwners, { detail }) {
    var _a, _b, _c;
    const { media } = stateOwners;
    // A nullish detail clears the preview state.
    const mediaPreviewTime = detail != null ? detail : void 0;
    let mediaPreviewImage = void 0;
    let mediaPreviewCoords = void 0;
    if (media && mediaPreviewTime != null) {
      // Thumbnails are modeled as a metadata text track labeled "thumbnails".
      const [track] = getTextTracksList(media, {
        kind: TextTrackKinds.METADATA,
        label: "thumbnails"
      });
      // Find the cue covering the preview time; first/last cues use relaxed
      // bounds so times at the clip edges still match.
      const cue = Array.prototype.find.call((_a = track == null ? void 0 : track.cues) != null ? _a : [], (c, i, cs) => {
        if (i === 0)
          return c.endTime > mediaPreviewTime;
        if (i === cs.length - 1)
          return c.startTime <= mediaPreviewTime;
        return c.startTime <= mediaPreviewTime && c.endTime > mediaPreviewTime;
      });
      if (cue) {
        // Only use the <track> element's src as a base URL when the cue text is
        // a relative URL (i.e. does not start with "scheme://" or "//").
        // FIX: the previous regex had a stray leading apostrophe
        // (/'^(?:[a-z]+:)?\/\//i), which could never match, making this check a no-op.
        const base = !/^(?:[a-z]+:)?\/\//i.test(cue.text) ? (_b = media == null ? void 0 : media.querySelector(
          'track[label="thumbnails"]'
        )) == null ? void 0 : _b.src : void 0;
        const url = new URL(cue.text, base);
        // Spatial media fragment, e.g. "#xywh=0,0,160,90".
        const previewCoordsStr = new URLSearchParams(url.hash).get("#xywh");
        // Guard: not every thumbnail cue URL carries an #xywh fragment.
        if (previewCoordsStr) {
          mediaPreviewCoords = previewCoordsStr.split(",").map((numStr) => +numStr);
        }
        mediaPreviewImage = url.href;
      }
    }
    const mediaDuration = stateMediator.mediaDuration.get(stateOwners);
    const mediaChaptersCues = stateMediator.mediaChaptersCues.get(stateOwners);
    // Find the chapter containing the preview time; the final chapter's endTime
    // is treated inclusively when it coincides with the media duration.
    let mediaPreviewChapter = (_c = mediaChaptersCues.find((c, i, cs) => {
      if (i === cs.length - 1 && mediaDuration === c.endTime) {
        return c.startTime <= mediaPreviewTime && c.endTime >= mediaPreviewTime;
      }
      return c.startTime <= mediaPreviewTime && c.endTime > mediaPreviewTime;
    })) == null ? void 0 : _c.text;
    // An active preview with no matching chapter reports an empty title rather
    // than undefined, so subscribers can distinguish "no chapter" from "no preview".
    if (detail != null && mediaPreviewChapter == null) {
      mediaPreviewChapter = "";
    }
    return {
      mediaPreviewTime,
      mediaPreviewImage,
      mediaPreviewCoords,
      mediaPreviewChapter
    };
  },
  /** Pause playback. */
  [MediaUIEvents.MEDIA_PAUSE_REQUEST](stateMediator, stateOwners) {
    const key = "mediaPaused";
    const value = true;
    stateMediator[key].set(value, stateOwners);
  },
  /**
   * Start playback. For live streams without a DVR window, first seek to the
   * live edge so playback resumes "at live".
   */
  [MediaUIEvents.MEDIA_PLAY_REQUEST](stateMediator, stateOwners) {
    var _a;
    const key = "mediaPaused";
    const value = false;
    const live = stateMediator.mediaStreamType.get(stateOwners) === StreamTypes.LIVE;
    if (live) {
      // No target live window (> 0) means no DVR: always jump to the edge.
      const notDvr = !(stateMediator.mediaTargetLiveWindow.get(stateOwners) > 0);
      const liveEdgeTime = (_a = stateMediator.mediaSeekable.get(stateOwners)) == null ? void 0 : _a[1];
      if (notDvr && liveEdgeTime) {
        stateMediator.mediaCurrentTime.set(liveEdgeTime, stateOwners);
      }
    }
    stateMediator[key].set(value, stateOwners);
  },
  /** Set the playback rate to the requested value (`detail`). */
  [MediaUIEvents.MEDIA_PLAYBACK_RATE_REQUEST](stateMediator, stateOwners, { detail }) {
    const key = "mediaPlaybackRate";
    const value = detail;
    stateMediator[key].set(value, stateOwners);
  },
  /** Mute the media. */
  [MediaUIEvents.MEDIA_MUTE_REQUEST](stateMediator, stateOwners) {
    const key = "mediaMuted";
    const value = true;
    stateMediator[key].set(value, stateOwners);
  },
  /**
   * Unmute the media. If the volume is currently 0 (or unset), bump it to 0.25
   * so unmuting is audible.
   */
  [MediaUIEvents.MEDIA_UNMUTE_REQUEST](stateMediator, stateOwners) {
    const key = "mediaMuted";
    const value = false;
    if (!stateMediator.mediaVolume.get(stateOwners)) {
      stateMediator.mediaVolume.set(0.25, stateOwners);
    }
    stateMediator[key].set(value, stateOwners);
  },
  /**
   * Set the volume (`detail`). Setting a non-zero volume while muted also
   * unmutes, matching typical player UX.
   */
  [MediaUIEvents.MEDIA_VOLUME_REQUEST](stateMediator, stateOwners, { detail }) {
    const key = "mediaVolume";
    const value = detail;
    if (value && stateMediator.mediaMuted.get(stateOwners)) {
      stateMediator.mediaMuted.set(false, stateOwners);
    }
    stateMediator[key].set(value, stateOwners);
  },
  /** Seek to the requested time in seconds (`detail`). */
  [MediaUIEvents.MEDIA_SEEK_REQUEST](stateMediator, stateOwners, { detail }) {
    const key = "mediaCurrentTime";
    const value = detail;
    stateMediator[key].set(value, stateOwners);
  },
  /** Seek to the live edge (end of the seekable range), if determinable. */
  [MediaUIEvents.MEDIA_SEEK_TO_LIVE_REQUEST](stateMediator, stateOwners) {
    var _a;
    const key = "mediaCurrentTime";
    const value = (_a = stateMediator.mediaSeekable.get(stateOwners)) == null ? void 0 : _a[1];
    if (Number.isNaN(Number(value)))
      return;
    stateMediator[key].set(value, stateOwners);
  },
  // Text Tracks state change requests
  /**
   * Show the subtitle track(s) described by `detail`, optionally persisting the
   * first track's language as the preferred subtitles language.
   */
  [MediaUIEvents.MEDIA_SHOW_SUBTITLES_REQUEST](_stateMediator, stateOwners, { detail }) {
    var _a;
    const { options } = stateOwners;
    const tracks = getSubtitleTracks(stateOwners);
    const tracksToUpdate = parseTracks(detail);
    const preferredLanguage = (_a = tracksToUpdate[0]) == null ? void 0 : _a.language;
    if (preferredLanguage && !options.noSubtitlesLangPref) {
      globalThis.localStorage.setItem(
        "media-chrome-pref-subtitles-lang",
        preferredLanguage
      );
    }
    updateTracksModeTo(TextTrackModes.SHOWING, tracks, tracksToUpdate);
  },
  /** Disable the subtitle track(s) described by `detail` (all-or-listed). */
  [MediaUIEvents.MEDIA_DISABLE_SUBTITLES_REQUEST](_stateMediator, stateOwners, { detail }) {
    const tracks = getSubtitleTracks(stateOwners);
    const tracksToUpdate = detail != null ? detail : [];
    updateTracksModeTo(TextTrackModes.DISABLED, tracks, tracksToUpdate);
  },
  /** Toggle subtitles on/off (see toggleSubtitleTracks for selection rules). */
  [MediaUIEvents.MEDIA_TOGGLE_SUBTITLES_REQUEST](_stateMediator, stateOwners, { detail }) {
    toggleSubtitleTracks(stateOwners, detail);
  },
  // Renditions/Tracks state change requests
  /** Select the video rendition identified by `detail`. */
  [MediaUIEvents.MEDIA_RENDITION_REQUEST](stateMediator, stateOwners, { detail }) {
    const key = "mediaRenditionSelected";
    const value = detail;
    stateMediator[key].set(value, stateOwners);
  },
  /** Enable the audio track identified by `detail`. */
  [MediaUIEvents.MEDIA_AUDIO_TRACK_REQUEST](stateMediator, stateOwners, { detail }) {
    const key = "mediaAudioTrackEnabled";
    const value = detail;
    stateMediator[key].set(value, stateOwners);
  },
  // State change requests dependent on root node
  /** Enter picture-in-picture, exiting fullscreen first if active (mutually exclusive). */
  [MediaUIEvents.MEDIA_ENTER_PIP_REQUEST](stateMediator, stateOwners) {
    const key = "mediaIsPip";
    const value = true;
    if (stateMediator.mediaIsFullscreen.get(stateOwners)) {
      stateMediator.mediaIsFullscreen.set(false, stateOwners);
    }
    stateMediator[key].set(value, stateOwners);
  },
  /** Exit picture-in-picture. */
  [MediaUIEvents.MEDIA_EXIT_PIP_REQUEST](stateMediator, stateOwners) {
    const key = "mediaIsPip";
    const value = false;
    stateMediator[key].set(value, stateOwners);
  },
  /** Enter fullscreen, exiting picture-in-picture first if active. */
  [MediaUIEvents.MEDIA_ENTER_FULLSCREEN_REQUEST](stateMediator, stateOwners) {
    const key = "mediaIsFullscreen";
    const value = true;
    if (stateMediator.mediaIsPip.get(stateOwners)) {
      stateMediator.mediaIsPip.set(false, stateOwners);
    }
    stateMediator[key].set(value, stateOwners);
  },
  /** Exit fullscreen. */
  [MediaUIEvents.MEDIA_EXIT_FULLSCREEN_REQUEST](stateMediator, stateOwners) {
    const key = "mediaIsFullscreen";
    const value = false;
    stateMediator[key].set(value, stateOwners);
  },
  /** Start casting, exiting fullscreen first if active. */
  [MediaUIEvents.MEDIA_ENTER_CAST_REQUEST](stateMediator, stateOwners) {
    const key = "mediaIsCasting";
    const value = true;
    if (stateMediator.mediaIsFullscreen.get(stateOwners)) {
      stateMediator.mediaIsFullscreen.set(false, stateOwners);
    }
    stateMediator[key].set(value, stateOwners);
  },
  /** Stop casting. */
  [MediaUIEvents.MEDIA_EXIT_CAST_REQUEST](stateMediator, stateOwners) {
    const key = "mediaIsCasting";
    const value = false;
    stateMediator[key].set(value, stateOwners);
  },
  /** Request AirPlay. NOTE(review): no corresponding "exit airplay" handler exists here. */
  [MediaUIEvents.MEDIA_AIRPLAY_REQUEST](stateMediator, stateOwners) {
    const key = "mediaIsAirplaying";
    const value = true;
    stateMediator[key].set(value, stateOwners);
  }
};
export {
requestMap
};

View File

@@ -0,0 +1,192 @@
import { AvailabilityStates, StreamTypes } from '../constants.js';
/**
 * A single video rendition (quality level), e.g. as exposed for HTTP Adaptive
 * Streaming media via `media.videoRenditions`.
 */
export type Rendition = {
    src?: string;
    id?: string;
    width?: number;
    height?: number;
    bitrate?: number;
    frameRate?: number;
    codec?: string;
    // Selection is driven through videoRenditions.selectedIndex, not by writing here.
    readonly selected?: boolean;
};
/**
 * A selectable audio track, e.g. as exposed via `media.audioTracks`.
 */
export type AudioTrack = {
    id?: string;
    kind?: string;
    label: string;
    language: string;
    enabled: boolean;
};
/**
 *
 * MediaStateOwner is in a sense both a subset and a superset of `HTMLVideoElement` and is used as the primary
 * "source of truth" for media state, as well as the primary target for state change requests.
 *
 * It is a subset insofar as only the `play()` method, the `paused` property, and the `addEventListener()`/`removeEventListener()` methods
 * are *required* and required to conform to their definition of `HTMLMediaElement` on the entity used. All other interfaces
 * (properties, methods, events, etc.) are optional, but, when present, *must* conform to `HTMLMediaElement`/`HTMLVideoElement`
 * to avoid unexpected state behavior. This includes, for example, ensuring state updates occur *before* related events are fired
 * that are used to monitor for potential state changes.
 *
 * It is a superset insofar as it supports an extended interface for media state that may be browser-specific (e.g. `webkit`-prefixed
 * properties/methods) or are not immediately derivable from primary media state or other state owners. These include things like
 * `videoRenditions` for e.g. HTTP Adaptive Streaming media (such as HLS or MPEG-DASH), `audioTracks`, or `streamType`, which identifies
 * whether the media ("stream") is "live" or "on demand". Several of these are specified and formalized on https://github.com/video-dev/media-ui-extensions.
 */
export type MediaStateOwner = Partial<HTMLVideoElement> & Pick<HTMLMediaElement, 'play' | 'paused' | 'addEventListener' | 'removeEventListener'> & {
    // media-ui-extensions: "live" vs "on-demand" stream classification.
    streamType?: StreamTypes;
    // media-ui-extensions: DVR window size for live streams (0/absent => no DVR).
    targetLiveWindow?: number;
    liveEdgeStart?: number;
    // Array-like + EventTarget so the store can both inspect and listen for changes.
    videoRenditions?: Rendition[] & EventTarget & {
        selectedIndex?: number;
    };
    audioTracks?: AudioTrack[] & EventTarget;
    requestCast?: () => any;
    // Safari/WebKit-prefixed fullscreen, presentation-mode, and AirPlay surface.
    webkitDisplayingFullscreen?: boolean;
    webkitPresentationMode?: 'fullscreen' | 'picture-in-picture';
    webkitEnterFullscreen?: () => any;
    webkitCurrentPlaybackTargetIsWireless?: boolean;
    webkitShowPlaybackTargetPicker?: () => any;
};
// Root-node StateOwner: the document or shadow root used for DOM-level state.
export type RootNodeStateOwner = Partial<Document | ShadowRoot>;
// Element used as the fullscreen target; must be listenable for fullscreen events.
export type FullScreenElementStateOwner = Partial<HTMLElement> & EventTarget;
/**
 * Behavioral options / user preferences that influence state and state change
 * requests.
 */
export type StateOption = {
    // Enable subtitles/captions by default when available.
    defaultSubtitles?: boolean;
    defaultStreamType?: StreamTypes;
    defaultDuration?: number;
    liveEdgeOffset?: number;
    // Disable persisting the user's preferred volume level.
    noVolumePref?: boolean;
    // Disable persisting the preferred subtitles language (see the subtitles request handler).
    noSubtitlesLangPref?: boolean;
};
/**
 *
 * StateOwners are anything considered a source of truth or a target for updates for state. The media element (or "element") is a source of truth for the state of media playback,
 * but other things could also be a source of truth for information about the media. These include:
 *
 * - media - the media element
 * - fullscreenElement - the element that will be used when in full screen (e.g. for Media Chrome, this will typically be the MediaController)
 * - documentElement - top level node for DOM context (usually document and defaults to `document` in `createMediaStore()`)
 * - options - state behavior/user preferences (e.g. defaultSubtitles to enable subtitles by default as the relevant state or state owners change)
 */
export type StateOwners = {
    media?: MediaStateOwner;
    documentElement?: RootNodeStateOwner;
    fullscreenElement?: FullScreenElementStateOwner;
    options?: StateOption;
};
/**
 * Minimal shape shared by DOM CustomEvents and plain "action" objects: a type
 * string plus an optional `detail` payload (mirroring CustomEvent conventions).
 */
export type EventOrAction<D = undefined> = {
    type: string;
    detail?: D;
    target?: EventTarget;
};
// Reads state T from the StateOwners, optionally informed by a triggering event/action.
export type FacadeGetter<T, D = T> = (stateOwners: StateOwners, event?: EventOrAction<D>) => T;
// Applies a requested value to the StateOwners (side effects; state change surfaces via events).
export type FacadeSetter<T> = (value: T, stateOwners: StateOwners) => void;
/**
 * Arbitrary monitoring hook for a modeled state: invoked with a callback to
 * report state changes and the current StateOwners. May return a teardown
 * function — the compiled MediaStore stores the return value and invokes it
 * when the relevant StateOwners are torn down, so the return type must admit
 * `() => void` (the previous `void`-only declaration under-specified this).
 */
export type StateOwnerUpdateHandler<T> = (handler: (value: T) => void, stateOwners: StateOwners) => (() => void) | void;
/**
 * StateMediator entry for read-only state: a getter plus the event lists and
 * hooks describing where/when the state may change on the StateOwners.
 */
export type ReadonlyFacadeProp<T, D = T> = {
    get: FacadeGetter<T, D>;
    // Event types to monitor on stateOwners.media.
    mediaEvents?: string[];
    // Event types to monitor on stateOwners.media.textTracks.
    textTracksEvents?: string[];
    // Event types to monitor on stateOwners.media.videoRenditions.
    videoRenditionsEvents?: string[];
    // Event types to monitor on stateOwners.media.audioTracks.
    audioTracksEvents?: string[];
    // Event types to monitor on stateOwners.media.remote.
    remoteEvents?: string[];
    // Event types to monitor on stateOwners.documentElement.
    rootEvents?: string[];
    // Arbitrary monitoring hooks (may return teardown functions).
    stateOwnersUpdateHandlers?: StateOwnerUpdateHandler<T>[];
};
/**
 * StateMediator entry for writable state: adds a setter. `S` allows the set
 * value type to differ from the read value type `T`.
 */
export type FacadeProp<T, S = T, D = T> = ReadonlyFacadeProp<T, D> & {
    set: FacadeSetter<S>;
};
/**
*
* StateMediator provides a stateless, well-defined API for getting and setting/updating media-relevant state on a set of (stateful) StateOwners.
* In addition, it identifies monitoring conditions for potential state changes for any given bit of state. StateMediator is designed to be used
* by a MediaStore, which owns all of the wiring up and persistence of e.g. StateOwners, MediaState, and the StateMediator.
*
* For any modeled state, the StateMediator defines a key, K, which names the state (e.g. `mediaPaused`, `mediaSubtitlesShowing`, `mediaCastUnavailable`,
* etc.), whose value defines the aforementioned using:
*
* - `get(stateOwners, event)` - Retrieves the current state of K from StateOwners, potentially using the (optional) event to help identify the state.
* - `set(value, stateOwners)` (Optional, not available for `Readonly` state) - Interact with StateOwners via their interfaces to (directly or indirectly) update the state of K, using the value to determine the intended state change side effects.
* - `mediaEvents[]` (Optional) - An array of event types to monitor on `stateOwners.media` for potential changes in the state of K.
* - `textTracksEvents[]` (Optional) - An array of event types to monitor on `stateOwners.media.textTracks` for potential changes in the state of K.
* - `videoRenditionsEvents[]` (Optional) - An array of event types to monitor on `stateOwners.media.videoRenditions` for potential changes in the state of K.
* - `audioTracksEvents[]` (Optional) - An array of event types to monitor on `stateOwners.media.audioTracks` for potential changes in the state of K.
* - `remoteEvents[]` (Optional) - An array of event types to monitor on `stateOwners.media.remote` for potential changes in the state of K.
* - `rootEvents[]` (Optional) - An array of event types to monitor on `stateOwners.documentElement` for potential changes in the state of K.
* - `stateOwnersUpdateHandlers[]` (Optional) - An array of functions that define arbitrary code for monitoring or causing state changes, optionally returning a "teardown" function for cleanup.
*
* @example &lt;caption>Basic Example (NOTE: This is for informative use only. StateMediator is not intended to be used directly).&lt;/caption>
*
* // Simple stateOwners example
* const stateOwners = {
* media: myVideoElement,
* fullscreenElement: myMediaUIContainerElement,
* documentElement: document,
* };
*
* // Current mediaPaused state
* let mediaPaused = stateMediator.mediaPaused.get(stateOwners);
*
* // Event handler to update mediaPaused to its latest state;
* const updateMediaPausedEventHandler = (event) => {
* mediaPaused = stateMediator.mediaPaused.get(stateOwners, event);
* };
*
* // Monitor for potential changes to mediaPaused state.
* stateMediator.mediaPaused.mediaEvents.forEach(eventType => {
* stateOwners.media.addEventListener(eventType, updateMediaPausedEventHandler);
* });
*
* // Function to toggle between mediaPaused and !mediaPaused (media "unpaused", or "playing" under normal conditions)
* const toggleMediaPaused = () => {
* const nextMediaPaused = !mediaPaused;
* stateMediator.mediaPaused.set(nextMediaPaused, stateOwners);
* };
*
*
* // ... Eventual teardown, when relevant. This is especially relevant for potential garbage collection/memory management considerations.
* stateMediator.mediaPaused.mediaEvents.forEach(eventType => {
* stateOwners.media.removeEventListener(eventType, updateMediaPausedEventHandler);
* });
*
*/
export type StateMediator = {
    // Presentation size
    mediaWidth: ReadonlyFacadeProp<number>;
    mediaHeight: ReadonlyFacadeProp<number>;
    // Playback
    mediaPaused: FacadeProp<HTMLMediaElement['paused']>;
    mediaHasPlayed: ReadonlyFacadeProp<boolean>;
    mediaEnded: ReadonlyFacadeProp<HTMLMediaElement['ended']>;
    mediaPlaybackRate: FacadeProp<HTMLMediaElement['playbackRate']>;
    // Volume
    mediaMuted: FacadeProp<HTMLMediaElement['muted']>;
    mediaVolume: FacadeProp<HTMLMediaElement['volume']>;
    mediaVolumeLevel: ReadonlyFacadeProp<'high' | 'medium' | 'low' | 'off'>;
    // Time, duration, buffering
    mediaCurrentTime: FacadeProp<HTMLMediaElement['currentTime']>;
    mediaDuration: ReadonlyFacadeProp<HTMLMediaElement['duration']>;
    mediaLoading: ReadonlyFacadeProp<boolean>;
    mediaSeekable: ReadonlyFacadeProp<[number, number] | undefined>;
    mediaBuffered: ReadonlyFacadeProp<[number, number][]>;
    // Live / stream type
    mediaStreamType: ReadonlyFacadeProp<StreamTypes>;
    mediaTargetLiveWindow: ReadonlyFacadeProp<number>;
    mediaTimeIsLive: ReadonlyFacadeProp<boolean>;
    // Text tracks
    mediaSubtitlesList: ReadonlyFacadeProp<Pick<TextTrack, 'kind' | 'label' | 'language'>[]>;
    mediaSubtitlesShowing: ReadonlyFacadeProp<Pick<TextTrack, 'kind' | 'label' | 'language'>[]>;
    mediaChaptersCues: ReadonlyFacadeProp<Pick<VTTCue, 'text' | 'startTime' | 'endTime'>[]>;
    // Alternate presentations / devices
    mediaIsPip: FacadeProp<boolean>;
    mediaRenditionList: ReadonlyFacadeProp<Rendition[]>;
    mediaRenditionSelected: FacadeProp<string, string>;
    mediaAudioTrackList: ReadonlyFacadeProp<{
        id?: string;
    }[]>;
    mediaAudioTrackEnabled: FacadeProp<string, string>;
    mediaIsFullscreen: FacadeProp<boolean>;
    mediaIsCasting: FacadeProp<boolean, boolean, 'NO_DEVICES_AVAILABLE' | 'NOT_CONNECTED' | 'CONNECTING' | 'CONNECTED'>;
    mediaIsAirplaying: FacadeProp<boolean>;
    // Feature availability (undefined === available)
    mediaFullscreenUnavailable: ReadonlyFacadeProp<AvailabilityStates | undefined>;
    mediaPipUnavailable: ReadonlyFacadeProp<AvailabilityStates | undefined>;
    mediaVolumeUnavailable: ReadonlyFacadeProp<AvailabilityStates | undefined>;
    mediaCastUnavailable: ReadonlyFacadeProp<AvailabilityStates | undefined>;
    mediaAirplayUnavailable: ReadonlyFacadeProp<AvailabilityStates | undefined>;
    mediaRenditionUnavailable: ReadonlyFacadeProp<AvailabilityStates | undefined>;
    mediaAudioTrackUnavailable: ReadonlyFacadeProp<AvailabilityStates | undefined>;
};
/** Resolves true iff the environment supports programmatic volume changes (iOS does not). */
export declare const volumeSupportPromise: Promise<boolean>;
/** Awaits custom-element definition and upgrades any custom-element state owners. */
export declare const prepareStateOwners: (...stateOwners: any[]) => Promise<void>;
/** The singleton StateMediator implementation. */
export declare const stateMediator: StateMediator;

View File

@@ -0,0 +1,808 @@
import { document, globalThis } from "../utils/server-safe-globals.js";
import {
AvailabilityStates,
StreamTypes,
TextTrackKinds
} from "../constants.js";
import { containsComposedNode } from "../utils/element-utils.js";
import { enterFullscreen, exitFullscreen, isFullscreen } from "../utils/fullscreen-api.js";
import {
airplaySupported,
castSupported,
fullscreenSupported,
hasFullscreenSupport,
hasPipSupport,
hasVolumeSupportAsync,
pipSupported
} from "../utils/platform-tests.js";
import {
getShowingSubtitleTracks,
getSubtitleTracks,
toggleSubtitleTracks
} from "./util.js";
import { getTextTracksList } from "../utils/captions.js";
import { isValidNumber } from "../utils/utils.js";
// All recognized stream type values from the StreamTypes constant object.
const StreamTypeValues = Object.values(StreamTypes);
// Cached result of the async volume-support probe; undefined until it resolves.
let volumeSupported;
// Resolves true iff the environment allows programmatic volume changes.
const volumeSupportPromise = hasVolumeSupportAsync().then((supported) => {
  volumeSupported = supported;
  return volumeSupported;
});
/**
 * Waits for any custom-element StateOwners to be defined and upgrades them so
 * later state reads/writes hit the real element implementation. Non-elements,
 * built-in elements, and already-upgraded elements resolve immediately.
 */
const prepareStateOwners = async (...stateOwners) => {
  const upgradeIfNeeded = async (stateOwner) => {
    const isElement =
      "localName" in stateOwner && stateOwner instanceof globalThis.HTMLElement;
    if (!isElement) return;
    const { localName } = stateOwner;
    // Only custom elements (tag names containing "-") can need upgrading.
    if (!localName.includes("-")) return;
    const ctor = globalThis.customElements.get(localName);
    // Already defined and upgraded: nothing to do.
    if (ctor && stateOwner instanceof ctor) return;
    await globalThis.customElements.whenDefined(localName);
    globalThis.customElements.upgrade(stateOwner);
  };
  await Promise.all(stateOwners.filter((x) => x).map(upgradeIfNeeded));
};
const stateMediator = {
mediaWidth: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
return (_a = media == null ? void 0 : media.videoWidth) != null ? _a : 0;
},
mediaEvents: ["resize"]
},
mediaHeight: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
return (_a = media == null ? void 0 : media.videoHeight) != null ? _a : 0;
},
mediaEvents: ["resize"]
},
mediaPaused: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
return (_a = media == null ? void 0 : media.paused) != null ? _a : true;
},
set(value, stateOwners) {
var _a;
const { media } = stateOwners;
if (!media)
return;
if (value) {
media.pause();
} else {
(_a = media.play()) == null ? void 0 : _a.catch(() => {
});
}
},
mediaEvents: ["play", "playing", "pause", "emptied"]
},
mediaHasPlayed: {
// We want to let the user know that the media started playing at any point (`media-has-played`).
// Since these propagators are all called when boostrapping state, let's verify this is
// a real playing event by checking that 1) there's media and 2) it isn't currently paused.
get(stateOwners, event) {
const { media } = stateOwners;
if (!media)
return false;
if (!event)
return !media.paused;
return event.type === "playing";
},
mediaEvents: ["playing", "emptied"]
},
mediaEnded: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
return (_a = media == null ? void 0 : media.ended) != null ? _a : false;
},
mediaEvents: ["seeked", "ended", "emptied"]
},
mediaPlaybackRate: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
return (_a = media == null ? void 0 : media.playbackRate) != null ? _a : 1;
},
set(value, stateOwners) {
const { media } = stateOwners;
if (!media)
return;
if (!Number.isFinite(+value))
return;
media.playbackRate = +value;
},
mediaEvents: ["ratechange", "loadstart"]
},
mediaMuted: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
return (_a = media == null ? void 0 : media.muted) != null ? _a : false;
},
set(value, stateOwners) {
const { media } = stateOwners;
if (!media)
return;
media.muted = value;
},
mediaEvents: ["volumechange"]
},
mediaVolume: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
return (_a = media == null ? void 0 : media.volume) != null ? _a : 1;
},
set(value, stateOwners) {
const { media } = stateOwners;
if (!media)
return;
try {
if (value == null) {
globalThis.localStorage.removeItem("media-chrome-pref-volume");
} else {
globalThis.localStorage.setItem(
"media-chrome-pref-volume",
value.toString()
);
}
} catch (err) {
}
if (!Number.isFinite(+value))
return;
media.volume = +value;
},
mediaEvents: ["volumechange"],
stateOwnersUpdateHandlers: [
(handler, stateOwners) => {
const {
options: { noVolumePref }
} = stateOwners;
if (noVolumePref)
return;
try {
const volumePref = globalThis.localStorage.getItem(
"media-chrome-pref-volume"
);
if (volumePref == null)
return;
stateMediator.mediaVolume.set(+volumePref, stateOwners);
handler(+volumePref);
} catch (e) {
console.debug("Error getting volume pref", e);
}
}
]
},
// NOTE: Keeping this roughly equivalent to prior impl to reduce number of changes,
// however we may want to model "derived" state differently from "primary" state
// (in this case, derived === mediaVolumeLevel, primary === mediaMuted, mediaVolume) (CJP)
mediaVolumeLevel: {
get(stateOwners) {
const { media } = stateOwners;
if (typeof (media == null ? void 0 : media.volume) == "undefined")
return "high";
if (media.muted || media.volume === 0)
return "off";
if (media.volume < 0.5)
return "low";
if (media.volume < 0.75)
return "medium";
return "high";
},
mediaEvents: ["volumechange"]
},
mediaCurrentTime: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
return (_a = media == null ? void 0 : media.currentTime) != null ? _a : 0;
},
set(value, stateOwners) {
const { media } = stateOwners;
if (!media || !isValidNumber(value))
return;
media.currentTime = value;
},
mediaEvents: ["timeupdate", "loadedmetadata"]
},
mediaDuration: {
get(stateOwners) {
const { media, options: { defaultDuration } = {} } = stateOwners;
if (defaultDuration && (!media || !media.duration || Number.isNaN(media.duration) || !Number.isFinite(media.duration))) {
return defaultDuration;
}
return Number.isFinite(media == null ? void 0 : media.duration) ? media.duration : Number.NaN;
},
mediaEvents: ["durationchange", "loadedmetadata", "emptied"]
},
mediaLoading: {
get(stateOwners) {
const { media } = stateOwners;
return (media == null ? void 0 : media.readyState) < 3;
},
mediaEvents: ["waiting", "playing", "emptied"]
},
mediaSeekable: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
if (!((_a = media == null ? void 0 : media.seekable) == null ? void 0 : _a.length))
return void 0;
const start = media.seekable.start(0);
const end = media.seekable.end(media.seekable.length - 1);
if (!start && !end)
return void 0;
return [Number(start.toFixed(3)), Number(end.toFixed(3))];
},
mediaEvents: ["loadedmetadata", "emptied", "progress", "seekablechange"]
},
mediaBuffered: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
const timeRanges = (_a = media == null ? void 0 : media.buffered) != null ? _a : [];
return Array.from(timeRanges).map((_, i) => [
Number(timeRanges.start(i).toFixed(3)),
Number(timeRanges.end(i).toFixed(3))
]);
},
mediaEvents: ["progress", "emptied"]
},
mediaStreamType: {
get(stateOwners) {
const { media, options: { defaultStreamType } = {} } = stateOwners;
const usedDefaultStreamType = [
StreamTypes.LIVE,
StreamTypes.ON_DEMAND
].includes(defaultStreamType) ? defaultStreamType : void 0;
if (!media)
return usedDefaultStreamType;
const { streamType } = media;
if (StreamTypeValues.includes(streamType)) {
if (streamType === StreamTypes.UNKNOWN) {
return usedDefaultStreamType;
}
return streamType;
}
const duration = media.duration;
if (duration === Infinity) {
return StreamTypes.LIVE;
} else if (Number.isFinite(duration)) {
return StreamTypes.ON_DEMAND;
}
return usedDefaultStreamType;
},
mediaEvents: [
"emptied",
"durationchange",
"loadedmetadata",
"streamtypechange"
]
},
mediaTargetLiveWindow: {
get(stateOwners) {
const { media } = stateOwners;
if (!media)
return Number.NaN;
const { targetLiveWindow } = media;
const streamType = stateMediator.mediaStreamType.get(stateOwners);
if ((targetLiveWindow == null || Number.isNaN(targetLiveWindow)) && streamType === StreamTypes.LIVE) {
return 0;
}
return targetLiveWindow;
},
mediaEvents: [
"emptied",
"durationchange",
"loadedmetadata",
"streamtypechange",
"targetlivewindowchange"
]
},
mediaTimeIsLive: {
get(stateOwners) {
const {
media,
// Default to 10 seconds
options: { liveEdgeOffset = 10 } = {}
} = stateOwners;
if (!media)
return false;
if (typeof media.liveEdgeStart === "number") {
if (Number.isNaN(media.liveEdgeStart))
return false;
return media.currentTime >= media.liveEdgeStart;
}
const live = stateMediator.mediaStreamType.get(stateOwners) === StreamTypes.LIVE;
if (!live)
return false;
const seekable = media.seekable;
if (!seekable)
return true;
if (!seekable.length)
return false;
const liveEdgeStart = seekable.end(seekable.length - 1) - liveEdgeOffset;
return media.currentTime >= liveEdgeStart;
},
mediaEvents: ["playing", "timeupdate", "progress", "waiting", "emptied"]
},
// Text Tracks modeling
mediaSubtitlesList: {
get(stateOwners) {
return getSubtitleTracks(stateOwners).map(
({ kind, label, language }) => ({ kind, label, language })
);
},
mediaEvents: ["loadstart"],
textTracksEvents: ["addtrack", "removetrack"]
},
mediaSubtitlesShowing: {
get(stateOwners) {
return getShowingSubtitleTracks(stateOwners).map(
({ kind, label, language }) => ({ kind, label, language })
);
},
mediaEvents: ["loadstart"],
textTracksEvents: ["addtrack", "removetrack", "change"],
stateOwnersUpdateHandlers: [
(_handler, stateOwners) => {
var _a, _b;
const { media, options } = stateOwners;
if (!media)
return;
const updateDefaultSubtitlesCallback = (event) => {
var _a2;
if (!options.defaultSubtitles)
return;
const nonSubsEvent = event && ![TextTrackKinds.CAPTIONS, TextTrackKinds.SUBTITLES].includes(
// @ts-ignore
(_a2 = event == null ? void 0 : event.track) == null ? void 0 : _a2.kind
);
if (nonSubsEvent)
return;
toggleSubtitleTracks(stateOwners, true);
};
(_a = media.textTracks) == null ? void 0 : _a.addEventListener(
"addtrack",
updateDefaultSubtitlesCallback
);
(_b = media.textTracks) == null ? void 0 : _b.addEventListener(
"removetrack",
updateDefaultSubtitlesCallback
);
updateDefaultSubtitlesCallback();
return () => {
var _a2, _b2;
(_a2 = media.textTracks) == null ? void 0 : _a2.removeEventListener(
"addtrack",
updateDefaultSubtitlesCallback
);
(_b2 = media.textTracks) == null ? void 0 : _b2.removeEventListener(
"removetrack",
updateDefaultSubtitlesCallback
);
};
}
]
},
mediaChaptersCues: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
if (!media)
return [];
const [chaptersTrack] = getTextTracksList(media, {
kind: TextTrackKinds.CHAPTERS
});
return Array.from((_a = chaptersTrack == null ? void 0 : chaptersTrack.cues) != null ? _a : []).map(
({ text, startTime, endTime }) => ({
text,
startTime,
endTime
})
);
},
mediaEvents: ["loadstart", "loadedmetadata"],
textTracksEvents: ["addtrack", "removetrack", "change"],
stateOwnersUpdateHandlers: [
(handler, stateOwners) => {
var _a;
const { media } = stateOwners;
if (!media)
return;
const chaptersTrack = media.querySelector(
'track[kind="chapters"][default][src]'
);
const shadowChaptersTrack = (_a = media.shadowRoot) == null ? void 0 : _a.querySelector(
':is(video,audio) > track[kind="chapters"][default][src]'
);
chaptersTrack == null ? void 0 : chaptersTrack.addEventListener("load", handler);
shadowChaptersTrack == null ? void 0 : shadowChaptersTrack.addEventListener("load", handler);
return () => {
chaptersTrack == null ? void 0 : chaptersTrack.removeEventListener("load", handler);
shadowChaptersTrack == null ? void 0 : shadowChaptersTrack.removeEventListener("load", handler);
};
}
]
},
  // Modeling state tied to root node
  mediaIsPip: {
    // True when `media` (or a media element reachable through nested shadow
    // roots) is the root node's picture-in-picture element.
    get(stateOwners) {
      var _a, _b;
      const { media, documentElement } = stateOwners;
      if (!media || !documentElement)
        return false;
      if (!documentElement.pictureInPictureElement)
        return false;
      if (documentElement.pictureInPictureElement === media)
        return true;
      if (documentElement.pictureInPictureElement instanceof HTMLMediaElement) {
        // The PiP element is a plain media element but not `media` itself; it can
        // still count if `media` is a custom element containing it in its shadow DOM.
        if (!((_a = media.localName) == null ? void 0 : _a.includes("-")))
          return false;
        return containsComposedNode(
          media,
          documentElement.pictureInPictureElement
        );
      }
      if (documentElement.pictureInPictureElement.localName.includes("-")) {
        // The PiP element is itself a custom element: walk down nested shadow
        // roots' pictureInPictureElement chain looking for `media`.
        let currentRoot = documentElement.pictureInPictureElement.shadowRoot;
        while (currentRoot == null ? void 0 : currentRoot.pictureInPictureElement) {
          if (currentRoot.pictureInPictureElement === media)
            return true;
          currentRoot = (_b = currentRoot.pictureInPictureElement) == null ? void 0 : _b.shadowRoot;
        }
      }
      return false;
    },
    // Enter (value=true) or exit (value=false) picture-in-picture, warning
    // instead of throwing when the environment/media doesn't support it.
    set(value, stateOwners) {
      const { media } = stateOwners;
      if (!media)
        return;
      if (value) {
        if (!document.pictureInPictureEnabled) {
          console.warn("MediaChrome: Picture-in-picture is not enabled");
          return;
        }
        if (!media.requestPictureInPicture) {
          console.warn(
            "MediaChrome: The current media does not support picture-in-picture"
          );
          return;
        }
        const warnNotReady = () => {
          console.warn(
            "MediaChrome: The media is not ready for picture-in-picture. It must have a readyState > 0."
          );
        };
        media.requestPictureInPicture().catch((err) => {
          // InvalidStateError (code 11): media metadata not loaded yet.
          if (err.code === 11) {
            if (!media.src) {
              console.warn(
                "MediaChrome: The media is not ready for picture-in-picture. It must have a src set."
              );
              return;
            }
            if (media.readyState === 0 && media.preload === "none") {
              // Temporarily bump preload to "metadata" so PiP can be retried once
              // loadedmetadata fires; restore preload afterward.
              const cleanup = () => {
                media.removeEventListener("loadedmetadata", tryPip);
                media.preload = "none";
              };
              const tryPip = () => {
                media.requestPictureInPicture().catch(warnNotReady);
                cleanup();
              };
              media.addEventListener("loadedmetadata", tryPip);
              media.preload = "metadata";
              // Give metadata 1s to load before giving up and restoring state.
              setTimeout(() => {
                if (media.readyState === 0)
                  warnNotReady();
                cleanup();
              }, 1e3);
            } else {
              throw err;
            }
          } else {
            throw err;
          }
        });
      } else if (document.pictureInPictureElement) {
        document.exitPictureInPicture();
      }
    },
    mediaEvents: ["enterpictureinpicture", "leavepictureinpicture"]
  },
mediaRenditionList: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
return [...(_a = media == null ? void 0 : media.videoRenditions) != null ? _a : []].map((videoRendition) => ({
...videoRendition
}));
},
mediaEvents: ["emptied", "loadstart"],
videoRenditionsEvents: ["addrendition", "removerendition"]
},
/** @TODO Model this as a derived value? (CJP) */
mediaRenditionSelected: {
get(stateOwners) {
var _a, _b, _c;
const { media } = stateOwners;
return (_c = (_b = media == null ? void 0 : media.videoRenditions) == null ? void 0 : _b[(_a = media.videoRenditions) == null ? void 0 : _a.selectedIndex]) == null ? void 0 : _c.id;
},
set(value, stateOwners) {
const { media } = stateOwners;
if (!(media == null ? void 0 : media.videoRenditions)) {
console.warn(
"MediaController: Rendition selection not supported by this media."
);
return;
}
const renditionId = value;
const index = Array.prototype.findIndex.call(
media.videoRenditions,
(r) => r.id == renditionId
);
if (media.videoRenditions.selectedIndex != index) {
media.videoRenditions.selectedIndex = index;
}
},
mediaEvents: ["emptied"],
videoRenditionsEvents: ["addrendition", "removerendition", "change"]
},
mediaAudioTrackList: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
return [...(_a = media == null ? void 0 : media.audioTracks) != null ? _a : []];
},
mediaEvents: ["emptied", "loadstart"],
audioTracksEvents: ["addtrack", "removetrack"]
},
mediaAudioTrackEnabled: {
get(stateOwners) {
var _a, _b;
const { media } = stateOwners;
return (_b = [...(_a = media == null ? void 0 : media.audioTracks) != null ? _a : []].find(
(audioTrack) => audioTrack.enabled
)) == null ? void 0 : _b.id;
},
set(value, stateOwners) {
const { media } = stateOwners;
if (!(media == null ? void 0 : media.audioTracks)) {
console.warn(
"MediaChrome: Audio track selection not supported by this media."
);
return;
}
const audioTrackId = value;
for (const track of media.audioTracks) {
track.enabled = audioTrackId == track.id;
}
},
mediaEvents: ["emptied"],
audioTracksEvents: ["addtrack", "removetrack", "change"]
},
mediaIsFullscreen: {
get(stateOwners) {
return isFullscreen(stateOwners);
},
set(value, stateOwners) {
if (!value) {
exitFullscreen(stateOwners);
} else {
enterFullscreen(stateOwners);
}
},
// older Safari version may require webkit-specific events
rootEvents: ["fullscreenchange", "webkitfullscreenchange"],
// iOS requires webkit-specific events on the video.
mediaEvents: ["webkitbeginfullscreen", "webkitendfullscreen", "webkitpresentationmodechanged"]
},
mediaIsCasting: {
// Note this relies on a customized castable-video element.
get(stateOwners) {
var _a;
const { media } = stateOwners;
if (!(media == null ? void 0 : media.remote) || ((_a = media.remote) == null ? void 0 : _a.state) === "disconnected")
return false;
return !!media.remote.state;
},
set(value, stateOwners) {
var _a, _b;
const { media } = stateOwners;
if (!media)
return;
if (value && ((_a = media.remote) == null ? void 0 : _a.state) !== "disconnected")
return;
if (!value && ((_b = media.remote) == null ? void 0 : _b.state) !== "connected")
return;
if (typeof media.remote.prompt !== "function") {
console.warn(
"MediaChrome: Casting is not supported in this environment"
);
return;
}
media.remote.prompt().catch(() => {
});
},
remoteEvents: ["connect", "connecting", "disconnect"]
},
// NOTE: Newly added state for tracking airplaying
mediaIsAirplaying: {
// NOTE: Cannot know if airplaying since Safari doesn't fully support HTMLMediaElement::remote yet (e.g. remote::state) (CJP)
get() {
return false;
},
set(_value, stateOwners) {
const { media } = stateOwners;
if (!media)
return;
if (!(media.webkitShowPlaybackTargetPicker && globalThis.WebKitPlaybackTargetAvailabilityEvent)) {
console.warn(
"MediaChrome: received a request to select AirPlay but AirPlay is not supported in this environment"
);
return;
}
media.webkitShowPlaybackTargetPicker();
},
mediaEvents: ["webkitcurrentplaybacktargetiswirelesschanged"]
},
mediaFullscreenUnavailable: {
get(stateOwners) {
const { media } = stateOwners;
if (!fullscreenSupported || !hasFullscreenSupport(media))
return AvailabilityStates.UNSUPPORTED;
return void 0;
}
},
mediaPipUnavailable: {
get(stateOwners) {
const { media } = stateOwners;
if (!pipSupported || !hasPipSupport(media))
return AvailabilityStates.UNSUPPORTED;
}
},
mediaVolumeUnavailable: {
get(stateOwners) {
const { media } = stateOwners;
if (volumeSupported === false || (media == null ? void 0 : media.volume) == void 0) {
return AvailabilityStates.UNSUPPORTED;
}
return void 0;
},
// NOTE: Slightly different impl here. Added generic support for
// "stateOwnersUpdateHandlers" since the original impl had to hack around
// race conditions. (CJP)
stateOwnersUpdateHandlers: [
(handler) => {
if (volumeSupported == null) {
volumeSupportPromise.then(
(supported) => handler(supported ? void 0 : AvailabilityStates.UNSUPPORTED)
);
}
}
]
},
mediaCastUnavailable: {
// @ts-ignore
get(stateOwners, { availability = "not-available" } = {}) {
var _a;
const { media } = stateOwners;
if (!castSupported || !((_a = media == null ? void 0 : media.remote) == null ? void 0 : _a.state)) {
return AvailabilityStates.UNSUPPORTED;
}
if (availability == null || availability === "available")
return void 0;
return AvailabilityStates.UNAVAILABLE;
},
stateOwnersUpdateHandlers: [
(handler, stateOwners) => {
var _a;
const { media } = stateOwners;
if (!media)
return;
const remotePlaybackDisabled = media.disableRemotePlayback || media.hasAttribute("disableremoteplayback");
if (!remotePlaybackDisabled) {
(_a = media == null ? void 0 : media.remote) == null ? void 0 : _a.watchAvailability((availabilityBool) => {
const availability = availabilityBool ? "available" : "not-available";
handler({ availability });
}).catch((error) => {
if (error.name === "NotSupportedError") {
handler({ availability: null });
} else {
handler({ availability: "not-available" });
}
});
}
return () => {
var _a2;
(_a2 = media == null ? void 0 : media.remote) == null ? void 0 : _a2.cancelWatchAvailability().catch(() => {
});
};
}
]
},
mediaAirplayUnavailable: {
get(_stateOwners, event) {
if (!airplaySupported)
return AvailabilityStates.UNSUPPORTED;
if ((event == null ? void 0 : event.availability) === "not-available") {
return AvailabilityStates.UNAVAILABLE;
}
return void 0;
},
// NOTE: Keeping this event, as it's still the documented way of monitoring
// for AirPlay availability from Apple.
// See: https://developer.apple.com/documentation/webkitjs/adding_an_airplay_button_to_your_safari_media_controls#2940021 (CJP)
mediaEvents: ["webkitplaybacktargetavailabilitychanged"],
stateOwnersUpdateHandlers: [
(handler, stateOwners) => {
var _a;
const { media } = stateOwners;
if (!media)
return;
const remotePlaybackDisabled = media.disableRemotePlayback || media.hasAttribute("disableremoteplayback");
if (!remotePlaybackDisabled) {
(_a = media == null ? void 0 : media.remote) == null ? void 0 : _a.watchAvailability((availabilityBool) => {
const availability = availabilityBool ? "available" : "not-available";
handler({ availability });
}).catch((error) => {
if (error.name === "NotSupportedError") {
handler({ availability: null });
} else {
handler({ availability: "not-available" });
}
});
}
return () => {
var _a2;
(_a2 = media == null ? void 0 : media.remote) == null ? void 0 : _a2.cancelWatchAvailability().catch(() => {
});
};
}
]
},
mediaRenditionUnavailable: {
get(stateOwners) {
var _a;
const { media } = stateOwners;
if (!(media == null ? void 0 : media.videoRenditions)) {
return AvailabilityStates.UNSUPPORTED;
}
if (!((_a = media.videoRenditions) == null ? void 0 : _a.length)) {
return AvailabilityStates.UNAVAILABLE;
}
return void 0;
},
mediaEvents: ["emptied", "loadstart"],
videoRenditionsEvents: ["addrendition", "removerendition"]
},
mediaAudioTrackUnavailable: {
get(stateOwners) {
var _a, _b;
const { media } = stateOwners;
if (!(media == null ? void 0 : media.audioTracks)) {
return AvailabilityStates.UNSUPPORTED;
}
if (((_b = (_a = media.audioTracks) == null ? void 0 : _a.length) != null ? _b : 0) <= 1) {
return AvailabilityStates.UNAVAILABLE;
}
return void 0;
},
mediaEvents: ["emptied", "loadstart"],
audioTracksEvents: ["addtrack", "removetrack"]
}
};
export {
prepareStateOwners,
stateMediator,
volumeSupportPromise
};

View File

@@ -0,0 +1,6 @@
import { TextTrackLike } from '../utils/TextTrackLike.js';
/** Returns all subtitle/caption tracks for the state owners' media. */
export declare const getSubtitleTracks: (stateOwners: any) => TextTrackLike[];
/** Returns only the subtitle/caption tracks currently in "showing" mode. */
export declare const getShowingSubtitleTracks: (stateOwners: any) => TextTrackLike[];
/** Shows (force=true) or hides (force=false) subtitles, honoring language preferences. */
export declare const toggleSubtitleTracks: (stateOwners: any, force: boolean) => void;
/** Deep structural equality used for media state comparisons (treats NaN === NaN). */
export declare const areValuesEq: (x: any, y: any) => boolean;
/** Element-wise deep equality for arrays. */
export declare const areArraysEq: (xs: number[], ys: number[]) => boolean;

View File

@@ -0,0 +1,90 @@
import { TextTrackKinds, TextTrackModes } from "../constants.js";
import { getTextTracksList, updateTracksModeTo } from "../utils/captions.js";
// Returns all subtitle/caption tracks, sorted by kind (captions before subtitles).
const getSubtitleTracks = (stateOwners) => {
  return getTextTracksList(stateOwners.media, (textTrack) => {
    return [TextTrackKinds.SUBTITLES, TextTrackKinds.CAPTIONS].includes(
      textTrack.kind
    );
  }).sort((a, b) => {
    // FIX: the previous comparator (`a.kind >= b.kind ? 1 : -1`) returned 1 for
    // equal kinds, violating the sort-contract requirement that compare(a, a) === 0
    // and yielding implementation-defined order for equal elements.
    if (a.kind === b.kind) return 0;
    return a.kind < b.kind ? -1 : 1;
  });
};
// Returns only the subtitle/caption tracks currently in "showing" mode.
const getShowingSubtitleTracks = (stateOwners) => {
  const isShowingSubtitles = (textTrack) =>
    textTrack.mode === TextTrackModes.SHOWING &&
    [TextTrackKinds.SUBTITLES, TextTrackKinds.CAPTIONS].includes(textTrack.kind);
  return getTextTracksList(stateOwners.media, isShowingSubtitles);
};
/**
 * Turns subtitles on (force=true), off (force=false), or toggles them (force undefined).
 * When turning on, prefers the user's persisted subtitle language, then
 * navigator.languages, before falling back to the first available track.
 */
const toggleSubtitleTracks = (stateOwners, force) => {
  const tracks = getSubtitleTracks(stateOwners);
  const showingSubitleTracks = getShowingSubtitleTracks(stateOwners);
  const subtitlesShowing = !!showingSubitleTracks.length;
  if (!tracks.length)
    return;
  if (force === false || subtitlesShowing && force !== true) {
    updateTracksModeTo(TextTrackModes.DISABLED, tracks, showingSubitleTracks);
  } else if (force === true || !subtitlesShowing && force !== false) {
    let subTrack = tracks[0];
    const { options } = stateOwners;
    if (!options?.noSubtitlesLangPref) {
      // FIX: localStorage access can throw (e.g. privacy mode / sandboxed
      // iframes); fail soft like the other preference reads in this package.
      let subtitlesPref = null;
      try {
        subtitlesPref = globalThis.localStorage.getItem(
          "media-chrome-pref-subtitles-lang"
        );
      } catch (err) {
        // ignore storage errors; fall back to navigator.languages only
      }
      const userLangPrefs = subtitlesPref ? [subtitlesPref, ...globalThis.navigator.languages] : globalThis.navigator.languages;
      // Index of the best-matching user language for a track (-1 === no match).
      const langIndex = (textTrack) => userLangPrefs.findIndex(
        (lang) => textTrack.language.toLowerCase().startsWith(lang.split("-")[0])
      );
      const preferredAvailableSubs = tracks.filter((textTrack) => langIndex(textTrack) !== -1).sort((textTrackA, textTrackB) => langIndex(textTrackA) - langIndex(textTrackB));
      if (preferredAvailableSubs[0]) {
        subTrack = preferredAvailableSubs[0];
      }
    }
    const { language, label, kind } = subTrack;
    updateTracksModeTo(TextTrackModes.DISABLED, tracks, showingSubitleTracks);
    updateTracksModeTo(TextTrackModes.SHOWING, tracks, [
      { language, label, kind }
    ]);
  }
};
/**
 * Deep structural equality for media state values. Treats NaN as equal to NaN.
 * NOTE: object comparison is intentionally asymmetric — every key/value of `x`
 * must match in `y`; extra keys on `y` are ignored (preserved original semantics).
 */
const areValuesEq = (x, y) => {
  if (x === y)
    return true;
  if (typeof x !== typeof y)
    return false;
  if (typeof x === "number" && Number.isNaN(x) && Number.isNaN(y))
    return true;
  if (typeof x !== "object")
    return false;
  // FIX: `typeof null === "object"`, so without this guard `Object.entries(null)`
  // / `key in null` below would throw, and `areValuesEq({}, null)` wrongly
  // returned true. null-vs-null was already handled by the identity check above.
  if (x === null || y === null)
    return false;
  if (Array.isArray(x))
    return areArraysEq(x, y);
  return Object.entries(x).every(
    // NOTE: Checking key in y to disambiguate between between missing keys and keys whose value are undefined (CJP)
    ([key, value]) => key in y && areValuesEq(value, y[key])
  );
};
/**
 * Element-wise deep equality for arrays (delegates to areValuesEq per element).
 * Mirrors the original semantics: two non-array inputs compare "equal" here.
 */
const areArraysEq = (xs, ys) => {
  const xIsArray = Array.isArray(xs);
  const yIsArray = Array.isArray(ys);
  // One array and one non-array can never be equal.
  if (xIsArray !== yIsArray)
    return false;
  // Neither is an array: vacuously equal by this check.
  if (!xIsArray)
    return true;
  if (xs.length !== ys.length)
    return false;
  return xs.every((x, i) => areValuesEq(x, ys[i]));
};
export {
areArraysEq,
areValuesEq,
getShowingSubtitleTracks,
getSubtitleTracks,
toggleSubtitleTracks
};