node_modules ignore

This commit is contained in:
2025-05-08 23:43:47 +02:00
parent e19d52f172
commit 4574544c9f
65041 changed files with 10593536 additions and 0 deletions

109
server/node_modules/castable-video/README.md generated vendored Normal file
View File

@@ -0,0 +1,109 @@
# `<castable-video>`
[![NPM Version](https://img.shields.io/npm/v/castable-video?style=flat-square&color=informational)](https://www.npmjs.com/package/castable-video)
[![NPM Downloads](https://img.shields.io/npm/dm/castable-video?style=flat-square&color=informational&label=npm)](https://www.npmjs.com/package/castable-video)
[![jsDelivr hits (npm)](https://img.shields.io/jsdelivr/npm/hm/castable-video?style=flat-square&color=%23FF5627)](https://www.jsdelivr.com/package/npm/castable-video)
[![npm bundle size](https://img.shields.io/bundlephobia/minzip/castable-video?style=flat-square&color=success&label=gzip)](https://bundlephobia.com/result?p=castable-video)
[Cast](https://developers.google.com/cast) your video element to the big screen with ease!
The lightweight `CastableVideoElement` class extends the native `HTMLVideoElement` API
and adds casting functionality to any video element.
The API aims to be equivalent to the
[Remote Playback API](https://developer.mozilla.org/en-US/docs/Web/API/RemotePlayback)
with a few extra element attributes specific to casting.
It was primarily built for use in [Media Chrome](https://github.com/muxinc/media-chrome)
but it works great with any custom video controls as you can see in the example.
```html
<script type="module" src="https://cdn.jsdelivr.net/npm/castable-video/+esm"></script>
<castable-video
id="castable"
src="https://stream.mux.com/DS00Spx1CV902MCtPj5WknGlR102V5HFkDe/high.mp4"
></castable-video>
<button onclick="castable.play()">Play</button>
<button onclick="castable.pause()">Pause</button>
<button id="castBtn" hidden onclick="castable.prompt()">Cast...</button>
<script type="module">
castable.remote.watchAvailability((available) => {
castBtn.hidden = !available;
});
castable.remote.addEventListener('connecting', function (event) {
console.log(event.type);
});
castable.remote.addEventListener('connect', function (event) {
console.log(event.type);
});
castable.remote.addEventListener('disconnect', function (event) {
console.log(event.type);
});
</script>
```
## Remote Playback API
https://developer.mozilla.org/en-US/docs/Web/API/RemotePlayback
### Methods
- `video.remote.prompt()`: open the browser casting menu.
- `video.remote.watchAvailability(callback)`: watch if remote devices are available.
- `video.remote.cancelWatchAvailability(callback)`: cancel watching for remote devices.
### Properties
- `video.remote.state`: the current cast state.
- `disconnected`: Cast devices are available, but a cast session is not established.
- `connecting`: Cast session is being established.
- `connected`: Cast session is established.
- `castOptions` [readonly]: the cast options passed to the cast session.
- `receiverApplicationId`: defaults to Chromecast default receiver.
- `autoJoinPolicy` (`'origin_scoped'`)
- `androidReceiverCompatible` (false): if `true` enables Cast Connect.
- `language` ('en-US')
- `resumeSavedSession` (true)
### Events
- `connecting`: fires when a cast session is being established.
- `connect`: fires when starting casting.
- `disconnect`: fires when stopping casting.
e.g. `video.remote.addEventListener('connect', () => {})`
### Attributes
Each attribute has a corresponding element property. e.g. `video.castSrc` or `video.castStreamType`.
- `cast-src`: if Chromecast requires a different source than the one loaded.
For example this would be needed if video src is a blob when using MSE.
- `cast-stream-type`: add `<castable-video cast-stream-type="live">` for live streams.
- `cast-content-type`: required if Chromecast can't derive the content type from the source.
- `cast-receiver`: the Chromecast receiver app id. Defaults to `CC1AD845`.
### Usage with MSE (for example Hls.js or Dash.js)
When your media element is using Media Source Extensions (MSE), the element has a src like `src="blob://..."`. If you are using [Hls.js](https://github.com/video-dev/hls.js/) or [Dash.js](https://github.com/Dash-Industry-Forum/dash.js/) you may have noticed this. Because of the `blob://...` URL, castable-video has no way to know what the source is, so you must set the `cast-src` attribute to the full URL of the video source.
## Related
- [Media Chrome](https://github.com/muxinc/media-chrome) Your media player's dancing suit. 🕺
- [`<youtube-video>`](https://github.com/muxinc/media-elements/tree/main/packages/youtube-video-element) A custom element for the YouTube player.
- [`<vimeo-video>`](https://github.com/muxinc/media-elements/tree/main/packages/vimeo-video-element) A custom element for the Vimeo player.
- [`<wistia-video>`](https://github.com/muxinc/media-elements/tree/main/packages/wistia-video-element) A custom element for the Wistia player.
- [`<jwplayer-video>`](https://github.com/muxinc/media-elements/tree/main/packages/jwplayer-video-element) A custom element for the JW player.
- [`<videojs-video>`](https://github.com/muxinc/media-elements/tree/main/packages/videojs-video-element) A custom element for Video.js.
- [`<cloudflare-video>`](https://github.com/muxinc/media-elements/tree/main/packages/cloudflare-video-element) A custom element for the Cloudflare player.
- [`<hls-video>`](https://github.com/muxinc/media-elements/tree/main/packages/hls-video-element) A web component for playing HTTP Live Streaming (HLS) videos.
- [`<mux-player>`](https://github.com/muxinc/elements/tree/main/packages/mux-player) The official Mux-flavored video player custom element.
- [`<mux-video>`](https://github.com/muxinc/elements/tree/main/packages/mux-video) A Mux-flavored HTML5 video element w/ hls.js and Mux data builtin.

View File

@@ -0,0 +1 @@
/**
 * Mixin that augments a custom media element constructor with Google Cast
 * ("castable") behavior. The returned class has the same constructor type
 * as the input, so subclassing and `customElements.define` keep working.
 */
export function CastableMediaMixin<T extends CustomElementConstructor>(superclass: T): T;

330
server/node_modules/castable-video/castable-mixin.js generated vendored Normal file
View File

@@ -0,0 +1,330 @@
/* global chrome */
import { RemotePlayback } from './castable-remote-playback.js';
import {
privateProps,
requiresCastFramework,
loadCastFramework,
currentSession,
getDefaultCastOptions,
isHls,
getPlaylistSegmentFormat
} from './castable-utils.js';
/**
* CastableMediaMixin
*
* This mixin function provides a way to compose multiple classes.
* @see https://justinfagnani.com/2015/12/21/real-mixins-with-javascript-classes/
*
* @param {HTMLMediaElement} superclass - HTMLMediaElement or an extended class of it.
* @return {CastableMedia}
*/
export const CastableMediaMixin = (superclass) =>
  class CastableMedia extends superclass {
    // Observe the cast-* attributes in addition to whatever the superclass observes.
    static observedAttributes = [
      ...(superclass.observedAttributes ?? []),
      'cast-src',
      'cast-content-type',
      'cast-stream-type',
      'cast-receiver',
    ];

    // Snapshot of the local paused state, captured when a cast session starts,
    // so the receiver can resume with the same play/pause state.
    #localState = { paused: false };
    // Options passed to the cast session; see getDefaultCastOptions().
    #castOptions = getDefaultCastOptions();
    // Arbitrary data forwarded to the receiver via MediaInfo.customData.
    #castCustomData;
    // Lazily created RemotePlayback shim (see the remote getter).
    #remote;

    /**
     * Remote Playback API entry point.
     * Returns a Cast-framework-backed RemotePlayback shim on platforms that
     * require the Cast framework (Chrome); otherwise defers to the native
     * `remote` property of the superclass.
     */
    get remote() {
      if (this.#remote) return this.#remote;

      if (requiresCastFramework()) {
        // No need to load the Cast framework if it's disabled.
        if (!this.disableRemotePlayback) {
          loadCastFramework();
        }

        // Expose #loadOnPrompt to the RemotePlayback instance without making
        // it part of the element's public API.
        privateProps.set(this, {
          loadOnPrompt: () => this.#loadOnPrompt()
        });

        return (this.#remote = new RemotePlayback(this));
      }

      return super.remote;
    }

    // The cast framework RemotePlayer; only defined while this element is casting.
    get #castPlayer() {
      return privateProps.get(this.remote)?.getCastPlayer?.();
    }

    attributeChangedCallback(attrName, oldValue, newValue) {
      super.attributeChangedCallback(attrName, oldValue, newValue);

      if (attrName === 'cast-receiver' && newValue) {
        this.#castOptions.receiverApplicationId = newValue;
        return;
      }

      // The remaining cast attributes only matter while a session is active.
      if (!this.#castPlayer) return;

      switch (attrName) {
        case 'cast-stream-type':
        case 'cast-src':
          // Reload the remote media when the cast source or stream type changes.
          this.load();
          break;
      }
    }

    // Invoked by RemotePlayback (via privateProps) after the user picks a device.
    async #loadOnPrompt() {
      // Pause locally when the session is created.
      this.#localState.paused = super.paused;
      super.pause();

      // Sync over the muted state but not volume, 100% is different on TV's :P
      this.muted = super.muted;

      try {
        await this.load();
      } catch (err) {
        console.error(err);
      }
    }

    /**
     * Loads the media: locally via the superclass when not casting, otherwise
     * builds a chrome.cast.media.LoadRequest and sends it to the session.
     */
    async load() {
      if (!this.#castPlayer) return super.load();

      const mediaInfo = new chrome.cast.media.MediaInfo(this.castSrc, this.castContentType);
      mediaInfo.customData = this.castCustomData;

      // Manually add text tracks with a `src` attribute.
      // M3U8's load text tracks in the receiver, handle these in the media loaded event.
      const subtitles = [...this.querySelectorAll('track')].filter(
        ({ kind, src }) => src && (kind === 'subtitles' || kind === 'captions')
      );

      const activeTrackIds = [];
      let textTrackIdCount = 0;

      if (subtitles.length) {
        mediaInfo.tracks = subtitles.map((trackEl) => {
          const trackId = ++textTrackIdCount;

          // only activate 1 subtitle text track.
          if (activeTrackIds.length === 0 && trackEl.track.mode === 'showing') {
            activeTrackIds.push(trackId);
          }

          const track = new chrome.cast.media.Track(
            trackId,
            chrome.cast.media.TrackType.TEXT
          );
          track.trackContentId = trackEl.src;
          track.trackContentType = 'text/vtt';
          track.subtype =
            trackEl.kind === 'captions'
              ? chrome.cast.media.TextTrackType.CAPTIONS
              : chrome.cast.media.TextTrackType.SUBTITLES;
          track.name = trackEl.label;
          track.language = trackEl.srclang;
          return track;
        });
      }

      if (this.castStreamType === 'live') {
        mediaInfo.streamType = chrome.cast.media.StreamType.LIVE;
      } else {
        mediaInfo.streamType = chrome.cast.media.StreamType.BUFFERED;
      }

      mediaInfo.metadata = new chrome.cast.media.GenericMediaMetadata();
      mediaInfo.metadata.title = this.title;
      mediaInfo.metadata.images = [{ url: this.poster }];

      // Fix: isHls() is async; the original `if (isHls(...))` tested a Promise,
      // which is always truthy, so the manifest was fetched and parsed even for
      // non-HLS sources. Await the result so the hint is only set for real HLS.
      if (await isHls(this.castSrc)) {
        // Chromecast needs an explicit segment-format hint for fMP4 HLS.
        const segmentFormat = await getPlaylistSegmentFormat(this.castSrc);
        const isFragmentedMP4 = segmentFormat?.includes('m4s') || segmentFormat?.includes('mp4');
        if (isFragmentedMP4) {
          mediaInfo.hlsSegmentFormat = chrome.cast.media.HlsSegmentFormat.FMP4;
          mediaInfo.hlsVideoSegmentFormat = chrome.cast.media.HlsVideoSegmentFormat.FMP4;
        }
      }

      const request = new chrome.cast.media.LoadRequest(mediaInfo);
      request.currentTime = super.currentTime ?? 0;
      request.autoplay = !this.#localState.paused;
      request.activeTrackIds = activeTrackIds;

      await currentSession()?.loadMedia(request);

      this.dispatchEvent(new Event('volumechange'));
    }

    play() {
      if (this.#castPlayer) {
        if (this.#castPlayer.isPaused) {
          // playOrPause toggles; only toggle when actually paused.
          this.#castPlayer.controller?.playOrPause();
        }
        return;
      }
      return super.play();
    }

    pause() {
      if (this.#castPlayer) {
        if (!this.#castPlayer.isPaused) {
          this.#castPlayer.controller?.playOrPause();
        }
        return;
      }
      super.pause();
    }

    /**
     * @see https://developers.google.com/cast/docs/reference/web_sender/cast.framework.CastOptions
     * @readonly
     *
     * @typedef {Object} CastOptions
     * @property {string} [receiverApplicationId='CC1AD845'] - The app id of the cast receiver.
     * @property {string} [autoJoinPolicy='origin_scoped'] - The auto join policy.
     * @property {string} [language='en-US'] - The language to use for the cast receiver.
     * @property {boolean} [androidReceiverCompatible=false] - Whether to use the Cast Connect.
     * @property {boolean} [resumeSavedSession=true] - Whether to resume the last session.
     *
     * @return {CastOptions}
     */
    get castOptions() {
      return this.#castOptions;
    }

    get castReceiver() {
      return this.getAttribute('cast-receiver') ?? undefined;
    }

    set castReceiver(val) {
      if (this.castReceiver == val) return;
      this.setAttribute('cast-receiver', `${val}`);
    }

    // Allow the cast source url to be different than <video src>, could be a blob.
    get castSrc() {
      // Try the first <source src> for usage with even more native markup.
      return (
        this.getAttribute('cast-src') ??
        this.querySelector('source')?.src ??
        this.currentSrc
      );
    }

    set castSrc(val) {
      if (this.castSrc == val) return;
      this.setAttribute('cast-src', `${val}`);
    }

    get castContentType() {
      return this.getAttribute('cast-content-type') ?? undefined;
    }

    set castContentType(val) {
      this.setAttribute('cast-content-type', `${val}`);
    }

    get castStreamType() {
      // NOTE: Per https://github.com/video-dev/media-ui-extensions/issues/3 `streamType` may yield `"unknown"`
      return this.getAttribute('cast-stream-type') ?? this.streamType ?? undefined;
    }

    set castStreamType(val) {
      this.setAttribute('cast-stream-type', `${val}`);
    }

    get castCustomData() {
      return this.#castCustomData;
    }

    set castCustomData(val) {
      const valType = typeof val;
      // typeof null is 'object', so nullish values pass this check too.
      if (!['object', 'undefined'].includes(valType)) {
        console.error(`castCustomData must be nullish or an object but value was of type ${valType}`);
        return;
      }
      this.#castCustomData = val;
    }

    // Map the cast PlayerState onto HTMLMediaElement readyState values.
    get readyState() {
      if (this.#castPlayer) {
        switch (this.#castPlayer.playerState) {
          case chrome.cast.media.PlayerState.IDLE:
            return 0;
          case chrome.cast.media.PlayerState.BUFFERING:
            return 2;
          default:
            return 3;
        }
      }
      return super.readyState;
    }

    get paused() {
      if (this.#castPlayer) return this.#castPlayer.isPaused;
      return super.paused;
    }

    get muted() {
      if (this.#castPlayer) return this.#castPlayer?.isMuted;
      return super.muted;
    }

    set muted(val) {
      if (this.#castPlayer) {
        // muteOrUnmute toggles; only call when the state actually differs.
        if (
          (val && !this.#castPlayer.isMuted) ||
          (!val && this.#castPlayer.isMuted)
        ) {
          this.#castPlayer.controller?.muteOrUnmute();
        }
        return;
      }
      super.muted = val;
    }

    get volume() {
      if (this.#castPlayer) return this.#castPlayer?.volumeLevel ?? 1;
      return super.volume;
    }

    set volume(val) {
      if (this.#castPlayer) {
        this.#castPlayer.volumeLevel = +val;
        this.#castPlayer.controller?.setVolumeLevel();
        return;
      }
      super.volume = val;
    }

    get duration() {
      // castPlayer duration returns `0` when no media is loaded.
      if (this.#castPlayer && this.#castPlayer?.isMediaLoaded) {
        return this.#castPlayer?.duration ?? NaN;
      }
      return super.duration;
    }

    get currentTime() {
      if (this.#castPlayer && this.#castPlayer?.isMediaLoaded) {
        return this.#castPlayer?.currentTime ?? 0;
      }
      return super.currentTime;
    }

    set currentTime(val) {
      if (this.#castPlayer) {
        this.#castPlayer.currentTime = val;
        this.#castPlayer.controller?.seek();
        return;
      }
      super.currentTime = val;
    }
  };

// Backwards-compatible alias.
export const CastableVideoMixin = CastableMediaMixin;

View File

@@ -0,0 +1,386 @@
/* global chrome, cast */
import {
privateProps,
IterableWeakSet,
InvalidStateError,
NotSupportedError,
onCastApiAvailable,
castContext,
currentSession,
currentMedia,
editTracksInfo,
getMediaStatus,
setCastOptions
} from './castable-utils.js';
// All live RemotePlayback instances; weakly held so media elements can be GC'd.
const remoteInstances = new IterableWeakSet();
// Marks the media element that currently "owns" the cast session.
const castElementRef = new WeakSet();
// Shorthand for cast.framework, assigned once the Cast API reports available.
let cf;

onCastApiAvailable(() => {
  if (!globalThis.chrome?.cast?.isAvailable) {
    // Useful to see in verbose logs if this shows undefined or false.
    console.debug('chrome.cast.isAvailable', globalThis.chrome?.cast?.isAvailable);
    return;
  }

  if (!cf) {
    cf = cast.framework;

    // Fan out global cast-state and session-state changes to every instance.
    castContext().addEventListener(cf.CastContextEventType.CAST_STATE_CHANGED, (e) => {
      remoteInstances.forEach((r) => privateProps.get(r).onCastStateChanged?.(e));
    });

    castContext().addEventListener(cf.CastContextEventType.SESSION_STATE_CHANGED, (e) => {
      remoteInstances.forEach((r) => privateProps.get(r).onSessionStateChanged?.(e));
    });

    // Initialize instances that were constructed before the framework was ready.
    remoteInstances.forEach((r) => privateProps.get(r).init?.());
  }
});

// Monotonic id handed out by RemotePlayback#watchAvailability().
let remotePlaybackCallbackIdCount = 0;
/**
* Remote Playback shim for the Google cast SDK.
* https://w3c.github.io/remote-playback/
*/
export class RemotePlayback extends EventTarget {
  // The media element this shim is attached to.
  #media;
  // True once #init has run (requires cast.framework to be loaded).
  #isInit;
  // cast.framework.RemotePlayer instance (created in #init).
  #remotePlayer;
  // Map of RemotePlayerEventType -> handler, attached while casting.
  #remoteListeners;
  #state = 'disconnected';
  #available = false;
  // Availability callbacks registered via watchAvailability().
  #callbacks = new Set();
  // Callback -> id bookkeeping.
  // NOTE(review): ids are recorded here but never read back; cancel-by-id is
  // not implemented — verify against the Remote Playback spec if needed.
  #callbackIds = new WeakMap();

  constructor(media) {
    super();
    this.#media = media;

    remoteInstances.add(this);
    // Expose private hooks to the module-level cast event fan-out and to the
    // media element, without making them public API.
    privateProps.set(this, {
      init: () => this.#init(),
      onCastStateChanged: () => this.#onCastStateChanged(),
      onSessionStateChanged: () => this.#onSessionStateChanged(),
      getCastPlayer: () => this.#castPlayer,
    });

    this.#init();
  }

  // The RemotePlayer, but only while this element owns the cast session.
  get #castPlayer() {
    if (castElementRef.has(this.#media)) return this.#remotePlayer;
    return undefined;
  }

  /**
   * https://developer.mozilla.org/en-US/docs/Web/API/RemotePlayback/state
   * @return {'disconnected'|'connecting'|'connected'}
   */
  get state() {
    return this.#state;
  }

  /**
   * Registers a callback that is invoked whenever remote device availability
   * changes; the callback is also invoked once asynchronously with the
   * current availability. Returns a numeric watch id.
   */
  async watchAvailability(callback) {
    if (this.#media.disableRemotePlayback) {
      throw new InvalidStateError('disableRemotePlayback attribute is present.');
    }

    this.#callbackIds.set(callback, ++remotePlaybackCallbackIdCount);
    this.#callbacks.add(callback);

    // https://w3c.github.io/remote-playback/#getting-the-remote-playback-devices-availability-information
    queueMicrotask(() => callback(this.#hasDevicesAvailable()));

    return remotePlaybackCallbackIdCount;
  }

  /**
   * Removes one availability callback, or all of them when called without
   * an argument.
   */
  async cancelWatchAvailability(callback) {
    if (this.#media.disableRemotePlayback) {
      throw new InvalidStateError('disableRemotePlayback attribute is present.');
    }

    if (callback) {
      this.#callbacks.delete(callback);
    } else {
      this.#callbacks.clear();
    }
  }

  /**
   * Opens the browser's cast device picker and, on success, hands off
   * loading to the media element via its loadOnPrompt hook.
   * @throws {InvalidStateError} when disableRemotePlayback is set.
   * @throws {NotSupportedError} when the Cast API is unavailable.
   */
  async prompt() {
    if (this.#media.disableRemotePlayback) {
      throw new InvalidStateError('disableRemotePlayback attribute is present.');
    }

    if (!globalThis.chrome?.cast?.isAvailable) {
      throw new NotSupportedError('The RemotePlayback API is disabled on this platform.');
    }

    // If this element already owns the session, prompting again offers a
    // disconnect rather than a fresh connect.
    const willDisconnect = castElementRef.has(this.#media);
    castElementRef.add(this.#media);

    setCastOptions(this.#media.castOptions);

    Object.entries(this.#remoteListeners).forEach(([event, listener]) => {
      this.#remotePlayer.controller.addEventListener(event, listener);
    });

    try {
      // Open browser cast menu.
      await castContext().requestSession();
    } catch (err) {
      // If there will be no disconnect, reset some state here.
      if (!willDisconnect) {
        castElementRef.delete(this.#media);
      }
      // Don't throw an error if disconnecting or cancelling.
      // (The cast SDK rejects with plain strings, e.g. 'cancel'.)
      if (err === 'cancel') {
        return;
      }
      throw new Error(err);
    }

    privateProps.get(this.#media)?.loadOnPrompt?.();
  }

  // Tears down the remote listeners and syncs remote state back to the
  // local element when the session ends.
  #disconnect() {
    if (!castElementRef.has(this.#media)) return;

    Object.entries(this.#remoteListeners).forEach(([event, listener]) => {
      this.#remotePlayer.controller.removeEventListener(event, listener);
    });

    castElementRef.delete(this.#media);

    // isMuted is not in savedPlayerState. should we sync this back to local?
    this.#media.muted = this.#remotePlayer.isMuted;
    this.#media.currentTime = this.#remotePlayer.savedPlayerState.currentTime;
    if (this.#remotePlayer.savedPlayerState.isPaused === false) {
      this.#media.play();
    }
  }

  #hasDevicesAvailable() {
    // Cast state: NO_DEVICES_AVAILABLE, NOT_CONNECTED, CONNECTING, CONNECTED
    // https://developers.google.com/cast/docs/reference/web_sender/cast.framework#.CastState
    const castState = castContext()?.getCastState();
    return castState && castState !== 'NO_DEVICES_AVAILABLE';
  }

  // Fans availability changes out to watchAvailability callbacks and fires
  // the 'connecting' event for the session-owning element.
  #onCastStateChanged() {
    // Cast state: NO_DEVICES_AVAILABLE, NOT_CONNECTED, CONNECTING, CONNECTED
    // https://developers.google.com/cast/docs/reference/web_sender/cast.framework#.CastState
    const castState = castContext().getCastState();

    if (castElementRef.has(this.#media)) {
      if (castState === 'CONNECTING') {
        this.#state = 'connecting';
        this.dispatchEvent(new Event('connecting'));
      }
    }

    // NOT_CONNECTED, CONNECTING and CONNECTED all imply devices exist.
    if (!this.#available && castState?.includes('CONNECT')) {
      this.#available = true;
      for (let callback of this.#callbacks) callback(true);
    }
    else if (this.#available && (!castState || castState === 'NO_DEVICES_AVAILABLE')) {
      this.#available = false;
      for (let callback of this.#callbacks) callback(false);
    }
  }

  // Handles resuming a saved session after a page refresh.
  async #onSessionStateChanged() {
    // Session states: NO_SESSION, SESSION_STARTING, SESSION_STARTED, SESSION_START_FAILED,
    //                 SESSION_ENDING, SESSION_ENDED, SESSION_RESUMED
    // https://developers.google.com/cast/docs/reference/web_sender/cast.framework#.SessionState
    const { SESSION_RESUMED } = cf.SessionState;
    if (castContext().getSessionState() === SESSION_RESUMED) {
      /**
       * Figure out if this was the video that started the resumed session.
       * @TODO make this more specific than just checking against the video src!! (WL)
       *
       * If this video element can get the same unique id on each browser refresh
       * it would be possible to pass this unique id w/ `LoadRequest.customData`
       * and verify against currentMedia().customData below.
       */
      if (this.#media.castSrc === currentMedia()?.media.contentId) {
        castElementRef.add(this.#media);

        Object.entries(this.#remoteListeners).forEach(([event, listener]) => {
          this.#remotePlayer.controller.addEventListener(event, listener);
        });

        /**
         * There is cast framework resume session bug when you refresh the page a few
         * times the this.#remotePlayer.currentTime will not be in sync with the receiver :(
         * The below status request syncs it back up.
         */
        try {
          await getMediaStatus(new chrome.cast.media.GetStatusRequest());
        } catch (error) {
          console.error(error);
        }

        // Dispatch the play, playing events manually to sync remote playing state.
        this.#remoteListeners[cf.RemotePlayerEventType.IS_PAUSED_CHANGED]();
        this.#remoteListeners[cf.RemotePlayerEventType.PLAYER_STATE_CHANGED]();
      }
    }
  }

  // One-time setup; no-op until cast.framework (cf) is available.
  #init() {
    if (!cf || this.#isInit) return;
    this.#isInit = true;

    setCastOptions(this.#media.castOptions);

    /**
     * @TODO add listeners for addtrack, removetrack (WL)
     * This only has an impact on <track> with a `src` because these have to be
     * loaded manually in the load() method. This will require a new load() call
     * for each added/removed track w/ src.
     */
    this.#media.textTracks.addEventListener('change', () => this.#updateRemoteTextTrack());

    this.#onCastStateChanged();

    this.#remotePlayer = new cf.RemotePlayer();
    new cf.RemotePlayerController(this.#remotePlayer);

    // Translate RemotePlayer events into HTMLMediaElement-style events on #media.
    this.#remoteListeners = {
      [cf.RemotePlayerEventType.IS_CONNECTED_CHANGED]: ({ value }) => {
        if (value === true) {
          this.#state = 'connected';
          this.dispatchEvent(new Event('connect'));
        } else {
          this.#disconnect();
          this.#state = 'disconnected';
          this.dispatchEvent(new Event('disconnect'));
        }
      },
      [cf.RemotePlayerEventType.DURATION_CHANGED]: () => {
        this.#media.dispatchEvent(new Event('durationchange'));
      },
      [cf.RemotePlayerEventType.VOLUME_LEVEL_CHANGED]: () => {
        this.#media.dispatchEvent(new Event('volumechange'));
      },
      [cf.RemotePlayerEventType.IS_MUTED_CHANGED]: () => {
        this.#media.dispatchEvent(new Event('volumechange'));
      },
      [cf.RemotePlayerEventType.CURRENT_TIME_CHANGED]: () => {
        if (!this.#castPlayer?.isMediaLoaded) return;
        this.#media.dispatchEvent(new Event('timeupdate'));
      },
      [cf.RemotePlayerEventType.VIDEO_INFO_CHANGED]: () => {
        this.#media.dispatchEvent(new Event('resize'));
      },
      [cf.RemotePlayerEventType.IS_PAUSED_CHANGED]: () => {
        // NOTE(review): RemotePlayback defines no `paused` getter, so
        // `this.paused` is always undefined here and this always dispatches
        // 'play'; likely intended `this.#media.paused` — confirm upstream.
        this.#media.dispatchEvent(new Event(this.paused ? 'pause' : 'play'));
      },
      [cf.RemotePlayerEventType.PLAYER_STATE_CHANGED]: () => {
        // Player states: IDLE, PLAYING, PAUSED, BUFFERING
        // https://developers.google.com/cast/docs/reference/web_sender/chrome.cast.media#.PlayerState
        // pause event is handled above.
        if (this.#castPlayer?.playerState === chrome.cast.media.PlayerState.PAUSED) {
          return;
        }
        this.#media.dispatchEvent(
          new Event(
            {
              [chrome.cast.media.PlayerState.PLAYING]: 'playing',
              [chrome.cast.media.PlayerState.BUFFERING]: 'waiting',
              [chrome.cast.media.PlayerState.IDLE]: 'emptied',
            }[this.#castPlayer?.playerState]
          )
        );
      },
      [cf.RemotePlayerEventType.IS_MEDIA_LOADED_CHANGED]: async () => {
        if (!this.#castPlayer?.isMediaLoaded) return;

        // mediaInfo is not immediately available due to a bug? wait one tick
        await Promise.resolve();
        this.#onRemoteMediaLoaded();
      },
    };
  }

  #onRemoteMediaLoaded() {
    this.#updateRemoteTextTrack();
  }

  // Mirrors the local text track modes (showing/hidden) onto the remote
  // session's active track ids.
  async #updateRemoteTextTrack() {
    if (!this.#castPlayer) return;

    // Get the tracks w/ trackId's that have been loaded; manually or via a playlist like a M3U8 or MPD.
    const remoteTracks = this.#remotePlayer.mediaInfo?.tracks ?? [];
    const remoteSubtitles = remoteTracks.filter(
      ({ type }) => type === chrome.cast.media.TrackType.TEXT
    );

    const localSubtitles = [...this.#media.textTracks].filter(
      ({ kind }) => kind === 'subtitles' || kind === 'captions'
    );

    // Create a new array from the local subs w/ the trackId's from the remote subs.
    const subtitles = remoteSubtitles
      .map(({ language, name, trackId }) => {
        // Find the corresponding local text track and assign the trackId.
        const { mode } =
          localSubtitles.find(
            (local) => local.language === language && local.label === name
          ) ?? {};
        if (mode) return { mode, trackId };
        return false;
      })
      .filter(Boolean);

    const hiddenSubtitles = subtitles.filter(
      ({ mode }) => mode !== 'showing'
    );
    const hiddenTrackIds = hiddenSubtitles.map(({ trackId }) => trackId);
    const showingSubtitle = subtitles.find(({ mode }) => mode === 'showing');

    // Note this could also include audio or video tracks, diff against local state.
    const activeTrackIds =
      currentSession()?.getSessionObj().media[0]
        ?.activeTrackIds ?? [];
    let requestTrackIds = activeTrackIds;

    if (activeTrackIds.length) {
      // Filter out all local hidden subtitle trackId's.
      requestTrackIds = requestTrackIds.filter(
        (id) => !hiddenTrackIds.includes(id)
      );
    }

    if (showingSubtitle?.trackId) {
      requestTrackIds = [...requestTrackIds, showingSubtitle.trackId];
    }

    // Remove duplicate ids.
    requestTrackIds = [...new Set(requestTrackIds)];

    const arrayEquals = (a, b) =>
      a.length === b.length && a.every((a) => b.includes(a));
    if (!arrayEquals(activeTrackIds, requestTrackIds)) {
      try {
        const request = new chrome.cast.media.EditTracksInfoRequest(
          requestTrackIds
        );
        await editTracksInfo(request);
      } catch (error) {
        console.error(error);
      }
    }
  }
}

178
server/node_modules/castable-video/castable-utils.js generated vendored Normal file
View File

@@ -0,0 +1,178 @@
/* global WeakRef */
// Shared side-channel for private cross-module state (element <-> RemotePlayback).
export const privateProps = new WeakMap();

// DOMException-style error types mirroring the Remote Playback spec.
export class InvalidStateError extends Error {}
export class NotSupportedError extends Error {}
export class NotFoundError extends Error {}

// Content-Type values that identify an HLS manifest response.
const HLS_RESPONSE_HEADERS = ['application/x-mpegURL','application/vnd.apple.mpegurl','audio/mpegurl']
// Fallback to a plain Set if WeakRef is not available.
// A Set whose members are held weakly (via WeakRef) but can still be iterated.
export const IterableWeakSet = globalThis.WeakRef ?
  class extends Set {
    /**
     * Adds `el` wrapped in a WeakRef.
     * Fix: return the set itself to preserve the Set#add contract
     * (the original returned undefined, breaking chaining).
     */
    add(el) {
      super.add(new WeakRef(el));
      return this;
    }
    /**
     * Invokes `fn` for each still-alive member.
     * Fix: prune WeakRefs whose target has been garbage collected so the
     * backing Set does not grow without bound over the page's lifetime.
     */
    forEach(fn) {
      super.forEach((ref) => {
        const value = ref.deref();
        if (value) fn(value);
        else super.delete(ref);
      });
    }
  } : Set;
export function onCastApiAvailable(callback) {
if (!globalThis.chrome?.cast?.isAvailable) {
globalThis.__onGCastApiAvailable = () => {
// The globalThis.__onGCastApiAvailable callback alone is not reliable for
// the added cast.framework. It's loaded in a separate JS file.
// https://www.gstatic.com/eureka/clank/101/cast_sender.js
// https://www.gstatic.com/cast/sdk/libs/sender/1.0/cast_framework.js
customElements
.whenDefined('google-cast-button')
.then(callback);
};
} else if (!globalThis.cast?.framework) {
customElements
.whenDefined('google-cast-button')
.then(callback);
} else {
callback();
}
}
/**
 * Whether this platform needs the Google Cast framework to support remote
 * playback (i.e. the browser exposes the `chrome` global).
 * Fix: coerce to a boolean instead of leaking the `chrome` global object;
 * callers only use the result in boolean context, so this is compatible.
 * @return {boolean}
 */
export function requiresCastFramework() {
  // todo: exclude for Android>=56 which supports the Remote Playback API natively.
  return Boolean(globalThis.chrome);
}
/**
 * Injects the Google Cast sender SDK script tag once.
 * No-op when the SDK is already present or the tag was already added.
 */
export function loadCastFramework() {
  const sdkUrl = 'https://www.gstatic.com/cv/js/sender/v1/cast_sender.js?loadCastFramework=1';

  // Already loaded?
  if (globalThis.chrome?.cast) return;
  // Already injected (possibly still downloading)?
  if (document.querySelector(`script[src="${sdkUrl}"]`)) return;

  const script = document.createElement('script');
  script.src = sdkUrl;
  document.head.append(script);
}
/**
 * The singleton cast.framework.CastContext, or undefined while the
 * Cast framework has not loaded.
 */
export function castContext() {
  const framework = globalThis.cast?.framework;
  if (framework == null) return undefined;
  return framework.CastContext.getInstance();
}
/** The active CastSession, or undefined when not casting / framework absent. */
export function currentSession() {
  const context = castContext();
  if (context == null) return undefined;
  return context.getCurrentSession();
}
/** The first chrome.cast.media.Media of the active session, if any. */
export function currentMedia() {
  const session = currentSession();
  if (session == null) return undefined;
  return session.getSessionObj().media[0];
}
/**
 * Promisified wrapper around the callback-style Media#editTracksInfo API.
 * Rejects (via the executor throwing) when no media session exists.
 */
export function editTracksInfo(request) {
  return new Promise((onSuccess, onError) => {
    currentMedia().editTracksInfo(request, onSuccess, onError);
  });
}
/**
 * Promisified wrapper around the callback-style Media#getStatus API.
 * Rejects (via the executor throwing) when no media session exists.
 */
export function getMediaStatus(request) {
  return new Promise((onSuccess, onError) => {
    currentMedia().getStatus(request, onSuccess, onError);
  });
}
/**
 * Applies cast options to the CastContext, layering the caller's overrides
 * on top of the library defaults.
 */
export function setCastOptions(options) {
  const merged = Object.assign(getDefaultCastOptions(), options);
  return castContext().setOptions(merged);
}
/**
 * Returns a fresh default cast-options object on every call (callers mutate
 * the result, e.g. when the cast-receiver attribute changes).
 * @see https://developers.google.com/cast/docs/reference/web_sender/cast.framework.CastOptions
 */
export function getDefaultCastOptions() {
  const defaults = {
    // Chromecast default media receiver. Replace with your own app id from
    // the Google Cast Developer Console, or use
    // chrome.cast.media.DEFAULT_MEDIA_RECEIVER_APP_ID.
    receiverApplicationId: 'CC1AD845',
    // ORIGIN_SCOPED: auto connect from same appId and page origin.
    // Alternatives: TAB_AND_ORIGIN_SCOPED (adds tab), PAGE_SCOPED (no auto connect).
    autoJoinPolicy: 'origin_scoped',
    // Enables Cast Connect (requires Chrome 87 or higher).
    // https://developers.googleblog.com/2020/08/introducing-cast-connect-android-tv.html
    androidReceiverCompatible: false,
    language: 'en-US',
    resumeSavedSession: true,
  };
  return defaults;
}
// Get the segment format given the end of the URL (.m4s, .ts, etc),
// ignoring any query string (e.g. "seg.ts?token=abc" -> "ts").
// Fix: the original returned undefined for falsy input but null on a
// failed regex match; normalize to undefined for a consistent nullish
// contract (callers only use optional chaining, so this is compatible).
function getFormat(segment) {
  if (!segment) return undefined;
  const regex = /\.([a-zA-Z0-9]+)(?:\?.*)?$/;
  return segment.match(regex)?.[1] ?? undefined;
}
/**
 * Extracts the media-playlist URIs from an HLS master playlist.
 * A #EXT-X-STREAM-INF tag is immediately followed by its URI line:
 * https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-17#section-4.4.6.2
 * @param {string} playlistContent - raw M3U8 text.
 * @return {string[]} URIs in document order (possibly empty).
 */
function parsePlaylistUrls(playlistContent) {
  const lines = playlistContent.split('\n');
  return lines.reduce((urls, rawLine, index) => {
    if (rawLine.trim().startsWith('#EXT-X-STREAM-INF')) {
      const uri = (lines[index + 1] ?? '').trim();
      // The URI line must be non-empty and not another tag/comment.
      if (uri && !uri.startsWith('#')) {
        urls.push(uri);
      }
    }
    return urls;
  }, []);
}
/**
 * Returns the first segment URI line of a media playlist — i.e. the first
 * line that is neither blank nor a #-tag — or undefined if none exists.
 * The line is returned untrimmed, exactly as it appears in the playlist.
 */
function parseSegment(playlistContent) {
  for (const line of playlistContent.split('\n')) {
    const trimmed = line.trim();
    if (trimmed !== '' && !trimmed.startsWith('#')) return line;
  }
  return undefined;
}
/**
 * Determines whether `url` serves an HLS manifest by issuing a HEAD request
 * and inspecting the Content-Type response header.
 * Fix: Content-Type may carry parameters (e.g. "; charset=utf-8") and the
 * media type is case-insensitive per RFC 9110, so the original strict
 * equality check missed valid HLS responses. Compare only the media type,
 * case-insensitively.
 * @return {Promise<boolean>} false on network error or non-HLS type.
 */
export async function isHls(url) {
  try {
    const response = await fetch(url, { method: 'HEAD' });
    const contentType = response.headers.get('Content-Type');
    // Strip any parameters and normalize case before comparing.
    const mimeType = contentType?.split(';')[0].trim().toLowerCase();
    if (!mimeType) return false;
    return HLS_RESPONSE_HEADERS.some((header) => header.toLowerCase() === mimeType);
  } catch (err) {
    console.error('Error while trying to get the Content-Type of the manifest', err);
    return false;
  }
}
/**
 * Resolves the segment file extension ("m4s", "ts", ...) used by an HLS
 * playlist. For a master playlist the first variant playlist is fetched and
 * inspected; a media playlist is inspected directly.
 * @return {Promise<string|undefined>} undefined on any fetch/parse failure.
 */
export async function getPlaylistSegmentFormat(url) {
  try {
    const fetchText = async (target) => (await fetch(target)).text();

    const mainManifestContent = await fetchText(url);
    const [firstPlaylist] = parsePlaylistUrls(mainManifestContent);

    // Master playlists reference media playlists; follow the first one.
    // Otherwise the manifest already lists segments directly.
    const availableChunksContent = firstPlaylist
      ? await fetchText(new URL(firstPlaylist, url).toString())
      : mainManifestContent;

    return getFormat(parseSegment(availableChunksContent));
  } catch (err) {
    console.error('Error while trying to parse the manifest playlist', err);
    return undefined;
  }
}

11
server/node_modules/castable-video/castable-video.js generated vendored Normal file
View File

@@ -0,0 +1,11 @@
import { CustomVideoElement } from 'custom-media-element';
import { CastableMediaMixin } from './castable-mixin.js';
// Only build the real element class in a DOM environment; on the server the
// export degrades to an empty class so importing this module stays safe.
export const CastableVideoElement = globalThis.document
  ? CastableMediaMixin(CustomVideoElement)
  : class {};

// Register <castable-video> once, and expose the class globally for
// script-tag consumers.
if (globalThis.customElements && !globalThis.customElements.get('castable-video')) {
  globalThis.CastableVideoElement = CastableVideoElement;
  globalThis.customElements.define('castable-video', CastableVideoElement);
}

View File

@@ -0,0 +1,109 @@
# Custom Media Element
[![NPM Version](https://img.shields.io/npm/v/custom-media-element?style=flat-square&color=informational)](https://www.npmjs.com/package/custom-media-element)
[![NPM Downloads](https://img.shields.io/npm/dm/custom-media-element?style=flat-square&color=informational&label=npm)](https://www.npmjs.com/package/custom-media-element)
[![jsDelivr hits (npm)](https://img.shields.io/jsdelivr/npm/hm/custom-media-element?style=flat-square&color=%23FF5627)](https://www.jsdelivr.com/package/npm/custom-media-element)
[![npm bundle size](https://img.shields.io/bundlephobia/minzip/custom-media-element?style=flat-square&color=success&label=gzip)](https://bundlephobia.com/result?p=custom-media-element)
[![Codecov](https://img.shields.io/codecov/c/github/muxinc/custom-media-element?style=flat-square)](https://app.codecov.io/gh/muxinc/custom-media-element)
A custom element for extending the native media elements (`<audio>` or `<video>`).
## Usage
```js
import { CustomVideoElement } from 'custom-media-element';
class MyCustomVideoElement extends CustomVideoElement {
constructor() {
super();
}
// Override the play method.
play() {
return super.play()
}
// Override the src getter & setter.
get src() {
return super.src;
}
set src(src) {
super.src = src;
}
}
if (globalThis.customElements && !globalThis.customElements.get('my-custom-video')) {
globalThis.customElements.define('my-custom-video', MyCustomVideoElement);
}
export default MyCustomVideoElement;
```
```html
<my-custom-video
src="https://stream.mux.com/A3VXy02VoUinw01pwyomEO3bHnG4P32xzV7u1j1FSzjNg/low.mp4"
></my-custom-video>
```
## Interfaces
```ts
export const Events: string[];
export const audioTemplate: HTMLTemplateElement;
export const videoTemplate: HTMLTemplateElement;
export class CustomAudioElement extends HTMLAudioElement implements HTMLAudioElement {
static readonly observedAttributes: string[];
static Events: string[];
static template: HTMLTemplateElement;
readonly nativeEl: HTMLAudioElement;
attributeChangedCallback(attrName: string, oldValue?: string | null, newValue?: string | null): void;
connectedCallback(): void;
disconnectedCallback(): void;
handleEvent(event: Event): void;
}
export class CustomVideoElement extends HTMLVideoElement implements HTMLVideoElement {
static readonly observedAttributes: string[];
static Events: string[];
static template: HTMLTemplateElement;
readonly nativeEl: HTMLVideoElement;
attributeChangedCallback(attrName: string, oldValue?: string | null, newValue?: string | null): void;
connectedCallback(): void;
disconnectedCallback(): void;
handleEvent(event: Event): void;
}
type CustomMediaElementConstructor<T> = {
readonly observedAttributes: string[];
Events: string[];
template: HTMLTemplateElement;
new(): T
};
export function CustomMediaMixin(superclass: any, options: { tag: 'video', is?: string }):
CustomMediaElementConstructor<CustomVideoElement>;
export function CustomMediaMixin(superclass: any, options: { tag: 'audio', is?: string }):
CustomMediaElementConstructor<CustomAudioElement>;
```
## Related
- [Media Chrome](https://github.com/muxinc/media-chrome) Your media player's dancing suit. 🕺
- [`<hls-video>`](https://github.com/muxinc/media-elements/tree/main/packages/hls-video-element) A custom element for playing HTTP Live Streaming (HLS) videos.
- [`<youtube-video>`](https://github.com/muxinc/media-elements/tree/main/packages/youtube-video-element) A custom element for the YouTube player.
- [`<vimeo-video>`](https://github.com/muxinc/media-elements/tree/main/packages/vimeo-video-element) A custom element for the Vimeo player.
- [`<spotify-audio>`](https://github.com/muxinc/media-elements/tree/main/packages/spotify-audio-element) A custom element for the Spotify player.
- [`<jwplayer-video>`](https://github.com/muxinc/media-elements/tree/main/packages/jwplayer-video-element) A custom element for the JW player.
- [`<wistia-video>`](https://github.com/muxinc/media-elements/tree/main/packages/wistia-video-element) A custom element for the Wistia player.
- [`<cloudflare-video>`](https://github.com/muxinc/media-elements/tree/main/packages/cloudflare-video-element) A custom element for the Cloudflare player.
- [`<videojs-video>`](https://github.com/muxinc/media-elements/tree/main/packages/videojs-video-element) A custom element for Video.js.
- [`<castable-video>`](https://github.com/muxinc/media-elements/tree/main/packages/castable-video) Cast your video element to the big screen with ease!
- [`<mux-player>`](https://github.com/muxinc/elements/tree/main/packages/mux-player) The official Mux-flavored video player custom element.
- [`<mux-video>`](https://github.com/muxinc/elements/tree/main/packages/mux-video) A Mux-flavored HTML5 video element w/ hls.js and Mux data builtin.

View File

@@ -0,0 +1,68 @@
/**
* Custom Media Element
* Based on https://github.com/muxinc/custom-video-element - Mux - MIT License
*
* The goal is to create an element that works just like the video element
* but can be extended/sub-classed, because native elements cannot be
* extended today across browsers.
*/
export declare const Events: string[];
export declare const Attributes: string[];
/**
* Helper function to generate the HTML template for audio elements.
*/
declare function getAudioTemplateHTML(attrs: Record<string, string>): string;
/**
* Helper function to generate the HTML template for video elements.
*/
declare function getVideoTemplateHTML(attrs: Record<string, string>): string;
// Constructor type used to constrain the mixin's superclass parameter.
type Constructor<T> = {
new (...args: any[]): T;
};
/**
 * Instance shape of the <audio>-based custom element produced by
 * CustomMediaMixin. `nativeEl` is the inner native <audio> element the
 * host proxies properties, methods, attributes and events to.
 */
declare class CustomAudioElementClass extends HTMLAudioElement implements HTMLAudioElement {
static readonly observedAttributes: string[];
static getTemplateHTML: typeof getAudioTemplateHTML;
static shadowRootOptions: ShadowRootInit;
static Events: string[];
readonly nativeEl: HTMLAudioElement;
attributeChangedCallback(attrName: string, oldValue?: string | null, newValue?: string | null): void;
connectedCallback(): void;
disconnectedCallback(): void;
init(): void;
handleEvent(event: Event): void;
}
/**
 * Instance shape of the <video>-based custom element produced by
 * CustomMediaMixin. `nativeEl` is the inner native <video> element the
 * host proxies properties, methods, attributes and events to.
 */
declare class CustomVideoElementClass extends HTMLVideoElement implements HTMLVideoElement {
static readonly observedAttributes: string[];
static getTemplateHTML: typeof getVideoTemplateHTML;
static shadowRootOptions: ShadowRootInit;
static Events: string[];
readonly nativeEl: HTMLVideoElement;
attributeChangedCallback(attrName: string, oldValue?: string | null, newValue?: string | null): void;
connectedCallback(): void;
disconnectedCallback(): void;
init(): void;
handleEvent(event: Event): void;
}
// Static (constructor) side of the generated element classes.
type CustomMediaElementConstructor<T> = {
readonly observedAttributes: string[];
getTemplateHTML: typeof getVideoTemplateHTML | typeof getAudioTemplateHTML;
shadowRootOptions: ShadowRootInit;
Events: string[];
new (): T;
};
type CustomVideoElement = CustomMediaElementConstructor<CustomVideoElementClass>;
type CustomAudioElement = CustomMediaElementConstructor<CustomAudioElementClass>;
/**
 * Builds a custom element class wrapping a native media element of the given
 * tag. Overloaded on `tag` so each call returns the matching element type.
 * @see https://justinfagnani.com/2015/12/21/real-mixins-with-javascript-classes/
 */
export declare function CustomMediaMixin<T extends Constructor<HTMLElement>>(superclass: T, { tag, is }: {
tag: 'video';
is?: string;
}): CustomVideoElement;
export declare function CustomMediaMixin<T extends Constructor<HTMLElement>>(superclass: T, { tag, is }: {
tag: 'audio';
is?: string;
}): CustomAudioElement;
export declare const CustomVideoElement: CustomVideoElement;
export declare const CustomAudioElement: CustomAudioElement;
export {};

View File

@@ -0,0 +1,369 @@
"use strict";
// esbuild IIFE bundle of custom-media-element.ts: evaluates the module and
// assigns its exports (CommonJS-style namespace object) to the global
// `CustomMediaElement` variable (see the __toCommonJS return at the bottom).
var CustomMediaElement = (() => {
// esbuild runtime helpers for defining getter-based live re-exports.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// custom-media-element.ts
// Register the module's public exports as lazy getters on the namespace object.
var custom_media_element_exports = {};
__export(custom_media_element_exports, {
Attributes: () => Attributes,
CustomAudioElement: () => CustomAudioElement,
CustomMediaMixin: () => CustomMediaMixin,
CustomVideoElement: () => CustomVideoElement,
Events: () => Events
});
var Events = [
"abort",
"canplay",
"canplaythrough",
"durationchange",
"emptied",
"encrypted",
"ended",
"error",
"loadeddata",
"loadedmetadata",
"loadstart",
"pause",
"play",
"playing",
"progress",
"ratechange",
"seeked",
"seeking",
"stalled",
"suspend",
"timeupdate",
"volumechange",
"waiting",
"waitingforkey",
"resize",
"enterpictureinpicture",
"leavepictureinpicture",
"webkitbeginfullscreen",
"webkitendfullscreen",
"webkitpresentationmodechanged"
];
var Attributes = [
"autopictureinpicture",
"disablepictureinpicture",
"disableremoteplayback",
"autoplay",
"controls",
"controlslist",
"crossorigin",
"loop",
"muted",
"playsinline",
"poster",
"preload",
"src"
];
function getAudioTemplateHTML(attrs) {
return (
/*html*/
`
<style>
:host {
display: inline-flex;
line-height: 0;
flex-direction: column;
justify-content: end;
}
audio {
width: 100%;
}
</style>
<slot name="media">
<audio${serializeAttributes(attrs)}></audio>
</slot>
<slot></slot>
`
);
}
function getVideoTemplateHTML(attrs) {
return (
/*html*/
`
<style>
:host {
display: inline-block;
line-height: 0;
}
video {
max-width: 100%;
max-height: 100%;
min-width: 100%;
min-height: 100%;
object-fit: var(--media-object-fit, contain);
object-position: var(--media-object-position, 50% 50%);
}
video::-webkit-media-text-track-container {
transform: var(--media-webkit-text-track-transform);
transition: var(--media-webkit-text-track-transition);
}
</style>
<slot name="media">
<video${serializeAttributes(attrs)}></video>
</slot>
<slot></slot>
`
);
}
function CustomMediaMixin(superclass, { tag, is }) {
const nativeElTest = globalThis.document?.createElement?.(tag, { is });
const nativeElProps = nativeElTest ? getNativeElProps(nativeElTest) : [];
return class CustomMedia extends superclass {
static getTemplateHTML = tag.endsWith("audio") ? getAudioTemplateHTML : getVideoTemplateHTML;
static shadowRootOptions = { mode: "open" };
static Events = Events;
static #isDefined = false;
static get observedAttributes() {
CustomMedia.#define();
const natAttrs = nativeElTest?.constructor?.observedAttributes ?? [];
return [
...natAttrs,
...Attributes
];
}
static #define() {
if (this.#isDefined) return;
this.#isDefined = true;
const propsToAttrs = new Set(this.observedAttributes);
propsToAttrs.delete("muted");
for (const prop of nativeElProps) {
if (prop in this.prototype) continue;
if (typeof nativeElTest[prop] === "function") {
this.prototype[prop] = function(...args) {
this.#init();
const fn = () => {
if (this.call) return this.call(prop, ...args);
const nativeFn = this.nativeEl?.[prop];
return nativeFn?.apply(this.nativeEl, args);
};
return fn();
};
} else {
const config = {
get() {
this.#init();
const attr = prop.toLowerCase();
if (propsToAttrs.has(attr)) {
const val = this.getAttribute(attr);
return val === null ? false : val === "" ? true : val;
}
return this.get?.(prop) ?? this.nativeEl?.[prop];
}
};
if (prop !== prop.toUpperCase()) {
config.set = function(val) {
this.#init();
const attr = prop.toLowerCase();
if (propsToAttrs.has(attr)) {
if (val === true || val === false || val == null) {
this.toggleAttribute(attr, Boolean(val));
} else {
this.setAttribute(attr, val);
}
return;
}
if (this.set) {
this.set(prop, val);
return;
}
if (this.nativeEl) {
this.nativeEl[prop] = val;
}
};
}
Object.defineProperty(this.prototype, prop, config);
}
}
}
// Private fields
#isInit = false;
#nativeEl = null;
#childMap = /* @__PURE__ */ new Map();
#childObserver;
get;
set;
call;
// If the custom element is defined before the custom element's HTML is parsed
// no attributes will be available in the constructor (construction process).
// Wait until initializing in the attributeChangedCallback or
// connectedCallback or accessing any properties.
get nativeEl() {
this.#init();
return this.#nativeEl ?? this.querySelector(":scope > [slot=media]") ?? this.querySelector(tag) ?? this.shadowRoot?.querySelector(tag) ?? null;
}
set nativeEl(val) {
this.#nativeEl = val;
}
get defaultMuted() {
return this.hasAttribute("muted");
}
set defaultMuted(val) {
this.toggleAttribute("muted", val);
}
get src() {
return this.getAttribute("src");
}
set src(val) {
this.setAttribute("src", `${val}`);
}
get preload() {
return this.getAttribute("preload") ?? this.nativeEl?.preload;
}
set preload(val) {
this.setAttribute("preload", `${val}`);
}
#init() {
if (this.#isInit) return;
this.#isInit = true;
this.init();
}
init() {
if (!this.shadowRoot) {
this.attachShadow({ mode: "open" });
const attrs = namedNodeMapToObject(this.attributes);
if (is) attrs.is = is;
if (tag) attrs.part = tag;
this.shadowRoot.innerHTML = this.constructor.getTemplateHTML(attrs);
}
this.nativeEl.muted = this.hasAttribute("muted");
for (const prop of nativeElProps) {
this.#upgradeProperty(prop);
}
this.#childObserver = new MutationObserver(this.#syncMediaChildAttribute.bind(this));
this.shadowRoot.addEventListener("slotchange", this);
this.#syncMediaChildren();
for (const type of this.constructor.Events) {
this.shadowRoot?.addEventListener(type, this, true);
}
}
handleEvent(event) {
if (event.type === "slotchange") {
this.#syncMediaChildren();
return;
}
if (event.target === this.nativeEl) {
this.dispatchEvent(new CustomEvent(event.type, { detail: event.detail }));
}
}
#syncMediaChildren() {
const removeNativeChildren = new Map(this.#childMap);
const defaultSlot = this.shadowRoot?.querySelector("slot:not([name])");
const mediaChildren = defaultSlot?.assignedElements({ flatten: true }).filter((el) => ["track", "source"].includes(el.localName));
mediaChildren.forEach((el) => {
removeNativeChildren.delete(el);
let clone = this.#childMap.get(el);
if (!clone) {
clone = el.cloneNode();
this.#childMap.set(el, clone);
this.#childObserver?.observe(el, { attributes: true });
}
this.nativeEl?.append(clone);
this.#enableDefaultTrack(clone);
});
removeNativeChildren.forEach((clone, el) => {
clone.remove();
this.#childMap.delete(el);
});
}
#syncMediaChildAttribute(mutations) {
for (const mutation of mutations) {
if (mutation.type === "attributes") {
const { target, attributeName } = mutation;
const clone = this.#childMap.get(target);
if (clone && attributeName) {
clone.setAttribute(attributeName, target.getAttribute(attributeName) ?? "");
this.#enableDefaultTrack(clone);
}
}
}
}
#enableDefaultTrack(trackEl) {
if (trackEl && trackEl.localName === "track" && trackEl.default && (trackEl.kind === "chapters" || trackEl.kind === "metadata") && trackEl.track.mode === "disabled") {
trackEl.track.mode = "hidden";
}
}
#upgradeProperty(prop) {
if (Object.prototype.hasOwnProperty.call(this, prop)) {
const value = this[prop];
delete this[prop];
this[prop] = value;
}
}
attributeChangedCallback(attrName, oldValue, newValue) {
this.#init();
this.#forwardAttribute(attrName, oldValue, newValue);
}
#forwardAttribute(attrName, _oldValue, newValue) {
if (["id", "class"].includes(attrName)) return;
if (!CustomMedia.observedAttributes.includes(attrName) && this.constructor.observedAttributes.includes(attrName)) {
return;
}
if (newValue === null) {
this.nativeEl?.removeAttribute(attrName);
} else if (this.nativeEl?.getAttribute(attrName) !== newValue) {
this.nativeEl?.setAttribute(attrName, newValue);
}
}
connectedCallback() {
this.#init();
}
};
}
function getNativeElProps(nativeElTest) {
const nativeElProps = [];
for (let proto = Object.getPrototypeOf(nativeElTest); proto && proto !== HTMLElement.prototype; proto = Object.getPrototypeOf(proto)) {
const props = Object.getOwnPropertyNames(proto);
nativeElProps.push(...props);
}
return nativeElProps;
}
function serializeAttributes(attrs) {
let html = "";
for (const key in attrs) {
if (!Attributes.includes(key)) continue;
const value = attrs[key];
if (value === "") html += ` ${key}`;
else html += ` ${key}="${value}"`;
}
return html;
}
function namedNodeMapToObject(namedNodeMap) {
const obj = {};
for (const attr of namedNodeMap) {
obj[attr.name] = attr.value;
}
return obj;
}
var CustomVideoElement = CustomMediaMixin(globalThis.HTMLElement ?? class {
}, {
tag: "video"
});
var CustomAudioElement = CustomMediaMixin(globalThis.HTMLElement ?? class {
}, {
tag: "audio"
});
return __toCommonJS(custom_media_element_exports);
})();

View File

@@ -0,0 +1,345 @@
// Media event types re-dispatched from the host custom element (includes
// WebKit-specific fullscreen/presentation-mode events).
const Events = [
"abort",
"canplay",
"canplaythrough",
"durationchange",
"emptied",
"encrypted",
"ended",
"error",
"loadeddata",
"loadedmetadata",
"loadstart",
"pause",
"play",
"playing",
"progress",
"ratechange",
"seeked",
"seeking",
"stalled",
"suspend",
"timeupdate",
"volumechange",
"waiting",
"waitingforkey",
"resize",
"enterpictureinpicture",
"leavepictureinpicture",
"webkitbeginfullscreen",
"webkitendfullscreen",
"webkitpresentationmodechanged"
];
// Content attributes serialized onto the inner native media element in the
// template and observed/forwarded by the custom element.
const Attributes = [
"autopictureinpicture",
"disablepictureinpicture",
"disableremoteplayback",
"autoplay",
"controls",
"controlslist",
"crossorigin",
"loop",
"muted",
"playsinline",
"poster",
"preload",
"src"
];
// Shadow DOM template for an audio host: a styled <audio> inside the "media"
// slot (so a consumer can slot in a replacement element) plus a default slot
// for <track>/<source> children. `attrs` (host attributes) are serialized
// onto the inner <audio> tag by serializeAttributes.
function getAudioTemplateHTML(attrs) {
return (
/*html*/
`
<style>
:host {
display: inline-flex;
line-height: 0;
flex-direction: column;
justify-content: end;
}
audio {
width: 100%;
}
</style>
<slot name="media">
<audio${serializeAttributes(attrs)}></audio>
</slot>
<slot></slot>
`
);
}
// Shadow DOM template for a video host: a styled <video> inside the "media"
// slot plus a default slot for <track>/<source> children. Sizing and
// object-fit/position are themeable via CSS custom properties. `attrs`
// (host attributes) are serialized onto the inner <video> tag.
function getVideoTemplateHTML(attrs) {
return (
/*html*/
`
<style>
:host {
display: inline-block;
line-height: 0;
}
video {
max-width: 100%;
max-height: 100%;
min-width: 100%;
min-height: 100%;
object-fit: var(--media-object-fit, contain);
object-position: var(--media-object-position, 50% 50%);
}
video::-webkit-media-text-track-container {
transform: var(--media-webkit-text-track-transform);
transition: var(--media-webkit-text-track-transition);
}
</style>
<slot name="media">
<video${serializeAttributes(attrs)}></video>
</slot>
<slot></slot>
`
);
}
/**
 * Mixin that builds a custom element class wrapping a native <audio>/<video>
 * element. The returned class proxies the native media API — properties,
 * methods, attributes and events — from the host element to the native
 * element rendered in (or slotted into) its shadow DOM.
 * @param superclass base class to extend (typically HTMLElement).
 * @param tag 'audio' or 'video'; selects the template and inner element.
 * @param is optional customized built-in name passed to createElement.
 */
function CustomMediaMixin(superclass, { tag, is }) {
// Probe instance used to discover the native element's API surface.
// Optional-chained so importing in a non-DOM environment does not throw.
const nativeElTest = globalThis.document?.createElement?.(tag, { is });
const nativeElProps = nativeElTest ? getNativeElProps(nativeElTest) : [];
return class CustomMedia extends superclass {
static getTemplateHTML = tag.endsWith("audio") ? getAudioTemplateHTML : getVideoTemplateHTML;
static shadowRootOptions = { mode: "open" };
static Events = Events;
// Guard so the prototype is augmented with native props exactly once.
static #isDefined = false;
static get observedAttributes() {
// Lazily install the proxied native properties before attributes are observed.
CustomMedia.#define();
const natAttrs = nativeElTest?.constructor?.observedAttributes ?? [];
return [
...natAttrs,
...Attributes
];
}
static #define() {
if (this.#isDefined) return;
this.#isDefined = true;
// Attributes backing reflected properties. 'muted' is removed: the muted
// property is live state, while the attribute only supplies the default
// (see defaultMuted and the assignment in init()).
const propsToAttrs = new Set(this.observedAttributes);
propsToAttrs.delete("muted");
// Copy every native property/method not already defined on this class.
for (const prop of nativeElProps) {
if (prop in this.prototype) continue;
if (typeof nativeElTest[prop] === "function") {
// Methods: route through an optional this.call() hook, else invoke
// the native element's method.
this.prototype[prop] = function(...args) {
this.#init();
const fn = () => {
if (this.call) return this.call(prop, ...args);
const nativeFn = this.nativeEl?.[prop];
return nativeFn?.apply(this.nativeEl, args);
};
return fn();
};
} else {
// Accessors: reflected attributes read from the host's attribute;
// everything else goes through optional this.get()/this.set() hooks
// or falls back to the native element.
const config = {
get() {
this.#init();
const attr = prop.toLowerCase();
if (propsToAttrs.has(attr)) {
// Boolean-attribute convention: absent -> false, empty -> true.
const val = this.getAttribute(attr);
return val === null ? false : val === "" ? true : val;
}
return this.get?.(prop) ?? this.nativeEl?.[prop];
}
};
// ALL_CAPS props (constants such as readyState codes) get no setter.
if (prop !== prop.toUpperCase()) {
config.set = function(val) {
this.#init();
const attr = prop.toLowerCase();
if (propsToAttrs.has(attr)) {
if (val === true || val === false || val == null) {
this.toggleAttribute(attr, Boolean(val));
} else {
this.setAttribute(attr, val);
}
return;
}
if (this.set) {
this.set(prop, val);
return;
}
if (this.nativeEl) {
this.nativeEl[prop] = val;
}
};
}
Object.defineProperty(this.prototype, prop, config);
}
}
}
// Private fields
#isInit = false;
#nativeEl = null;
// Maps slotted <track>/<source> children to their clones in the native element.
#childMap = /* @__PURE__ */ new Map();
#childObserver;
// Optional hooks a subclass may assign to intercept property get/set/calls.
get;
set;
call;
// If the custom element is defined before the custom element's HTML is parsed
// no attributes will be available in the constructor (construction process).
// Wait until initializing in the attributeChangedCallback or
// connectedCallback or accessing any properties.
get nativeEl() {
this.#init();
// Prefer an explicitly assigned element, then a slotted/light-DOM child,
// then the element rendered by the shadow DOM template.
return this.#nativeEl ?? this.querySelector(":scope > [slot=media]") ?? this.querySelector(tag) ?? this.shadowRoot?.querySelector(tag) ?? null;
}
set nativeEl(val) {
this.#nativeEl = val;
}
// The 'muted' content attribute only supplies the default muted state.
get defaultMuted() {
return this.hasAttribute("muted");
}
set defaultMuted(val) {
this.toggleAttribute("muted", val);
}
// src/preload are reflected purely via attributes on the host element.
get src() {
return this.getAttribute("src");
}
set src(val) {
this.setAttribute("src", `${val}`);
}
get preload() {
return this.getAttribute("preload") ?? this.nativeEl?.preload;
}
set preload(val) {
this.setAttribute("preload", `${val}`);
}
// Idempotent one-time initialization guard.
#init() {
if (this.#isInit) return;
this.#isInit = true;
this.init();
}
init() {
// Render the template once, forwarding the host's current attributes
// onto the inner native element.
if (!this.shadowRoot) {
this.attachShadow({ mode: "open" });
const attrs = namedNodeMapToObject(this.attributes);
if (is) attrs.is = is;
if (tag) attrs.part = tag;
this.shadowRoot.innerHTML = this.constructor.getTemplateHTML(attrs);
}
// Apply the default muted state (attribute) to the live property.
this.nativeEl.muted = this.hasAttribute("muted");
// Re-apply any properties set before upgrade so the accessors take effect.
for (const prop of nativeElProps) {
this.#upgradeProperty(prop);
}
// Keep clones of slotted <track>/<source> children in sync with originals.
this.#childObserver = new MutationObserver(this.#syncMediaChildAttribute.bind(this));
this.shadowRoot.addEventListener("slotchange", this);
this.#syncMediaChildren();
// Listen (capture phase) so native media events can be re-dispatched.
for (const type of this.constructor.Events) {
this.shadowRoot?.addEventListener(type, this, true);
}
}
handleEvent(event) {
if (event.type === "slotchange") {
this.#syncMediaChildren();
return;
}
// Only re-dispatch events originating from the native media element itself.
if (event.target === this.nativeEl) {
this.dispatchEvent(new CustomEvent(event.type, { detail: event.detail }));
}
}
// Mirror slotted <track>/<source> elements into the native element as clones;
// clones whose originals were removed from the light DOM are dropped.
#syncMediaChildren() {
const removeNativeChildren = new Map(this.#childMap);
const defaultSlot = this.shadowRoot?.querySelector("slot:not([name])");
const mediaChildren = defaultSlot?.assignedElements({ flatten: true }).filter((el) => ["track", "source"].includes(el.localName));
mediaChildren.forEach((el) => {
removeNativeChildren.delete(el);
let clone = this.#childMap.get(el);
if (!clone) {
clone = el.cloneNode();
this.#childMap.set(el, clone);
// Watch the original so attribute changes are mirrored onto the clone.
this.#childObserver?.observe(el, { attributes: true });
}
this.nativeEl?.append(clone);
this.#enableDefaultTrack(clone);
});
// Whatever remains was removed from the light DOM; remove its clone too.
removeNativeChildren.forEach((clone, el) => {
clone.remove();
this.#childMap.delete(el);
});
}
// MutationObserver callback: copy a changed attribute from the original
// child element onto its clone in the native element.
#syncMediaChildAttribute(mutations) {
for (const mutation of mutations) {
if (mutation.type === "attributes") {
const { target, attributeName } = mutation;
const clone = this.#childMap.get(target);
if (clone && attributeName) {
clone.setAttribute(attributeName, target.getAttribute(attributeName) ?? "");
this.#enableDefaultTrack(clone);
}
}
}
}
// Promote default chapters/metadata tracks from 'disabled' to 'hidden'
// so their cues load without being rendered.
#enableDefaultTrack(trackEl) {
if (trackEl && trackEl.localName === "track" && trackEl.default && (trackEl.kind === "chapters" || trackEl.kind === "metadata") && trackEl.track.mode === "disabled") {
trackEl.track.mode = "hidden";
}
}
// Standard custom-element upgrade dance: re-apply an own property that was
// set on the instance before the prototype accessors were installed.
#upgradeProperty(prop) {
if (Object.prototype.hasOwnProperty.call(this, prop)) {
const value = this[prop];
delete this[prop];
this[prop] = value;
}
}
attributeChangedCallback(attrName, oldValue, newValue) {
this.#init();
this.#forwardAttribute(attrName, oldValue, newValue);
}
// Forward observed attributes to the native element — except id/class, and
// except attributes observed only by a subclass (left for the subclass).
#forwardAttribute(attrName, _oldValue, newValue) {
if (["id", "class"].includes(attrName)) return;
if (!CustomMedia.observedAttributes.includes(attrName) && this.constructor.observedAttributes.includes(attrName)) {
return;
}
if (newValue === null) {
this.nativeEl?.removeAttribute(attrName);
} else if (this.nativeEl?.getAttribute(attrName) !== newValue) {
this.nativeEl?.setAttribute(attrName, newValue);
}
}
connectedCallback() {
this.#init();
}
};
}
// Collect every own property name from each prototype between the native
// media element instance and HTMLElement.prototype (exclusive) — i.e. the
// HTMLVideoElement/HTMLAudioElement and HTMLMediaElement layers.
function getNativeElProps(nativeElTest) {
  const props = [];
  let proto = Object.getPrototypeOf(nativeElTest);
  while (proto && proto !== HTMLElement.prototype) {
    props.push(...Object.getOwnPropertyNames(proto));
    proto = Object.getPrototypeOf(proto);
  }
  return props;
}
// Serialize a name → value record into an HTML attribute string (with a
// leading space per attribute), keeping only attributes in the allowed
// `Attributes` list. An empty value renders as a boolean attribute.
function serializeAttributes(attrs) {
  const parts = [];
  for (const key of Object.keys(attrs)) {
    if (!Attributes.includes(key)) continue;
    const value = attrs[key];
    parts.push(value === "" ? ` ${key}` : ` ${key}="${value}"`);
  }
  return parts.join("");
}
// Convert a NamedNodeMap (e.g. element.attributes) into a plain
// attribute-name → value record.
function namedNodeMapToObject(namedNodeMap) {
  const result = {};
  for (const { name, value } of namedNodeMap) {
    result[name] = value;
  }
  return result;
}
// Concrete element classes. In non-DOM environments HTMLElement is undefined,
// so an empty base class keeps the module importable there.
const CustomVideoElement = CustomMediaMixin(globalThis.HTMLElement ?? class {
}, {
tag: "video"
});
const CustomAudioElement = CustomMediaMixin(globalThis.HTMLElement ?? class {
}, {
tag: "audio"
});
// Public module surface.
export {
Attributes,
CustomAudioElement,
CustomMediaMixin,
CustomVideoElement,
Events
};

View File

@@ -0,0 +1,77 @@
{
"name": "custom-media-element",
"version": "1.4.2",
"description": "A custom element for extending the native media elements (<audio> or <video>)",
"author": "@muxinc",
"license": "MIT",
"homepage": "https://github.com/muxinc/media-elements#readme",
"bugs": {
"url": "https://github.com/muxinc/media-elements/issues"
},
"repository": {
"type": "git",
"url": "git+https://github.com/muxinc/media-elements.git",
"directory": "packages/custom-media-element"
},
"files": [
"dist"
],
"type": "module",
"main": "dist/custom-media-element.js",
"types": "dist/custom-media-element.d.ts",
"scripts": {
"lint": "eslint *.ts",
"pretest": "npm run build",
"test": "wet test test/eager-upgrade.html test/lazy-upgrade.html --coverage",
"dev": "tsc -w & esbuild custom-media-element.ts --format=esm --outdir=dist --watch=forever",
"build:esm": "esbuild custom-media-element.ts --format=esm --outdir=dist",
"build:iife": "esbuild custom-media-element.ts --bundle --outfile=dist/custom-media-element.iife.js --global-name=CustomMediaElement",
"build": "rm -rf dist && npm run build:esm && npm run build:iife",
"postbuild": "tsc",
"serve": "wet serve --redirect :examples/ --cors"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^8.18.1",
"@typescript-eslint/parser": "^8.18.1",
"esbuild": "^0.24.0",
"typescript": "5.7.2",
"wet-run": "^1.2.5"
},
"eslintConfig": {
"root": true,
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/recommended"
],
"parser": "@typescript-eslint/parser",
"plugins": [
"@typescript-eslint"
],
"globals": {
"globalThis": "writable"
},
"env": {
"browser": true,
"es6": true,
"node": true,
"mocha": true
},
"parserOptions": {
"ecmaVersion": 2022,
"sourceType": "module"
},
"rules": {
"@typescript-eslint/ban-ts-comment": 0,
"@typescript-eslint/no-explicit-any": 0
}
},
"keywords": [
"custom",
"element",
"video",
"audio",
"media",
"web",
"component"
]
}

45
server/node_modules/castable-video/package.json generated vendored Normal file
View File

@@ -0,0 +1,45 @@
{
"name": "castable-video",
"version": "1.1.7",
"description": "Cast your video element to the big screen with ease!",
"author": "@muxinc",
"license": "MIT",
"homepage": "https://github.com/muxinc/media-elements#readme",
"bugs": {
"url": "https://github.com/muxinc/media-elements/issues"
},
"repository": {
"type": "git",
"url": "git+https://github.com/muxinc/media-elements.git",
"directory": "packages/castable-video"
},
"files": [
"castable-mixin.d.ts",
"castable-mixin.js",
"castable-remote-playback.js",
"castable-utils.js"
],
"type": "module",
"main": "castable-video.js",
"scripts": {
"lint": "eslint *.js",
"serve": "wet serve"
},
"dependencies": {
"custom-media-element": "~1.4.2"
},
"devDependencies": {
"wet-run": "^1.2.5"
},
"keywords": [
"chromecast",
"cast",
"webcomponent",
"html5",
"video",
"audio",
"media",
"player",
"controls"
]
}