node_modules ignore

This commit is contained in:
2025-05-08 23:43:47 +02:00
parent e19d52f172
commit 4574544c9f
65041 changed files with 10593536 additions and 0 deletions

684
server/node_modules/hls.js/src/config.ts generated vendored Normal file
View File

@@ -0,0 +1,684 @@
import AbrController from './controller/abr-controller';
import AudioStreamController from './controller/audio-stream-controller';
import AudioTrackController from './controller/audio-track-controller';
import { SubtitleStreamController } from './controller/subtitle-stream-controller';
import SubtitleTrackController from './controller/subtitle-track-controller';
import BufferController from './controller/buffer-controller';
import { TimelineController } from './controller/timeline-controller';
import CapLevelController from './controller/cap-level-controller';
import FPSController from './controller/fps-controller';
import EMEController, {
MediaKeySessionContext,
} from './controller/eme-controller';
import CMCDController from './controller/cmcd-controller';
import ContentSteeringController from './controller/content-steering-controller';
import ErrorController from './controller/error-controller';
import XhrLoader from './utils/xhr-loader';
import FetchLoader, { fetchSupported } from './utils/fetch-loader';
import Cues from './utils/cues';
import { requestMediaKeySystemAccess } from './utils/mediakeys-helper';
import { ILogger, logger } from './utils/logger';
import type Hls from './hls';
import type { CuesInterface } from './utils/cues';
import type { MediaKeyFunc, KeySystems } from './utils/mediakeys-helper';
import type {
FragmentLoaderContext,
Loader,
LoaderContext,
LoaderResponse,
PlaylistLoaderContext,
} from './types/loader';
import type {
AudioSelectionOption,
SubtitleSelectionOption,
VideoSelectionOption,
} from './types/media-playlist';
/**
 * Tuning parameters for the adaptive-bitrate (ABR) controller.
 * The `abrEwma*` values configure the fast/slow exponentially weighted
 * moving averages used for bandwidth estimation (separate constants for
 * live and VoD playback).
 */
export type ABRControllerConfig = {
  abrEwmaFastLive: number;
  abrEwmaSlowLive: number;
  abrEwmaFastVoD: number;
  abrEwmaSlowVoD: number;
  /**
   * Default bandwidth estimate in bits/s prior to collecting fragment bandwidth samples
   */
  abrEwmaDefaultEstimate: number;
  abrEwmaDefaultEstimateMax: number;
  // Safety factors applied to the bandwidth estimate when choosing a level
  // (up-switches use the stricter abrBandWidthUpFactor).
  abrBandWidthFactor: number;
  abrBandWidthUpFactor: number;
  abrMaxWithRealBitrate: boolean;
  maxStarvationDelay: number;
  maxLoadingDelay: number;
};
/** Settings consumed by the buffer controller (SourceBuffer management). */
export type BufferControllerConfig = {
  appendErrorMaxRetry: number;
  backBufferLength: number;
  frontBufferFlushThreshold: number;
  liveDurationInfinity: boolean;
  /**
   * @deprecated use backBufferLength
   */
  liveBackBufferLength: number | null;
};

/** Settings consumed by the cap-level controller. */
export type CapLevelControllerConfig = {
  capLevelToPlayerSize: boolean;
};

/** CMCD (Common Media Client Data) reporting options. */
export type CMCDControllerConfig = {
  sessionId?: string;
  contentId?: string;
  // When true, CMCD data is sent via request headers instead of query args.
  useHeaders?: boolean;
  includeKeys?: string[];
};
/**
 * Optional key-system capabilities forwarded when requesting
 * MediaKeySystemAccess.
 */
export type DRMSystemOptions = {
  audioRobustness?: string;
  videoRobustness?: string;
  audioEncryptionScheme?: string | null;
  videoEncryptionScheme?: string | null;
  persistentState?: MediaKeysRequirement;
  distinctiveIdentifier?: MediaKeysRequirement;
  sessionTypes?: string[];
  sessionType?: string;
};

/**
 * Per-key-system license endpoints, plus an optional hook invoked with the
 * session's init data before a key request is generated.
 */
export type DRMSystemConfiguration = {
  licenseUrl: string;
  serverCertificateUrl?: string;
  // Returns replacement init data, undefined to use the input unchanged,
  // or throws to reject the key-session request.
  generateRequest?: (
    this: Hls,
    initDataType: string,
    initData: ArrayBuffer | null,
    keyContext: MediaKeySessionContext,
  ) =>
    | { initDataType: string; initData: ArrayBuffer | null }
    | undefined
    | never;
};

/** Map from key-system identifier to its DRM configuration. */
export type DRMSystemsConfiguration = Partial<
  Record<KeySystems, DRMSystemConfiguration>
>;

/** EME (DRM) controller settings. */
export type EMEControllerConfig = {
  // Hook to customize the license-request XHR (e.g. auth headers); may
  // return a replacement license challenge payload (sync or async).
  licenseXhrSetup?: (
    this: Hls,
    xhr: XMLHttpRequest,
    url: string,
    keyContext: MediaKeySessionContext,
    licenseChallenge: Uint8Array,
  ) => void | Uint8Array | Promise<Uint8Array | void>;
  // Hook to transform the raw license response before it is applied.
  licenseResponseCallback?: (
    this: Hls,
    xhr: XMLHttpRequest,
    url: string,
    keyContext: MediaKeySessionContext,
  ) => ArrayBuffer;
  emeEnabled: boolean;
  widevineLicenseUrl?: string;
  drmSystems: DRMSystemsConfiguration;
  drmSystemOptions: DRMSystemOptions;
  requestMediaKeySystemAccessFunc: MediaKeyFunc | null;
};
/**
 * Construct signature for custom fragment loaders (the `fLoader` option).
 * Instantiated with the active HlsConfig.
 */
export interface FragmentLoaderConstructor {
  // Fix: parameter renamed from misspelled `confg` (type-level only; no
  // runtime or caller impact).
  new (config: HlsConfig): Loader<FragmentLoaderContext>;
}
/**
 * @deprecated use fragLoadPolicy.default
 */
export type FragmentLoaderConfig = {
  fragLoadingTimeOut: number;
  fragLoadingMaxRetry: number;
  fragLoadingRetryDelay: number;
  fragLoadingMaxRetryTimeout: number;
};

/** Settings consumed by the FPS controller (dropped-frame monitoring). */
export type FPSControllerConfig = {
  capLevelOnFPSDrop: boolean;
  fpsDroppedMonitoringPeriod: number;
  fpsDroppedMonitoringThreshold: number;
};

/** Settings consumed by the level controller. */
export type LevelControllerConfig = {
  startLevel?: number;
};

/** Settings consumed by the MP4 remuxer. */
export type MP4RemuxerConfig = {
  stretchShortVideoTrack: boolean;
  maxAudioFramesDrift: number;
};
/**
 * Construct signature for custom playlist loaders (the `pLoader` option).
 * Instantiated with the active HlsConfig.
 */
export interface PlaylistLoaderConstructor {
  // Fix: parameter renamed from misspelled `confg` (type-level only; no
  // runtime or caller impact).
  new (config: HlsConfig): Loader<PlaylistLoaderContext>;
}
/**
 * @deprecated use manifestLoadPolicy.default and playlistLoadPolicy.default
 */
export type PlaylistLoaderConfig = {
  manifestLoadingTimeOut: number;
  manifestLoadingMaxRetry: number;
  manifestLoadingRetryDelay: number;
  manifestLoadingMaxRetryTimeout: number;
  levelLoadingTimeOut: number;
  levelLoadingMaxRetry: number;
  levelLoadingRetryDelay: number;
  levelLoadingMaxRetryTimeout: number;
};

/** One load policy per resource category fetched by hls.js. */
export type HlsLoadPolicies = {
  fragLoadPolicy: LoadPolicy;
  keyLoadPolicy: LoadPolicy;
  certLoadPolicy: LoadPolicy;
  playlistLoadPolicy: LoadPolicy;
  manifestLoadPolicy: LoadPolicy;
  steeringManifestLoadPolicy: LoadPolicy;
};

/** Wrapper holding the default LoaderConfig for a resource category. */
export type LoadPolicy = {
  default: LoaderConfig;
};

/** Timeout and retry rules applied to a single load. */
export type LoaderConfig = {
  maxTimeToFirstByteMs: number; // Max time to first byte
  maxLoadTimeMs: number; // Max time for load completion
  timeoutRetry: RetryConfig | null;
  errorRetry: RetryConfig | null;
};

/** Retry behavior for a LoaderConfig (null disables retries). */
export type RetryConfig = {
  maxNumRetry: number; // Maximum number of retries
  retryDelayMs: number; // Retry delay = 2^retryCount * retryDelayMs (exponential) or retryCount * retryDelayMs (linear)
  maxRetryDelayMs: number; // Maximum delay between retries
  backoff?: 'exponential' | 'linear'; // used to determine retry backoff duration (see retryDelayMs)
  // Optional override deciding whether a given failure should be retried.
  shouldRetry?: (
    retryConfig: RetryConfig | null | undefined,
    retryCount: number,
    isTimeout: boolean,
    loaderResponse: LoaderResponse | undefined,
    retry: boolean,
  ) => boolean;
};
/** Settings consumed by the main stream controller. */
export type StreamControllerConfig = {
  autoStartLoad: boolean;
  startPosition: number;
  defaultAudioCodec?: string;
  initialLiveManifestSize: number;
  maxBufferLength: number;
  maxBufferSize: number;
  maxBufferHole: number;
  highBufferWatchdogPeriod: number;
  nudgeOffset: number;
  nudgeMaxRetry: number;
  maxFragLookUpTolerance: number;
  maxMaxBufferLength: number;
  startFragPrefetch: boolean;
  testBandwidth: boolean;
};

/** Preferred initial video/audio/subtitle rendition selections. */
export type SelectionPreferences = {
  videoPreference?: VideoSelectionOption;
  audioPreference?: AudioSelectionOption;
  subtitlePreference?: SubtitleSelectionOption;
};

/** Settings consumed by the latency controller (live edge targeting). */
export type LatencyControllerConfig = {
  liveSyncDurationCount: number;
  liveMaxLatencyDurationCount: number;
  liveSyncDuration?: number;
  liveMaxLatencyDuration?: number;
  maxLiveSyncPlaybackRate: number;
};

/** Toggles for metadata cue generation (DATERANGE / emsg / ID3). */
export type MetadataControllerConfig = {
  enableDateRangeMetadataCues: boolean;
  enableEmsgMetadataCues: boolean;
  enableID3MetadataCues: boolean;
};

/** Settings consumed by the timeline (captions/subtitles) controller. */
export type TimelineControllerConfig = {
  cueHandler: CuesInterface;
  enableWebVTT: boolean;
  enableIMSC1: boolean;
  enableCEA708Captions: boolean;
  captionsTextTrack1Label: string;
  captionsTextTrack1LanguageCode: string;
  captionsTextTrack2Label: string;
  captionsTextTrack2LanguageCode: string;
  captionsTextTrack3Label: string;
  captionsTextTrack3LanguageCode: string;
  captionsTextTrack4Label: string;
  captionsTextTrack4LanguageCode: string;
  renderTextTracksNatively: boolean;
};

/** Settings consumed by the TS demuxer. */
export type TSDemuxerConfig = {
  forceKeyFrameOnDiscontinuity: boolean;
};
/**
 * The full hls.js configuration: core options plus the intersection of all
 * controller-specific config types declared above.
 */
export type HlsConfig = {
  debug: boolean | ILogger;
  enableWorker: boolean;
  workerPath: null | string;
  enableSoftwareAES: boolean;
  minAutoBitrate: number;
  ignoreDevicePixelRatio: boolean;
  preferManagedMediaSource: boolean;
  // Fix: construct-signature parameter renamed from misspelled `confg`
  // (type-level only; no runtime or caller impact).
  loader: { new (config: HlsConfig): Loader<LoaderContext> };
  fLoader?: FragmentLoaderConstructor;
  pLoader?: PlaylistLoaderConstructor;
  fetchSetup?: (context: LoaderContext, initParams: any) => Request;
  xhrSetup?: (xhr: XMLHttpRequest, url: string) => Promise<void> | void;
  // Alt Audio
  audioStreamController?: typeof AudioStreamController;
  audioTrackController?: typeof AudioTrackController;
  // Subtitle
  subtitleStreamController?: typeof SubtitleStreamController;
  subtitleTrackController?: typeof SubtitleTrackController;
  timelineController?: typeof TimelineController;
  // EME
  emeController?: typeof EMEController;
  // CMCD
  cmcd?: CMCDControllerConfig;
  cmcdController?: typeof CMCDController;
  // Content Steering
  contentSteeringController?: typeof ContentSteeringController;
  // MediaCapabilies API for level, track, and switch filtering
  useMediaCapabilities: boolean;
  abrController: typeof AbrController;
  bufferController: typeof BufferController;
  capLevelController: typeof CapLevelController;
  errorController: typeof ErrorController;
  fpsController: typeof FPSController;
  progressive: boolean;
  lowLatencyMode: boolean;
} & ABRControllerConfig &
  BufferControllerConfig &
  CapLevelControllerConfig &
  EMEControllerConfig &
  FPSControllerConfig &
  LevelControllerConfig &
  MP4RemuxerConfig &
  StreamControllerConfig &
  SelectionPreferences &
  LatencyControllerConfig &
  MetadataControllerConfig &
  TimelineControllerConfig &
  TSDemuxerConfig &
  HlsLoadPolicies &
  FragmentLoaderConfig &
  PlaylistLoaderConfig;
// Baseline loader policy: 8s to first byte, 20s total load time, no retries.
const defaultLoadPolicy: LoaderConfig = {
  maxTimeToFirstByteMs: 8000,
  maxLoadTimeMs: 20000,
  timeoutRetry: null,
  errorRetry: null,
};
/**
 * @ignore
 * Default values for every HlsConfig option.
 * If possible, keep hlsDefaultConfig shallow
 * It is cloned whenever a new Hls instance is created, by keeping the config
 * shallow the properties are cloned, and we don't end up manipulating the default
 */
export const hlsDefaultConfig: HlsConfig = {
  autoStartLoad: true, // used by stream-controller
  startPosition: -1, // used by stream-controller
  defaultAudioCodec: undefined, // used by stream-controller
  debug: false, // used by logger
  capLevelOnFPSDrop: false, // used by fps-controller
  capLevelToPlayerSize: false, // used by cap-level-controller
  ignoreDevicePixelRatio: false, // used by cap-level-controller
  preferManagedMediaSource: true,
  initialLiveManifestSize: 1, // used by stream-controller
  maxBufferLength: 30, // used by stream-controller
  backBufferLength: Infinity, // used by buffer-controller
  frontBufferFlushThreshold: Infinity,
  maxBufferSize: 60 * 1000 * 1000, // used by stream-controller
  maxBufferHole: 0.1, // used by stream-controller
  highBufferWatchdogPeriod: 2, // used by stream-controller
  nudgeOffset: 0.1, // used by stream-controller
  nudgeMaxRetry: 3, // used by stream-controller
  maxFragLookUpTolerance: 0.25, // used by stream-controller
  liveSyncDurationCount: 3, // used by latency-controller
  liveMaxLatencyDurationCount: Infinity, // used by latency-controller
  liveSyncDuration: undefined, // used by latency-controller
  liveMaxLatencyDuration: undefined, // used by latency-controller
  maxLiveSyncPlaybackRate: 1, // used by latency-controller
  liveDurationInfinity: false, // used by buffer-controller
  /**
   * @deprecated use backBufferLength
   */
  liveBackBufferLength: null, // used by buffer-controller
  maxMaxBufferLength: 600, // used by stream-controller
  enableWorker: true, // used by transmuxer
  workerPath: null, // used by transmuxer
  enableSoftwareAES: true, // used by decrypter
  startLevel: undefined, // used by level-controller
  startFragPrefetch: false, // used by stream-controller
  fpsDroppedMonitoringPeriod: 5000, // used by fps-controller
  fpsDroppedMonitoringThreshold: 0.2, // used by fps-controller
  appendErrorMaxRetry: 3, // used by buffer-controller
  loader: XhrLoader,
  // loader: FetchLoader,
  fLoader: undefined, // used by fragment-loader
  pLoader: undefined, // used by playlist-loader
  xhrSetup: undefined, // used by xhr-loader
  licenseXhrSetup: undefined, // used by eme-controller
  licenseResponseCallback: undefined, // used by eme-controller
  abrController: AbrController,
  bufferController: BufferController,
  capLevelController: CapLevelController,
  errorController: ErrorController,
  fpsController: FPSController,
  stretchShortVideoTrack: false, // used by mp4-remuxer
  maxAudioFramesDrift: 1, // used by mp4-remuxer
  forceKeyFrameOnDiscontinuity: true, // used by ts-demuxer
  abrEwmaFastLive: 3, // used by abr-controller
  abrEwmaSlowLive: 9, // used by abr-controller
  abrEwmaFastVoD: 3, // used by abr-controller
  abrEwmaSlowVoD: 9, // used by abr-controller
  abrEwmaDefaultEstimate: 5e5, // 500 kbps // used by abr-controller
  abrEwmaDefaultEstimateMax: 5e6, // 5 mbps
  abrBandWidthFactor: 0.95, // used by abr-controller
  abrBandWidthUpFactor: 0.7, // used by abr-controller
  abrMaxWithRealBitrate: false, // used by abr-controller
  maxStarvationDelay: 4, // used by abr-controller
  maxLoadingDelay: 4, // used by abr-controller
  minAutoBitrate: 0, // used by hls
  emeEnabled: false, // used by eme-controller
  widevineLicenseUrl: undefined, // used by eme-controller
  drmSystems: {}, // used by eme-controller
  drmSystemOptions: {}, // used by eme-controller
  // __USE_EME_DRM__ is a build-time define; DRM support is compiled out
  // when it is false.
  requestMediaKeySystemAccessFunc: __USE_EME_DRM__
    ? requestMediaKeySystemAccess
    : null, // used by eme-controller
  testBandwidth: true,
  progressive: false,
  lowLatencyMode: true,
  cmcd: undefined,
  enableDateRangeMetadataCues: true,
  enableEmsgMetadataCues: true,
  enableID3MetadataCues: true,
  useMediaCapabilities: __USE_MEDIA_CAPABILITIES__,
  // Per-resource load policies (timeouts + retry rules).
  certLoadPolicy: {
    default: defaultLoadPolicy,
  },
  keyLoadPolicy: {
    default: {
      maxTimeToFirstByteMs: 8000,
      maxLoadTimeMs: 20000,
      timeoutRetry: {
        maxNumRetry: 1,
        retryDelayMs: 1000,
        maxRetryDelayMs: 20000,
        backoff: 'linear',
      },
      errorRetry: {
        maxNumRetry: 8,
        retryDelayMs: 1000,
        maxRetryDelayMs: 20000,
        backoff: 'linear',
      },
    },
  },
  manifestLoadPolicy: {
    default: {
      maxTimeToFirstByteMs: Infinity,
      maxLoadTimeMs: 20000,
      timeoutRetry: {
        maxNumRetry: 2,
        retryDelayMs: 0,
        maxRetryDelayMs: 0,
      },
      errorRetry: {
        maxNumRetry: 1,
        retryDelayMs: 1000,
        maxRetryDelayMs: 8000,
      },
    },
  },
  playlistLoadPolicy: {
    default: {
      maxTimeToFirstByteMs: 10000,
      maxLoadTimeMs: 20000,
      timeoutRetry: {
        maxNumRetry: 2,
        retryDelayMs: 0,
        maxRetryDelayMs: 0,
      },
      errorRetry: {
        maxNumRetry: 2,
        retryDelayMs: 1000,
        maxRetryDelayMs: 8000,
      },
    },
  },
  fragLoadPolicy: {
    default: {
      maxTimeToFirstByteMs: 10000,
      maxLoadTimeMs: 120000,
      timeoutRetry: {
        maxNumRetry: 4,
        retryDelayMs: 0,
        maxRetryDelayMs: 0,
      },
      errorRetry: {
        maxNumRetry: 6,
        retryDelayMs: 1000,
        maxRetryDelayMs: 8000,
      },
    },
  },
  steeringManifestLoadPolicy: {
    default: __USE_CONTENT_STEERING__
      ? {
          maxTimeToFirstByteMs: 10000,
          maxLoadTimeMs: 20000,
          timeoutRetry: {
            maxNumRetry: 2,
            retryDelayMs: 0,
            maxRetryDelayMs: 0,
          },
          errorRetry: {
            maxNumRetry: 1,
            retryDelayMs: 1000,
            maxRetryDelayMs: 8000,
          },
        }
      : defaultLoadPolicy,
  },
  // These default settings are deprecated in favor of the above policies
  // and are maintained for backwards compatibility
  manifestLoadingTimeOut: 10000,
  manifestLoadingMaxRetry: 1,
  manifestLoadingRetryDelay: 1000,
  manifestLoadingMaxRetryTimeout: 64000,
  levelLoadingTimeOut: 10000,
  levelLoadingMaxRetry: 4,
  levelLoadingRetryDelay: 1000,
  levelLoadingMaxRetryTimeout: 64000,
  fragLoadingTimeOut: 20000,
  fragLoadingMaxRetry: 6,
  fragLoadingRetryDelay: 1000,
  fragLoadingMaxRetryTimeout: 64000,
  // Dynamic Modules
  ...timelineConfig(),
  // Optional controllers are compiled out (undefined) when the matching
  // __USE_*__ build define is false.
  subtitleStreamController: __USE_SUBTITLES__
    ? SubtitleStreamController
    : undefined,
  subtitleTrackController: __USE_SUBTITLES__
    ? SubtitleTrackController
    : undefined,
  timelineController: __USE_SUBTITLES__ ? TimelineController : undefined,
  audioStreamController: __USE_ALT_AUDIO__ ? AudioStreamController : undefined,
  audioTrackController: __USE_ALT_AUDIO__ ? AudioTrackController : undefined,
  emeController: __USE_EME_DRM__ ? EMEController : undefined,
  cmcdController: __USE_CMCD__ ? CMCDController : undefined,
  contentSteeringController: __USE_CONTENT_STEERING__
    ? ContentSteeringController
    : undefined,
};
/**
 * Default captions/timeline settings, consumed by the timeline-controller.
 * Subtitle feature toggles follow the __USE_SUBTITLES__ build define.
 */
function timelineConfig(): TimelineControllerConfig {
  const subtitlesEnabled = __USE_SUBTITLES__;
  return {
    cueHandler: Cues,
    enableWebVTT: subtitlesEnabled,
    enableIMSC1: subtitlesEnabled,
    enableCEA708Captions: subtitlesEnabled,
    // Default labels/languages for the four CEA-708 caption tracks.
    captionsTextTrack1Label: 'English',
    captionsTextTrack1LanguageCode: 'en',
    captionsTextTrack2Label: 'Spanish',
    captionsTextTrack2LanguageCode: 'es',
    captionsTextTrack3Label: 'Unknown CC',
    captionsTextTrack3LanguageCode: '',
    captionsTextTrack4Label: 'Unknown CC',
    captionsTextTrack4LanguageCode: '',
    renderTextTracksNatively: true,
  };
}
/**
 * @ignore
 * Validate a user config against the defaults, translate deprecated
 * loading settings into the corresponding load policies, and return the
 * merged result (user values override defaults).
 * @throws Error when mutually exclusive or inconsistent live-latency
 *   options are supplied.
 */
export function mergeConfig(
  defaultConfig: HlsConfig,
  userConfig: Partial<HlsConfig>,
): HlsConfig {
  // Count-based and duration-based live sync options are mutually exclusive.
  if (
    (userConfig.liveSyncDurationCount ||
      userConfig.liveMaxLatencyDurationCount) &&
    (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)
  ) {
    throw new Error(
      "Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration",
    );
  }
  if (
    userConfig.liveMaxLatencyDurationCount !== undefined &&
    (userConfig.liveSyncDurationCount === undefined ||
      userConfig.liveMaxLatencyDurationCount <=
        userConfig.liveSyncDurationCount)
  ) {
    throw new Error(
      'Illegal hls.js config: "liveMaxLatencyDurationCount" must be greater than "liveSyncDurationCount"',
    );
  }
  if (
    userConfig.liveMaxLatencyDuration !== undefined &&
    (userConfig.liveSyncDuration === undefined ||
      userConfig.liveMaxLatencyDuration <= userConfig.liveSyncDuration)
  ) {
    throw new Error(
      'Illegal hls.js config: "liveMaxLatencyDuration" must be greater than "liveSyncDuration"',
    );
  }
  const defaultsCopy = deepCpy(defaultConfig);
  // Backwards compatibility with deprecated config values
  const deprecatedSettingTypes = ['manifest', 'level', 'frag'];
  const deprecatedSettings = [
    'TimeOut',
    'MaxRetry',
    'RetryDelay',
    'MaxRetryTimeout',
  ];
  deprecatedSettingTypes.forEach((type) => {
    // Deprecated 'level*' settings map onto the playlist load policy.
    const policyName = `${type === 'level' ? 'playlist' : type}LoadPolicy`;
    const policyNotSet = userConfig[policyName] === undefined;
    const report: string[] = [];
    deprecatedSettings.forEach((setting) => {
      const deprecatedSetting = `${type}Loading${setting}`;
      const value = userConfig[deprecatedSetting];
      // Only translate deprecated values when the user has not supplied the
      // replacement policy themselves.
      if (value !== undefined && policyNotSet) {
        report.push(deprecatedSetting);
        // Mutate the copied default policy in place and install it on the
        // user config so it survives the final spread-merge below.
        const settings: LoaderConfig = defaultsCopy[policyName].default;
        userConfig[policyName] = { default: settings };
        switch (setting) {
          case 'TimeOut':
            settings.maxLoadTimeMs = value;
            settings.maxTimeToFirstByteMs = value;
            break;
          case 'MaxRetry':
            settings.errorRetry!.maxNumRetry = value;
            settings.timeoutRetry!.maxNumRetry = value;
            break;
          case 'RetryDelay':
            settings.errorRetry!.retryDelayMs = value;
            settings.timeoutRetry!.retryDelayMs = value;
            break;
          case 'MaxRetryTimeout':
            settings.errorRetry!.maxRetryDelayMs = value;
            settings.timeoutRetry!.maxRetryDelayMs = value;
            break;
        }
      }
    });
    if (report.length) {
      logger.warn(
        `hls.js config: "${report.join(
          '", "',
        )}" setting(s) are deprecated, use "${policyName}": ${JSON.stringify(
          userConfig[policyName],
        )}`,
      );
    }
  });
  // User-supplied keys win over the (mutated) defaults copy.
  return {
    ...defaultsCopy,
    ...userConfig,
  };
}
/**
 * Recursively clone a plain-data value (objects, arrays, primitives).
 * Non-object values (including null/undefined) are returned as-is.
 */
function deepCpy(obj: any): any {
  if (!obj || typeof obj !== 'object') {
    return obj;
  }
  if (Array.isArray(obj)) {
    const arrCopy: any[] = [];
    for (const item of obj) {
      arrCopy.push(deepCpy(item));
    }
    return arrCopy;
  }
  const objCopy: Record<string, any> = {};
  for (const key of Object.keys(obj)) {
    objCopy[key] = deepCpy(obj[key]);
  }
  return objCopy;
}
/**
 * @ignore
 * Switch the config to FetchLoader-based progressive streaming when the
 * environment supports it. A custom (non-built-in) loader disables
 * progressive mode entirely.
 */
export function enableStreamingMode(config) {
  const activeLoader = config.loader;
  const isBuiltInLoader =
    activeLoader === FetchLoader || activeLoader === XhrLoader;
  if (!isBuiltInLoader) {
    // If a developer has configured their own loader, respect that choice
    logger.log(
      '[config]: Custom loader detected, cannot enable progressive streaming',
    );
    config.progressive = false;
    return;
  }
  if (fetchSupported()) {
    config.loader = FetchLoader;
    config.progressive = true;
    // Progressive decryption requires the software AES path.
    config.enableSoftwareAES = true;
    logger.log('[config]: Progressive streaming enabled, using FetchLoader');
  }
}

View File

@@ -0,0 +1,892 @@
import EwmaBandWidthEstimator from '../utils/ewma-bandwidth-estimator';
import { Events } from '../events';
import { ErrorDetails } from '../errors';
import { PlaylistLevelType } from '../types/loader';
import { logger } from '../utils/logger';
import {
SUPPORTED_INFO_DEFAULT,
getMediaDecodingInfoPromise,
requiresMediaCapabilitiesDecodingInfo,
} from '../utils/mediacapabilities-helper';
import {
getAudioTracksByGroup,
getCodecTiers,
getStartCodecTier,
type AudioTracksByGroup,
type CodecSetTier,
} from '../utils/rendition-helper';
import type { Fragment } from '../loader/fragment';
import type { Part } from '../loader/fragment';
import type { Level, VideoRange } from '../types/level';
import type { LoaderStats } from '../types/loader';
import type Hls from '../hls';
import type {
FragLoadingData,
FragLoadedData,
FragBufferedData,
LevelLoadedData,
LevelSwitchingData,
ManifestLoadingData,
ErrorData,
} from '../types/events';
import type { AbrComponentAPI } from '../types/component-api';
class AbrController implements AbrComponentAPI {
  protected hls: Hls;
  // Seconds spent loading the most recent level playlist; added to frag
  // load estimates when switching to a level without details (see
  // getTimeToLoadFrag / onLevelLoaded).
  private lastLevelLoadSec: number = 0;
  // Level of the last successfully loaded main fragment; -1 until then.
  private lastLoadedFragLevel: number = -1;
  // Cached initial level selection; -1 means "not yet chosen".
  private firstSelection: number = -1;
  // Forced next auto level (set by emergency down-switch); -1 = none.
  private _nextAutoLevel: number = -1;
  private nextAutoLevelKey: string = '';
  // Caches rebuilt lazily; cleared whenever levels are updated.
  private audioTracksByGroup: AudioTracksByGroup | null = null;
  private codecTiers: Record<string, CodecSetTier> | null = null;
  // Interval id for the _abandonRulesCheck watchdog; -1 when stopped.
  private timer: number = -1;
  // Fragment/part currently loading (excluding bandwidth-test frags).
  private fragCurrent: Fragment | null = null;
  private partCurrent: Part | null = null;
  // Seconds taken by the last bitrate-test fragment (see onFragBuffered).
  private bitrateTestDelay: number = 0;
  public bwEstimator: EwmaBandWidthEstimator;
  /**
   * @param hls - owning Hls instance; ABR tuning is read from hls.config.
   */
  constructor(hls: Hls) {
    this.hls = hls;
    this.bwEstimator = this.initEstimator();
    this.registerListeners();
  }
  /**
   * Replace the bandwidth estimator, optionally seeding it with a new
   * default estimate (bits/s). Also clears the cached first selection so
   * the next level choice starts fresh.
   */
  public resetEstimator(abrEwmaDefaultEstimate?: number) {
    if (abrEwmaDefaultEstimate) {
      logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
      this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
    }
    this.firstSelection = -1;
    this.bwEstimator = this.initEstimator();
  }
  // Build a fresh EWMA bandwidth estimator from the VoD smoothing constants
  // and the configured default estimate (live constants are applied later
  // in onLevelLoaded if the stream turns out to be live).
  private initEstimator(): EwmaBandWidthEstimator {
    const config = this.hls.config;
    return new EwmaBandWidthEstimator(
      config.abrEwmaSlowVoD,
      config.abrEwmaFastVoD,
      config.abrEwmaDefaultEstimate,
    );
  }
  /** Subscribe to the hls.js events this controller reacts to. */
  protected registerListeners() {
    const { hls } = this;
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.FRAG_LOADING, this.onFragLoading, this);
    hls.on(Events.FRAG_LOADED, this.onFragLoaded, this);
    hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
    hls.on(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
    hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
    hls.on(Events.MAX_AUTO_LEVEL_UPDATED, this.onMaxAutoLevelUpdated, this);
    hls.on(Events.ERROR, this.onError, this);
  }
  /** Remove every subscription added by registerListeners (mirror order). */
  protected unregisterListeners() {
    const { hls } = this;
    // hls may already have been nulled by destroy().
    if (!hls) {
      return;
    }
    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.off(Events.FRAG_LOADING, this.onFragLoading, this);
    hls.off(Events.FRAG_LOADED, this.onFragLoaded, this);
    hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
    hls.off(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
    hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
    hls.off(Events.MAX_AUTO_LEVEL_UPDATED, this.onMaxAutoLevelUpdated, this);
    hls.off(Events.ERROR, this.onError, this);
  }
  /** Detach listeners, stop the watchdog, and drop references for GC. */
  public destroy() {
    this.unregisterListeners();
    this.clearTimer();
    // @ts-ignore
    this.hls = this._abandonRulesCheck = null;
    this.fragCurrent = this.partCurrent = null;
  }
  /**
   * Reset all per-stream state when a new manifest starts loading so the
   * controller starts fresh for the new content.
   */
  protected onManifestLoading(
    event: Events.MANIFEST_LOADING,
    data: ManifestLoadingData,
  ) {
    this.lastLoadedFragLevel = -1;
    this.firstSelection = -1;
    this.lastLevelLoadSec = 0;
    this.fragCurrent = this.partCurrent = null;
    this.onLevelsUpdated();
    this.clearTimer();
  }
  /**
   * Invalidate level-derived caches after the level list changes, and remap
   * the last-loaded level to the (possibly re-indexed) current fragment.
   */
  private onLevelsUpdated() {
    if (this.lastLoadedFragLevel > -1 && this.fragCurrent) {
      this.lastLoadedFragLevel = this.fragCurrent.level;
    }
    this._nextAutoLevel = -1;
    this.onMaxAutoLevelUpdated();
    this.codecTiers = null;
    this.audioTracksByGroup = null;
  }

  /** Invalidate cached level selection when the max auto level changes. */
  private onMaxAutoLevelUpdated() {
    this.firstSelection = -1;
    this.nextAutoLevelKey = '';
  }
  /**
   * Track the fragment being loaded (unless it is a bitrate test) and start
   * the 100ms abandon-rules watchdog that monitors its download rate.
   */
  protected onFragLoading(event: Events.FRAG_LOADING, data: FragLoadingData) {
    const frag = data.frag;
    if (this.ignoreFragment(frag)) {
      return;
    }
    if (!frag.bitrateTest) {
      this.fragCurrent = frag;
      this.partCurrent = data.part ?? null;
    }
    this.clearTimer();
    this.timer = self.setInterval(this._abandonRulesCheck, 100);
  }
  /** Stop monitoring the in-flight fragment once a level switch begins. */
  protected onLevelSwitching(
    event: Events.LEVEL_SWITCHING,
    data: LevelSwitchingData,
  ): void {
    this.clearTimer();
  }
  /**
   * React to non-fatal errors: buffer/codec errors reset level selection
   * state, and a fragment load timeout on the currently tracked fragment is
   * fed into the bandwidth estimator so the stall lowers the estimate.
   */
  protected onError(event: Events.ERROR, data: ErrorData) {
    if (data.fatal) {
      return;
    }
    switch (data.details) {
      case ErrorDetails.BUFFER_ADD_CODEC_ERROR:
      case ErrorDetails.BUFFER_APPEND_ERROR:
        // Reset last loaded level so that a new selection can be made after calling recoverMediaError
        this.lastLoadedFragLevel = -1;
        this.firstSelection = -1;
        break;
      case ErrorDetails.FRAG_LOAD_TIMEOUT: {
        const frag = data.frag;
        const { fragCurrent, partCurrent: part } = this;
        // Only sample if the timed-out fragment is the one we are tracking.
        if (
          frag &&
          fragCurrent &&
          frag.sn === fragCurrent.sn &&
          frag.level === fragCurrent.level
        ) {
          const now = performance.now();
          const stats: LoaderStats = part ? part.stats : frag.stats;
          const timeLoading = now - stats.loading.start;
          // Time to first byte; -1 when no byte has arrived yet.
          const ttfb = stats.loading.first
            ? stats.loading.first - stats.loading.start
            : -1;
          const loadedFirstByte = stats.loaded && ttfb > -1;
          if (loadedFirstByte) {
            // Sample bandwidth using load time minus the smaller of the
            // estimated and observed TTFB.
            const ttfbEstimate = this.bwEstimator.getEstimateTTFB();
            this.bwEstimator.sample(
              timeLoading - Math.min(ttfbEstimate, ttfb),
              stats.loaded,
            );
          } else {
            // No payload yet: the whole elapsed time is TTFB.
            this.bwEstimator.sampleTTFB(timeLoading);
          }
        }
        break;
      }
    }
  }
private getTimeToLoadFrag(
timeToFirstByteSec: number,
bandwidth: number,
fragSizeBits: number,
isSwitch: boolean,
): number {
const fragLoadSec = timeToFirstByteSec + fragSizeBits / bandwidth;
const playlistLoadSec = isSwitch ? this.lastLevelLoadSec : 0;
return fragLoadSec + playlistLoadSec;
}
  /**
   * Record how long the level playlist took to load (used when estimating
   * the cost of a level switch) and retune the estimator's EWMA smoothing
   * constants for live vs VoD content.
   */
  protected onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
    const config = this.hls.config;
    const { loading } = data.stats;
    const timeLoadingMs = loading.end - loading.start;
    if (Number.isFinite(timeLoadingMs)) {
      this.lastLevelLoadSec = timeLoadingMs / 1000;
    }
    if (data.details.live) {
      this.bwEstimator.update(config.abrEwmaSlowLive, config.abrEwmaFastLive);
    } else {
      this.bwEstimator.update(config.abrEwmaSlowVoD, config.abrEwmaFastVoD);
    }
  }
  /*
    This method monitors the download rate of the current fragment, and will downswitch if that fragment will not load
    quickly enough to prevent underbuffering.
    Runs on a 100ms interval while a fragment is loading (see onFragLoading);
    arrow property so it can be passed directly to setInterval.
  */
  private _abandonRulesCheck = () => {
    const { fragCurrent: frag, partCurrent: part, hls } = this;
    const { autoLevelEnabled, media } = hls;
    if (!frag || !media) {
      return;
    }
    const now = performance.now();
    const stats: LoaderStats = part ? part.stats : frag.stats;
    const duration = part ? part.duration : frag.duration;
    const timeLoading = now - stats.loading.start;
    const minAutoLevel = hls.minAutoLevel;
    // If frag loading is aborted, complete, or from lowest level, stop timer and return
    if (
      stats.aborted ||
      (stats.loaded && stats.loaded === stats.total) ||
      frag.level <= minAutoLevel
    ) {
      this.clearTimer();
      // reset forced auto level value so that next level will be selected
      this._nextAutoLevel = -1;
      return;
    }
    // This check only runs if we're in ABR mode and actually playing
    if (
      !autoLevelEnabled ||
      media.paused ||
      !media.playbackRate ||
      !media.readyState
    ) {
      return;
    }
    const bufferInfo = hls.mainForwardBufferInfo;
    if (bufferInfo === null) {
      return;
    }
    const ttfbEstimate = this.bwEstimator.getEstimateTTFB();
    const playbackRate = Math.abs(media.playbackRate);
    // To maintain stable adaptive playback, only begin monitoring frag loading after half or more of its playback duration has passed
    if (
      timeLoading <=
      Math.max(ttfbEstimate, 1000 * (duration / (playbackRate * 2)))
    ) {
      return;
    }
    // bufferStarvationDelay is an estimate of the amount time (in seconds) it will take to exhaust the buffer
    const bufferStarvationDelay = bufferInfo.len / playbackRate;
    // Observed time-to-first-byte; -1 when no byte has arrived yet.
    const ttfb = stats.loading.first
      ? stats.loading.first - stats.loading.start
      : -1;
    const loadedFirstByte = stats.loaded && ttfb > -1;
    const bwEstimate: number = this.getBwEstimate();
    const levels = hls.levels;
    const level = levels[frag.level];
    // Expected fragment size in bytes: reported total, or derived from the
    // level's average bitrate as a fallback.
    const expectedLen =
      stats.total ||
      Math.max(stats.loaded, Math.round((duration * level.averageBitrate) / 8));
    let timeStreaming = loadedFirstByte ? timeLoading - ttfb : timeLoading;
    if (timeStreaming < 1 && loadedFirstByte) {
      timeStreaming = Math.min(timeLoading, (stats.loaded * 8) / bwEstimate);
    }
    // Bytes per second actually observed for this fragment so far.
    const loadRate = loadedFirstByte
      ? (stats.loaded * 1000) / timeStreaming
      : 0;
    // fragLoadDelay is an estimate of the time (in seconds) it will take to buffer the remainder of the fragment
    const fragLoadedDelay = loadRate
      ? (expectedLen - stats.loaded) / loadRate
      : (expectedLen * 8) / bwEstimate + ttfbEstimate / 1000;
    // Only downswitch if the time to finish loading the current fragment is greater than the amount of buffer left
    if (fragLoadedDelay <= bufferStarvationDelay) {
      return;
    }
    const bwe = loadRate ? loadRate * 8 : bwEstimate;
    let fragLevelNextLoadedDelay: number = Number.POSITIVE_INFINITY;
    let nextLoadLevel: number;
    // Iterate through lower level and try to find the largest one that avoids rebuffering
    for (
      nextLoadLevel = frag.level - 1;
      nextLoadLevel > minAutoLevel;
      nextLoadLevel--
    ) {
      // compute time to load next fragment at lower level
      // 8 = bits per byte (bps/Bps)
      const levelNextBitrate = levels[nextLoadLevel].maxBitrate;
      fragLevelNextLoadedDelay = this.getTimeToLoadFrag(
        ttfbEstimate / 1000,
        bwe,
        duration * levelNextBitrate,
        !levels[nextLoadLevel].details,
      );
      if (fragLevelNextLoadedDelay < bufferStarvationDelay) {
        break;
      }
    }
    // Only emergency switch down if it takes less time to load a new fragment at lowest level instead of continuing
    // to load the current one
    if (fragLevelNextLoadedDelay >= fragLoadedDelay) {
      return;
    }
    // if estimated load time of new segment is completely unreasonable, ignore and do not emergency switch down
    if (fragLevelNextLoadedDelay > duration * 10) {
      return;
    }
    hls.nextLoadLevel = hls.nextAutoLevel = nextLoadLevel;
    if (loadedFirstByte) {
      // If there has been loading progress, sample bandwidth using loading time offset by minimum TTFB time
      this.bwEstimator.sample(
        timeLoading - Math.min(ttfbEstimate, ttfb),
        stats.loaded,
      );
    } else {
      // If there has been no loading progress, sample TTFB
      this.bwEstimator.sampleTTFB(timeLoading);
    }
    const nextLoadLevelBitrate = levels[nextLoadLevel].maxBitrate;
    // If the estimate now far exceeds the target level's bitrate, reseed the
    // estimator at that bitrate so it recovers gradually.
    if (
      this.getBwEstimate() * this.hls.config.abrBandWidthUpFactor >
      nextLoadLevelBitrate
    ) {
      this.resetEstimator(nextLoadLevelBitrate);
    }
    this.clearTimer();
    logger.warn(`[abr] Fragment ${frag.sn}${
      part ? ' part ' + part.index : ''
    } of level ${frag.level} is loading too slowly;
      Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
      Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
      Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(
        3,
      )} s
      TTFB estimate: ${ttfb | 0} ms
      Current BW estimate: ${
        Number.isFinite(bwEstimate) ? bwEstimate | 0 : 'Unknown'
      } bps
      New BW estimate: ${this.getBwEstimate() | 0} bps
      Switching to level ${nextLoadLevel} @ ${nextLoadLevelBitrate | 0} bps`);
    hls.trigger(Events.FRAG_LOAD_EMERGENCY_ABORTED, { frag, part, stats });
  };
  /**
   * On fragment load completion: sample TTFB for main fragments, stop the
   * watchdog, update real-bitrate accounting, and either forward bitrate
   * test fragments to onFragBuffered or record the loaded level.
   */
  protected onFragLoaded(
    event: Events.FRAG_LOADED,
    { frag, part }: FragLoadedData,
  ) {
    const stats = part ? part.stats : frag.stats;
    if (frag.type === PlaylistLevelType.MAIN) {
      this.bwEstimator.sampleTTFB(stats.loading.first - stats.loading.start);
    }
    if (this.ignoreFragment(frag)) {
      return;
    }
    // stop monitoring bw once frag loaded
    this.clearTimer();
    // reset forced auto level value so that next level will be selected
    if (frag.level === this._nextAutoLevel) {
      this._nextAutoLevel = -1;
    }
    this.firstSelection = -1;
    // compute level average bitrate
    if (this.hls.config.abrMaxWithRealBitrate) {
      const duration = part ? part.duration : frag.duration;
      const level = this.hls.levels[frag.level];
      const loadedBytes =
        (level.loaded ? level.loaded.bytes : 0) + stats.loaded;
      const loadedDuration =
        (level.loaded ? level.loaded.duration : 0) + duration;
      level.loaded = { bytes: loadedBytes, duration: loadedDuration };
      level.realBitrate = Math.round((8 * loadedBytes) / loadedDuration);
    }
    if (frag.bitrateTest) {
      // Bitrate-test fragments are never appended to the buffer, so feed
      // them through the FRAG_BUFFERED path here for bandwidth sampling.
      const fragBufferedData: FragBufferedData = {
        stats,
        frag,
        part,
        id: frag.type,
      };
      this.onFragBuffered(Events.FRAG_BUFFERED, fragBufferedData);
      frag.bitrateTest = false;
    } else {
      // store level id after successful fragment load for playback
      this.lastLoadedFragLevel = frag.level;
    }
  }
/**
 * FRAG_BUFFERED handler: folds the fragment's load+parse duration into the
 * bandwidth estimator (alt-audio and init segments excluded) and records the
 * bitrate-test delay consumed by getNextABRAutoLevel at startup.
 */
protected onFragBuffered(
  event: Events.FRAG_BUFFERED,
  data: FragBufferedData,
) {
  const { frag, part } = data;
  // Use part stats only if the part actually loaded bytes (LL-HLS)
  const stats = part?.stats.loaded ? part.stats : frag.stats;
  if (stats.aborted) {
    return;
  }
  if (this.ignoreFragment(frag)) {
    return;
  }
  // Use the difference between parsing and request instead of buffering and request to compute fragLoadingProcessing;
  // rationale is that buffer appending only happens once media is attached. This can happen when config.startFragPrefetch
  // is used. If we used buffering in that case, our BW estimate sample will be very large.
  const processingMs =
    stats.parsing.end -
    stats.loading.start -
    Math.min(
      stats.loading.first - stats.loading.start,
      this.bwEstimator.getEstimateTTFB(),
    );
  this.bwEstimator.sample(processingMs, stats.loaded);
  stats.bwEstimate = this.getBwEstimate();
  if (frag.bitrateTest) {
    this.bitrateTestDelay = processingMs / 1000;
  } else {
    this.bitrateTestDelay = 0;
  }
}
/**
 * True for fragments that must not contribute to bandwidth estimation:
 * init segments and anything not belonging to the main playlist.
 */
private ignoreFragment(frag: Fragment): boolean {
  if (frag.sn === 'initSegment') {
    return true;
  }
  return frag.type !== PlaylistLevelType.MAIN;
}
/** Cancel the slow-load monitoring interval, if one is active. */
public clearTimer() {
  const timer = this.timer;
  if (timer > -1) {
    self.clearInterval(timer);
    this.timer = -1;
  }
}
/**
 * Starting auto level: the best level the current bandwidth estimate can
 * sustain within maxStarvationDelay; falls back to the playlist's first
 * level clamped into [minAutoLevel, maxAutoLevel] when none qualifies.
 */
public get firstAutoLevel(): number {
  const { maxAutoLevel, minAutoLevel } = this.hls;
  const estimate = this.getBwEstimate();
  const startDelay = this.hls.config.maxStarvationDelay;
  const level = this.findBestLevel(
    estimate,
    minAutoLevel,
    maxAutoLevel,
    0,
    startDelay,
    1,
    1,
  );
  if (level > -1) {
    return level;
  }
  // No level fits the estimate: default to the declared first level.
  const firstLevel = this.hls.firstLevel;
  const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
  logger.warn(
    `[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`,
  );
  return clamped;
}
/**
 * The externally forced auto level, or -1 when an ABR selection key is
 * cached (meaning ABR state is current and no forced value applies).
 */
public get forcedAutoLevel(): number {
  return this.nextAutoLevelKey ? -1 : this._nextAutoLevel;
}
/**
 * Next auto level for loading. A forced value (set via the setter) wins while
 * the estimator has no reliable data, while no fragment has loaded yet, or
 * while ABR inputs (bw estimate + starvation delay) are unchanged; otherwise
 * the ABR-computed level is cached together with its input key.
 */
public get nextAutoLevel(): number {
  const forcedAutoLevel = this.forcedAutoLevel;
  const bwEstimator = this.bwEstimator;
  const useEstimate = bwEstimator.canEstimate();
  const loadedFirstFrag = this.lastLoadedFragLevel > -1;
  // in case next auto level has been forced, and bw not available or not reliable, return forced value
  if (
    forcedAutoLevel !== -1 &&
    (!useEstimate ||
      !loadedFirstFrag ||
      this.nextAutoLevelKey === this.getAutoLevelKey())
  ) {
    return forcedAutoLevel;
  }
  // compute next level using ABR logic
  const nextABRAutoLevel =
    useEstimate && loadedFirstFrag
      ? this.getNextABRAutoLevel()
      : this.firstAutoLevel;
  // use forced auto level while it hasn't errored more than ABR selection
  if (forcedAutoLevel !== -1) {
    const levels = this.hls.levels;
    if (
      levels.length > Math.max(forcedAutoLevel, nextABRAutoLevel) &&
      levels[forcedAutoLevel].loadError <= levels[nextABRAutoLevel].loadError
    ) {
      return forcedAutoLevel;
    }
  }
  // save result until state has changed
  this._nextAutoLevel = nextABRAutoLevel;
  this.nextAutoLevelKey = this.getAutoLevelKey();
  return nextABRAutoLevel;
}
/** Cache key combining the current BW estimate and starvation delay. */
private getAutoLevelKey(): string {
  const bw = this.getBwEstimate();
  const starvation = this.getStarvationDelay().toFixed(2);
  return `${bw}_${starvation}`;
}
/**
 * Core ABR selection. First tries to find a level that avoids rebuffering
 * entirely (maxStarvationDelay = 0); failing that, retries allowing up to
 * maxStarvationDelay (adjusted for an in-flight bitrate test) of rebuffering;
 * failing that, returns minAutoLevel or the current loadLevel.
 */
private getNextABRAutoLevel(): number {
  const { fragCurrent, partCurrent, hls } = this;
  const { maxAutoLevel, config, minAutoLevel } = hls;
  const currentFragDuration = partCurrent
    ? partCurrent.duration
    : fragCurrent
      ? fragCurrent.duration
      : 0;
  const avgbw = this.getBwEstimate();
  // bufferStarvationDelay is the wall-clock time left until the playback buffer is exhausted.
  const bufferStarvationDelay = this.getStarvationDelay();
  let bwFactor = config.abrBandWidthFactor;
  let bwUpFactor = config.abrBandWidthUpFactor;
  // First, look to see if we can find a level matching with our avg bandwidth AND that could also guarantee no rebuffering at all
  if (bufferStarvationDelay) {
    const bestLevel = this.findBestLevel(
      avgbw,
      minAutoLevel,
      maxAutoLevel,
      bufferStarvationDelay,
      0,
      bwFactor,
      bwUpFactor,
    );
    if (bestLevel >= 0) {
      return bestLevel;
    }
  }
  // not possible to get rid of rebuffering... try to find level that will guarantee less than maxStarvationDelay of rebuffering
  let maxStarvationDelay = currentFragDuration
    ? Math.min(currentFragDuration, config.maxStarvationDelay)
    : config.maxStarvationDelay;
  if (!bufferStarvationDelay) {
    // in case buffer is empty, let's check if previous fragment was loaded to perform a bitrate test
    const bitrateTestDelay = this.bitrateTestDelay;
    if (bitrateTestDelay) {
      // if it is the case, then we need to adjust our max starvation delay using maxLoadingDelay config value
      // max video loading delay used in automatic start level selection :
      // in that mode ABR controller will ensure that video loading time (ie the time to fetch the first fragment at lowest quality level +
      // the time to fetch the fragment at the appropriate quality level is less than ```maxLoadingDelay``` )
      // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
      const maxLoadingDelay = currentFragDuration
        ? Math.min(currentFragDuration, config.maxLoadingDelay)
        : config.maxLoadingDelay;
      maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
      logger.info(
        `[abr] bitrate test took ${Math.round(
          1000 * bitrateTestDelay,
        )}ms, set first fragment max fetchDuration to ${Math.round(
          1000 * maxStarvationDelay,
        )} ms`,
      );
      // don't use conservative factor on bitrate test
      bwFactor = bwUpFactor = 1;
    }
  }
  const bestLevel = this.findBestLevel(
    avgbw,
    minAutoLevel,
    maxAutoLevel,
    bufferStarvationDelay,
    maxStarvationDelay,
    bwFactor,
    bwUpFactor,
  );
  logger.info(
    `[abr] ${
      bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'
    }, optimal quality level ${bestLevel}`,
  );
  if (bestLevel > -1) {
    return bestLevel;
  }
  // If no matching level found, see if min auto level would be a better option
  const minLevel = hls.levels[minAutoLevel];
  const autoLevel = hls.levels[hls.loadLevel];
  // NOTE(review): when either level is undefined, the '<' below evaluates to
  // false and we fall through to loadLevel — presumably intentional; confirm.
  if (minLevel?.bitrate < autoLevel?.bitrate) {
    return minAutoLevel;
  }
  // or if bitrate is not lower, continue to use loadLevel
  return hls.loadLevel;
}
/**
 * Wall-clock seconds of forward buffer remaining at the current playback
 * rate. Returns Infinity when no media element is attached (playback cannot
 * starve before media exists).
 */
private getStarvationDelay(): number {
  const media = this.hls.media;
  if (!media) {
    return Infinity;
  }
  // playbackRate is the absolute value of the playback rate; if media.playbackRate is 0,
  // we use 1 to load as if we're playing back at the normal rate.
  // (The original re-checked `media &&` here, which is dead after the guard above.)
  const playbackRate =
    media.playbackRate !== 0 ? Math.abs(media.playbackRate) : 1.0;
  const bufferInfo = this.hls.mainForwardBufferInfo;
  return (bufferInfo ? bufferInfo.len : 0) / playbackRate;
}
/**
 * Current bandwidth estimate in bits/s, or the configured default estimate
 * before the estimator has collected enough samples.
 */
private getBwEstimate(): number {
  if (this.bwEstimator.canEstimate()) {
    return this.bwEstimator.getEstimate();
  }
  return this.hls.config.abrEwmaDefaultEstimate;
}
/**
 * Scan levels from maxAutoLevel down and return the first (highest) level
 * whose estimated fetch time fits within bufferStarvationDelay +
 * maxStarvationDelay at the factor-adjusted bandwidth, skipping levels that
 * change codec family / video range, regress frame rate, or fail
 * MediaCapabilities checks. Returns -1 when even the lowest level does not
 * fit the time budget.
 *
 * On first selection (no level loaded yet) a start tier is chosen from codec
 * tiers and cached in this.firstSelection.
 */
private findBestLevel(
  currentBw: number,
  minAutoLevel: number,
  maxAutoLevel: number,
  bufferStarvationDelay: number,
  maxStarvationDelay: number,
  bwFactor: number,
  bwUpFactor: number,
): number {
  // Total time budget to fetch one fragment without violating constraints
  const maxFetchDuration: number = bufferStarvationDelay + maxStarvationDelay;
  const lastLoadedFragLevel = this.lastLoadedFragLevel;
  const selectionBaseLevel =
    lastLoadedFragLevel === -1 ? this.hls.firstLevel : lastLoadedFragLevel;
  const { fragCurrent, partCurrent } = this;
  const { levels, allAudioTracks, loadLevel, config } = this.hls;
  if (levels.length === 1) {
    return 0;
  }
  const level: Level | undefined = levels[selectionBaseLevel];
  const live = !!level?.details?.live;
  const firstSelection = loadLevel === -1 || lastLoadedFragLevel === -1;
  let currentCodecSet: string | undefined;
  let currentVideoRange: VideoRange | undefined = 'SDR';
  let currentFrameRate = level?.frameRate || 0;
  const { audioPreference, videoPreference } = config;
  // Lazily build and cache the audio-track grouping used for codec tiers
  const audioTracksByGroup =
    this.audioTracksByGroup ||
    (this.audioTracksByGroup = getAudioTracksByGroup(allAudioTracks));
  if (firstSelection) {
    if (this.firstSelection !== -1) {
      // Reuse the start tier decision made earlier in this session
      return this.firstSelection;
    }
    const codecTiers =
      this.codecTiers ||
      (this.codecTiers = getCodecTiers(
        levels,
        audioTracksByGroup,
        minAutoLevel,
        maxAutoLevel,
      ));
    const startTier = getStartCodecTier(
      codecTiers,
      currentVideoRange,
      currentBw,
      audioPreference,
      videoPreference,
    );
    const { codecSet, videoRanges, minFramerate, minBitrate, preferHDR } =
      startTier;
    currentCodecSet = codecSet;
    currentVideoRange = preferHDR
      ? videoRanges[videoRanges.length - 1]
      : videoRanges[0];
    currentFrameRate = minFramerate;
    // Never start below the tier's minimum bitrate
    currentBw = Math.max(currentBw, minBitrate);
    logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);
  } else {
    // Subsequent selections stay within the currently playing codec/range
    currentCodecSet = level?.codecSet;
    currentVideoRange = level?.videoRange;
  }
  const currentFragDuration = partCurrent
    ? partCurrent.duration
    : fragCurrent
      ? fragCurrent.duration
      : 0;
  const ttfbEstimateSec = this.bwEstimator.getEstimateTTFB() / 1000;
  const levelsSkipped: number[] = [];
  for (let i = maxAutoLevel; i >= minAutoLevel; i--) {
    const levelInfo = levels[i];
    const upSwitch = i > selectionBaseLevel;
    if (!levelInfo) {
      continue;
    }
    // Kick off an async MediaCapabilities check once per level when required;
    // unsupported levels are removed from the level list when the result lands.
    if (
      __USE_MEDIA_CAPABILITIES__ &&
      config.useMediaCapabilities &&
      !levelInfo.supportedResult &&
      !levelInfo.supportedPromise
    ) {
      const mediaCapabilities = navigator.mediaCapabilities as
        | MediaCapabilities
        | undefined;
      if (
        typeof mediaCapabilities?.decodingInfo === 'function' &&
        requiresMediaCapabilitiesDecodingInfo(
          levelInfo,
          audioTracksByGroup,
          currentVideoRange,
          currentFrameRate,
          currentBw,
          audioPreference,
        )
      ) {
        levelInfo.supportedPromise = getMediaDecodingInfoPromise(
          levelInfo,
          audioTracksByGroup,
          mediaCapabilities,
        );
        levelInfo.supportedPromise.then((decodingInfo) => {
          // Guard against resolution after the Hls instance was destroyed
          if (!this.hls) {
            return;
          }
          levelInfo.supportedResult = decodingInfo;
          const levels = this.hls.levels;
          const index = levels.indexOf(levelInfo);
          if (decodingInfo.error) {
            logger.warn(
              `[abr] MediaCapabilities decodingInfo error: "${
                decodingInfo.error
              }" for level ${index} ${JSON.stringify(decodingInfo)}`,
            );
          } else if (!decodingInfo.supported) {
            logger.warn(
              `[abr] Unsupported MediaCapabilities decodingInfo result for level ${index} ${JSON.stringify(
                decodingInfo,
              )}`,
            );
            if (index > -1 && levels.length > 1) {
              logger.log(`[abr] Removing unsupported level ${index}`);
              this.hls.removeLevel(index);
            }
          }
        });
      } else {
        levelInfo.supportedResult = SUPPORTED_INFO_DEFAULT;
      }
    }
    // skip candidates which change codec-family or video-range,
    // and which decrease or increase frame-rate for up and down-switch respectfully
    // NOTE(review): `decodingInfoResults?.[0].smooth` would throw if
    // decodingInfoResults were an empty array — presumably it is always
    // non-empty or undefined; confirm against the supportedResult producer.
    if (
      (currentCodecSet && levelInfo.codecSet !== currentCodecSet) ||
      (currentVideoRange && levelInfo.videoRange !== currentVideoRange) ||
      (upSwitch && currentFrameRate > levelInfo.frameRate) ||
      (!upSwitch &&
        currentFrameRate > 0 &&
        currentFrameRate < levelInfo.frameRate) ||
      (levelInfo.supportedResult &&
        !levelInfo.supportedResult.decodingInfoResults?.[0].smooth)
    ) {
      levelsSkipped.push(i);
      continue;
    }
    const levelDetails = levelInfo.details;
    const avgDuration =
      (partCurrent
        ? levelDetails?.partTarget
        : levelDetails?.averagetargetduration) || currentFragDuration;
    let adjustedbw: number;
    // follow algorithm captured from stagefright :
    // https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
    // Pick the highest bandwidth stream below or equal to estimated bandwidth.
    // consider only 80% of the available bandwidth, but if we are switching up,
    // be even more conservative (70%) to avoid overestimating and immediately
    // switching back.
    if (!upSwitch) {
      adjustedbw = bwFactor * currentBw;
    } else {
      adjustedbw = bwUpFactor * currentBw;
    }
    // Use average bitrate when starvation delay (buffer length) is gt or eq two segment durations and rebuffering is not expected (maxStarvationDelay > 0)
    const bitrate: number =
      currentFragDuration &&
      bufferStarvationDelay >= currentFragDuration * 2 &&
      maxStarvationDelay === 0
        ? levels[i].averageBitrate
        : levels[i].maxBitrate;
    const fetchDuration: number = this.getTimeToLoadFrag(
      ttfbEstimateSec,
      adjustedbw,
      bitrate * avgDuration,
      levelDetails === undefined,
    );
    const canSwitchWithinTolerance =
      // if adjusted bw is greater than level bitrate AND
      adjustedbw >= bitrate &&
      // no level change, or new level has no error history
      (i === lastLoadedFragLevel ||
        (levelInfo.loadError === 0 && levelInfo.fragmentError === 0)) &&
      // fragment fetchDuration unknown OR live stream OR fragment fetchDuration less than max allowed fetch duration, then this level matches
      // we don't account for max Fetch Duration for live streams, this is to avoid switching down when near the edge of live sliding window ...
      // special case to support startLevel = -1 (bitrateTest) on live streams : in that case we should not exit loop so that findBestLevel will return -1
      (fetchDuration <= ttfbEstimateSec ||
        !Number.isFinite(fetchDuration) ||
        (live && !this.bitrateTestDelay) ||
        fetchDuration < maxFetchDuration);
    if (canSwitchWithinTolerance) {
      const forcedAutoLevel = this.forcedAutoLevel;
      if (
        i !== loadLevel &&
        (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)
      ) {
        if (levelsSkipped.length) {
          logger.trace(
            `[abr] Skipped level(s) ${levelsSkipped.join(
              ',',
            )} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${
              levels[levelsSkipped[0]].codecs
            }" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${
              level.codecs
            }" ${currentVideoRange}`,
          );
        }
        logger.info(
          `[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(
            adjustedbw,
          )})-bitrate=${Math.round(
            adjustedbw - bitrate,
          )} ttfb:${ttfbEstimateSec.toFixed(
            1,
          )} avgDuration:${avgDuration.toFixed(
            1,
          )} maxFetchDuration:${maxFetchDuration.toFixed(
            1,
          )} fetchDuration:${fetchDuration.toFixed(
            1,
          )} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`,
        );
      }
      if (firstSelection) {
        this.firstSelection = i;
      }
      // as we are looping from highest to lowest, this will return the best achievable quality level
      return i;
    }
  }
  // not enough time budget even with quality level 0 ... rebuffering might happen
  return -1;
}
/**
 * Force the next auto level; the value is clamped into
 * [minAutoLevel, maxAutoLevel] and the ABR cache key is cleared so the
 * forced value takes effect on the next read.
 */
public set nextAutoLevel(nextLevel: number) {
  const { maxAutoLevel, minAutoLevel } = this.hls;
  const clamped = Math.min(Math.max(nextLevel, minAutoLevel), maxAutoLevel);
  if (this._nextAutoLevel === clamped) {
    return;
  }
  this.nextAutoLevelKey = '';
  this._nextAutoLevel = clamped;
}
}
export default AbrController;

View File

@@ -0,0 +1,981 @@
import BaseStreamController, { State } from './base-stream-controller';
import { Events } from '../events';
import { Bufferable, BufferHelper } from '../utils/buffer-helper';
import { FragmentState } from './fragment-tracker';
import { Level } from '../types/level';
import { PlaylistContextType, PlaylistLevelType } from '../types/loader';
import { Fragment, ElementaryStreamTypes, Part } from '../loader/fragment';
import ChunkCache from '../demux/chunk-cache';
import TransmuxerInterface from '../demux/transmuxer-interface';
import { ChunkMetadata } from '../types/transmuxer';
import { fragmentWithinToleranceTest } from './fragment-finders';
import { alignMediaPlaylistByPDT } from '../utils/discontinuities';
import { ErrorDetails } from '../errors';
import { audioMatchPredicate, matchesOption } from '../utils/rendition-helper';
import type { NetworkComponentAPI } from '../types/component-api';
import type Hls from '../hls';
import type { FragmentTracker } from './fragment-tracker';
import type KeyLoader from '../loader/key-loader';
import type { TransmuxerResult } from '../types/transmuxer';
import type { LevelDetails } from '../loader/level-details';
import type { TrackSet } from '../types/track';
import type {
BufferCreatedData,
AudioTracksUpdatedData,
AudioTrackSwitchingData,
LevelLoadedData,
TrackLoadedData,
BufferAppendingData,
BufferFlushedData,
InitPTSFoundData,
FragLoadedData,
FragParsingMetadataData,
FragParsingUserdataData,
FragBufferedData,
ErrorData,
BufferFlushingData,
} from '../types/events';
import type { MediaPlaylist } from '../types/media-playlist';
const TICK_INTERVAL = 100; // how often to tick in ms

// Audio fragment payload parked until the matching video initPTS (per
// discontinuity counter) is known.
type WaitingForPTSData = {
  frag: Fragment; // audio fragment whose transmuxing is deferred
  part: Part | null; // low-latency part, when part loading is active
  cache: ChunkCache; // payload chunks accumulated while waiting
  complete: boolean; // true once the fragment finished loading
};
class AudioStreamController
extends BaseStreamController
implements NetworkComponentAPI
{
// Main (video) SourceBuffer, used to keep audio within range of video.
private videoBuffer: Bufferable | null = null;
// Discontinuity counter (cc) of the main/video track, for initPTS lookups.
private videoTrackCC: number = -1;
// cc value of the video track at the time waitingData was parked.
private waitingVideoCC: number = -1;
// Track currently represented in the audio buffer.
private bufferedTrack: MediaPlaylist | null = null;
// Track being switched to; non-null while a switch is in flight.
private switchingTrack: MediaPlaylist | null = null;
private trackId: number = -1;
// Fragment parked until the matching video initPTS is known.
private waitingData: WaitingForPTSData | null = null;
// Details of the most recently loaded main (video) level playlist.
private mainDetails: LevelDetails | null = null;
private flushing: boolean = false;
private bufferFlushed: boolean = false;
// AUDIO_TRACK_LOADED data held back until the main playlist has loaded.
private cachedTrackLoadedData: TrackLoadedData | null = null;
/**
 * @param hls - owning Hls instance
 * @param fragmentTracker - shared fragment state tracker
 * @param keyLoader - shared decryption-key loader
 */
constructor(
  hls: Hls,
  fragmentTracker: FragmentTracker,
  keyLoader: KeyLoader,
) {
  super(
    hls,
    fragmentTracker,
    keyLoader,
    '[audio-stream-controller]',
    PlaylistLevelType.AUDIO,
  );
  this._registerListeners();
}
// Detach event listeners before base-class teardown, then drop track state.
protected onHandlerDestroying() {
  this._unregisterListeners();
  super.onHandlerDestroying();
  this.mainDetails = null;
  this.bufferedTrack = null;
  this.switchingTrack = null;
}
// Subscribe to all Hls events this controller reacts to
// (mirror image of _unregisterListeners).
private _registerListeners() {
  const { hls } = this;
  hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.on(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
  hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.on(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
  hls.on(Events.ERROR, this.onError, this);
  hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
  hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
  hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
  hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
  hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
}
// Unsubscribe every event registered in _registerListeners.
private _unregisterListeners() {
  const { hls } = this;
  hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.off(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
  hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.off(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
  hls.off(Events.ERROR, this.onError, this);
  hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
  hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
  hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
  hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  hls.off(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
  hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
}
// INIT_PTS_FOUND is triggered when the video track parsed in the
// stream-controller has a new PTS value; record it per discontinuity
// counter so deferred audio fragments can be transmuxed.
onInitPtsFound(
  event: Events.INIT_PTS_FOUND,
  { frag, id, initPTS, timescale }: InitPTSFoundData,
) {
  // Only the main track's initPTS matters here; it can change on level switch.
  if (id !== 'main') {
    return;
  }
  const cc = frag.cc;
  this.initPTS[cc] = { baseTime: initPTS, timescale };
  this.log(`InitPTS for cc: ${cc} found from main: ${initPTS}`);
  this.videoTrackCC = cc;
  // If we are waiting, tick immediately to unblock audio fragment transmuxing
  if (this.state === State.WAITING_INIT_PTS) {
    this.tick();
  }
}
/**
 * Begin loading at startPosition. With startPosition -1 and a prior playback
 * position, resume from lastCurrentTime instead. Without any tracks loaded,
 * only record the position and remain STOPPED.
 */
startLoad(startPosition: number) {
  if (!this.levels) {
    this.startPosition = startPosition;
    this.state = State.STOPPED;
    return;
  }
  const lastCurrentTime = this.lastCurrentTime;
  this.stopLoad();
  this.setInterval(TICK_INTERVAL);
  if (lastCurrentTime > 0 && startPosition === -1) {
    this.log(
      `Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(
        3,
      )}`,
    );
    startPosition = lastCurrentTime;
    this.state = State.IDLE;
  } else {
    // Fresh start: wait for track details before going IDLE
    this.loadedmetadata = false;
    this.state = State.WAITING_TRACK;
  }
  this.nextLoadPosition =
    this.startPosition =
    this.lastCurrentTime =
      startPosition;
  this.tick();
}
/**
 * State-machine tick. IDLE drives fragment selection; WAITING_TRACK waits
 * for track details; FRAG_LOADING_WAITING_RETRY honours the retry backoff;
 * WAITING_INIT_PTS releases or cancels a fragment parked for video initPTS.
 */
doTick() {
  switch (this.state) {
    case State.IDLE:
      this.doTickIdle();
      break;
    case State.WAITING_TRACK: {
      const { levels, trackId } = this;
      const details = levels?.[trackId]?.details;
      if (details) {
        if (this.waitForCdnTuneIn(details)) {
          break;
        }
        this.state = State.WAITING_INIT_PTS;
      }
      break;
    }
    case State.FRAG_LOADING_WAITING_RETRY: {
      const now = performance.now();
      const retryDate = this.retryDate;
      // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
      if (!retryDate || now >= retryDate || this.media?.seeking) {
        const { levels, trackId } = this;
        this.log('RetryDate reached, switch back to IDLE state');
        this.resetStartWhenNotLoaded(levels?.[trackId] || null);
        this.state = State.IDLE;
      }
      break;
    }
    case State.WAITING_INIT_PTS: {
      // Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
      const waitingData = this.waitingData;
      if (waitingData) {
        const { frag, part, cache, complete } = waitingData;
        if (this.initPTS[frag.cc] !== undefined) {
          // initPTS arrived: replay cached payload through the normal path
          this.waitingData = null;
          this.waitingVideoCC = -1;
          this.state = State.FRAG_LOADING;
          const payload = cache.flush();
          const data: FragLoadedData = {
            frag,
            part,
            payload,
            networkDetails: null,
          };
          this._handleFragmentLoadProgress(data);
          if (complete) {
            super._handleFragmentLoadComplete(data);
          }
        } else if (this.videoTrackCC !== this.waitingVideoCC) {
          // Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
          this.log(
            `Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${this.videoTrackCC}`,
          );
          this.clearWaitingFragment();
        } else {
          // Drop waiting fragment if an earlier fragment is needed
          const pos = this.getLoadPosition();
          const bufferInfo = BufferHelper.bufferInfo(
            this.mediaBuffer,
            pos,
            this.config.maxBufferHole,
          );
          const waitingFragmentAtPosition = fragmentWithinToleranceTest(
            bufferInfo.end,
            this.config.maxFragLookUpTolerance,
            frag,
          );
          if (waitingFragmentAtPosition < 0) {
            this.log(
              `Waiting fragment cc (${frag.cc}) @ ${frag.start} cancelled because another fragment at ${bufferInfo.end} is needed`,
            );
            this.clearWaitingFragment();
          }
        }
      } else {
        this.state = State.IDLE;
      }
    }
  }
  this.onTickEnd();
}
/** Drop any fragment parked for video initPTS and return to IDLE. */
clearWaitingFragment() {
  const parked = this.waitingData;
  if (!parked) {
    return;
  }
  this.fragmentTracker.removeFragment(parked.frag);
  this.waitingData = null;
  this.waitingVideoCC = -1;
  this.state = State.IDLE;
}
// Also discard any fragment parked for video initPTS when loading state resets.
protected resetLoadingState() {
  this.clearWaitingFragment();
  super.resetLoadingState();
}
/** Record the playback position after each tick, once media has buffered data. */
protected onTickEnd() {
  const media = this.media;
  // readyState 0 means nothing buffered yet; nothing useful to record
  if (media && media.readyState) {
    this.lastCurrentTime = media.currentTime;
  }
}
/**
 * IDLE-state work: decide whether an audio fragment should be loaded now,
 * pick it, and kick off the load. Bails out when buffering is suspended,
 * track details are missing, the buffer is full enough, the stream has
 * ended, or audio would run too far ahead of the main (video) buffer.
 */
private doTickIdle() {
  const { hls, levels, media, trackId } = this;
  const config = hls.config;
  // 1. if buffering is suspended
  // 2. if video not attached AND
  //    start fragment already requested OR start frag prefetch not enabled
  // 3. if tracks or track not loaded and selected
  // then exit loop
  // => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop
  if (
    !this.buffering ||
    (!media && (this.startFragRequested || !config.startFragPrefetch)) ||
    !levels?.[trackId]
  ) {
    return;
  }
  const levelInfo = levels[trackId];
  const trackDetails = levelInfo.details;
  if (
    !trackDetails ||
    (trackDetails.live && this.levelLastLoaded !== levelInfo) ||
    this.waitForCdnTuneIn(trackDetails)
  ) {
    this.state = State.WAITING_TRACK;
    return;
  }
  const bufferable = this.mediaBuffer ? this.mediaBuffer : this.media;
  if (this.bufferFlushed && bufferable) {
    this.bufferFlushed = false;
    this.afterBufferFlushed(
      bufferable,
      ElementaryStreamTypes.AUDIO,
      PlaylistLevelType.AUDIO,
    );
  }
  const bufferInfo = this.getFwdBufferInfo(
    bufferable,
    PlaylistLevelType.AUDIO,
  );
  if (bufferInfo === null) {
    return;
  }
  if (!this.switchingTrack && this._streamEnded(bufferInfo, trackDetails)) {
    hls.trigger(Events.BUFFER_EOS, { type: 'audio' });
    this.state = State.ENDED;
    return;
  }
  const mainBufferInfo = this.getFwdBufferInfo(
    this.videoBuffer ? this.videoBuffer : this.media,
    PlaylistLevelType.MAIN,
  );
  const bufferLen = bufferInfo.len;
  const maxBufLen = this.getMaxBufferLength(mainBufferInfo?.len);
  const fragments = trackDetails.fragments;
  const start = fragments[0].start;
  // While flushing, target the playhead rather than the buffer end
  const loadPosition = this.getLoadPosition();
  const targetBufferTime = this.flushing ? loadPosition : bufferInfo.end;
  if (this.switchingTrack && media) {
    const pos = loadPosition;
    // if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
    if (trackDetails.PTSKnown && pos < start) {
      // if everything is buffered from pos to start or if audio buffer upfront, let's seek to start
      if (bufferInfo.end > start || bufferInfo.nextStart) {
        this.log(
          'Alt audio track ahead of main track, seek to start of alt audio track',
        );
        media.currentTime = start + 0.05;
      }
    }
  }
  // if buffer length is less than maxBufLen, or near the end, find a fragment to load
  if (
    bufferLen >= maxBufLen &&
    !this.switchingTrack &&
    targetBufferTime < fragments[fragments.length - 1].start
  ) {
    return;
  }
  let frag = this.getNextFragment(targetBufferTime, trackDetails);
  let atGap = false;
  // Avoid loop loading by using nextLoadPosition set for backtracking and skipping consecutive GAP tags
  if (frag && this.isLoopLoading(frag, targetBufferTime)) {
    atGap = !!frag.gap;
    frag = this.getNextFragmentLoopLoading(
      frag,
      trackDetails,
      bufferInfo,
      PlaylistLevelType.MAIN,
      maxBufLen,
    );
  }
  if (!frag) {
    this.bufferFlushed = true;
    return;
  }
  // Buffer audio up to one target duration ahead of main buffer
  const atBufferSyncLimit =
    mainBufferInfo &&
    frag.start > mainBufferInfo.end + trackDetails.targetduration;
  if (
    atBufferSyncLimit ||
    // Or wait for main buffer after buffing some audio
    (!mainBufferInfo?.len && bufferInfo.len)
  ) {
    // Check fragment-tracker for main fragments since GAP segments do not show up in bufferInfo
    const mainFrag = this.getAppendedFrag(frag.start, PlaylistLevelType.MAIN);
    if (mainFrag === null) {
      return;
    }
    // Bridge gaps in main buffer
    atGap ||=
      !!mainFrag.gap || (!!atBufferSyncLimit && mainBufferInfo.len === 0);
    if (
      (atBufferSyncLimit && !atGap) ||
      (atGap && bufferInfo.nextStart && bufferInfo.nextStart < mainFrag.end)
    ) {
      return;
    }
  }
  this.loadFragment(frag, levelInfo, targetBufferTime);
}
/**
 * Audio buffer target length: at least the configured maximum, extended to
 * match the main buffer length, capped at maxMaxBufferLength.
 */
protected getMaxBufferLength(mainBufferLength?: number): number {
  const configured = super.getMaxBufferLength();
  if (!mainBufferLength) {
    return configured;
  }
  const target = Math.max(configured, mainBufferLength);
  return Math.min(target, this.config.maxMaxBufferLength);
}
// Clear buffer references and flush flags before base-class detach handling.
onMediaDetaching() {
  this.videoBuffer = null;
  this.flushing = false;
  this.bufferFlushed = false;
  super.onMediaDetaching();
}
// Rebuild the Level list whenever the set of audio tracks changes.
onAudioTracksUpdated(
  event: Events.AUDIO_TRACKS_UPDATED,
  { audioTracks }: AudioTracksUpdatedData,
) {
  // Reset tranxmuxer is essential for large context switches (Content Steering)
  this.resetTransmuxer();
  const levels: Level[] = [];
  for (const mediaPlaylist of audioTracks) {
    levels.push(new Level(mediaPlaylist));
  }
  this.levels = levels;
}
/**
 * Handle a track switch request: abort in-flight loads, then either arm
 * loading for an alternate audio rendition or stand down when audio is
 * muxed into the main stream (handled by the stream-controller).
 */
onAudioTrackSwitching(
  event: Events.AUDIO_TRACK_SWITCHING,
  data: AudioTrackSwitchingData,
) {
  // A URL on the incoming track marks it as an alternate (separate) rendition
  const altAudio = !!data.url;
  this.trackId = data.id;
  const fragCurrent = this.fragCurrent;
  if (fragCurrent) {
    fragCurrent.abortRequests();
    this.removeUnbufferedFrags(fragCurrent.start);
  }
  this.resetLoadingState();
  if (altAudio) {
    // Switching to an alternate track: ensure the tick timer is running
    this.setInterval(TICK_INTERVAL);
    this.switchingTrack = data;
    this.state = State.IDLE;
    this.flushAudioIfNeeded(data);
  } else {
    // Main-muxed audio: the transmuxer is no longer needed here
    this.resetTransmuxer();
    this.switchingTrack = null;
    this.bufferedTrack = data;
    this.state = State.STOPPED;
  }
  this.tick();
}
/** Reset all per-stream state when a new manifest starts loading. */
onManifestLoading() {
  this.fragmentTracker.removeAllFragments();
  this.startPosition = 0;
  this.lastCurrentTime = 0;
  this.bufferFlushed = false;
  this.flushing = false;
  this.levels = null;
  this.mainDetails = null;
  this.waitingData = null;
  this.bufferedTrack = null;
  this.cachedTrackLoadedData = null;
  this.switchingTrack = null;
  this.startFragRequested = false;
  this.trackId = -1;
  this.videoTrackCC = -1;
  this.waitingVideoCC = -1;
}
/**
 * Record main playlist details; replay any AUDIO_TRACK_LOADED event that
 * was held back while the main playlist was still loading.
 */
onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
  this.mainDetails = data.details;
  const cached = this.cachedTrackLoadedData;
  if (cached !== null) {
    this.cachedTrackLoadedData = null;
    this.hls.trigger(Events.AUDIO_TRACK_LOADED, cached);
  }
}
/**
 * Audio playlist loaded: align live playlists with the main rendition (by
 * PDT when possible, otherwise by sliding), store the details, compute the
 * start position, and unblock WAITING_TRACK.
 */
onAudioTrackLoaded(event: Events.AUDIO_TRACK_LOADED, data: TrackLoadedData) {
  // Cannot align without main details; defer until LEVEL_LOADED fires
  if (this.mainDetails == null) {
    this.cachedTrackLoadedData = data;
    return;
  }
  const { levels } = this;
  const { details: newDetails, id: trackId } = data;
  if (!levels) {
    this.warn(`Audio tracks were reset while loading level ${trackId}`);
    return;
  }
  this.log(
    `Audio track ${trackId} loaded [${newDetails.startSN},${
      newDetails.endSN
    }]${
      newDetails.lastPartSn
        ? `[part-${newDetails.lastPartSn}-${newDetails.lastPartIndex}]`
        : ''
    },duration:${newDetails.totalduration}`,
  );
  const track = levels[trackId];
  let sliding = 0;
  if (newDetails.live || track.details?.live) {
    this.checkLiveUpdate(newDetails);
    const mainDetails = this.mainDetails;
    if (newDetails.deltaUpdateFailed || !mainDetails) {
      return;
    }
    if (
      !track.details &&
      newDetails.hasProgramDateTime &&
      mainDetails.hasProgramDateTime
    ) {
      // Make sure our audio rendition is aligned with the "main" rendition, using
      // pdt as our reference times.
      alignMediaPlaylistByPDT(newDetails, mainDetails);
      sliding = newDetails.fragments[0].start;
    } else {
      sliding = this.alignPlaylists(
        newDetails,
        track.details,
        this.levelLastLoaded?.details,
      );
    }
  }
  track.details = newDetails;
  this.levelLastLoaded = track;
  // compute start position if we are aligned with the main playlist
  if (!this.startFragRequested && (this.mainDetails || !newDetails.live)) {
    this.setStartPosition(this.mainDetails || newDetails, sliding);
  }
  // only switch back to IDLE state if we were waiting for track to start downloading a new fragment
  if (
    this.state === State.WAITING_TRACK &&
    !this.waitForCdnTuneIn(newDetails)
  ) {
    this.state = State.IDLE;
  }
  // trigger handler right now
  this.tick();
}
/**
 * Feed loaded audio payload to the transmuxer. When the video initPTS for
 * the fragment's discontinuity counter is not yet known, the payload is
 * cached in waitingData and the controller enters WAITING_INIT_PTS.
 */
_handleFragmentLoadProgress(data: FragLoadedData) {
  const { frag, part, payload } = data;
  const { config, trackId, levels } = this;
  if (!levels) {
    this.warn(
      `Audio tracks were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`,
    );
    return;
  }
  const track = levels[trackId] as Level;
  if (!track) {
    this.warn('Audio track is undefined on fragment load progress');
    return;
  }
  const details = track.details as LevelDetails;
  if (!details) {
    this.warn('Audio track details undefined on fragment load progress');
    this.removeUnbufferedFrags(frag.start);
    return;
  }
  // Fall back to AAC-LC when neither config nor playlist declares a codec
  const audioCodec =
    config.defaultAudioCodec || track.audioCodec || 'mp4a.40.2';
  let transmuxer = this.transmuxer;
  if (!transmuxer) {
    transmuxer = this.transmuxer = new TransmuxerInterface(
      this.hls,
      PlaylistLevelType.AUDIO,
      this._handleTransmuxComplete.bind(this),
      this._handleTransmuxerFlush.bind(this),
    );
  }
  // Check if we have video initPTS
  // If not we need to wait for it
  const initPTS = this.initPTS[frag.cc];
  const initSegmentData = frag.initSegment?.data;
  if (initPTS !== undefined) {
    // this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
    // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
    const accurateTimeOffset = false; // details.PTSKnown || !details.live;
    const partIndex = part ? part.index : -1;
    const partial = partIndex !== -1;
    const chunkMeta = new ChunkMetadata(
      frag.level,
      frag.sn as number,
      frag.stats.chunkCount,
      payload.byteLength,
      partIndex,
      partial,
    );
    transmuxer.push(
      payload,
      initSegmentData,
      audioCodec,
      '',
      frag,
      part,
      details.totalduration,
      accurateTimeOffset,
      chunkMeta,
      initPTS,
    );
  } else {
    this.log(
      `Unknown video PTS for cc ${frag.cc}, waiting for video PTS before demuxing audio frag ${frag.sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`,
    );
    // Park (or keep parking) this fragment's chunks until initPTS arrives
    const { cache } = (this.waitingData = this.waitingData || {
      frag,
      part,
      cache: new ChunkCache(),
      complete: false,
    });
    cache.push(new Uint8Array(payload));
    this.waitingVideoCC = this.videoTrackCC;
    this.state = State.WAITING_INIT_PTS;
  }
}
protected _handleFragmentLoadComplete(fragLoadedData: FragLoadedData) {
if (this.waitingData) {
this.waitingData.complete = true;
return;
}
super._handleFragmentLoadComplete(fragLoadedData);
}
onBufferReset(/* event: Events.BUFFER_RESET */) {
// reset reference to sourcebuffers
this.mediaBuffer = this.videoBuffer = null;
this.loadedmetadata = false;
}
onBufferCreated(event: Events.BUFFER_CREATED, data: BufferCreatedData) {
const audioTrack = data.tracks.audio;
if (audioTrack) {
this.mediaBuffer = audioTrack.buffer || null;
}
if (data.tracks.video) {
this.videoBuffer = data.tracks.video.buffer || null;
}
}
  onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
    // Tracks buffering completion: main fragments flip `loadedmetadata`
    // once media is buffered; audio fragments advance fragPrevious and
    // conclude a pending track switch.
    const { frag, part } = data;
    if (frag.type !== PlaylistLevelType.AUDIO) {
      if (!this.loadedmetadata && frag.type === PlaylistLevelType.MAIN) {
        const bufferable = this.videoBuffer || this.media;
        if (bufferable) {
          const bufferedTimeRanges = BufferHelper.getBuffered(bufferable);
          if (bufferedTimeRanges.length) {
            this.loadedmetadata = true;
          }
        }
      }
      return;
    }
    if (this.fragContextChanged(frag)) {
      // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
      // Avoid setting state back to IDLE or concluding the audio switch; otherwise, the switched-to track will not buffer
      this.warn(
        `Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${
          frag.level
        } finished buffering, but was aborted. state: ${
          this.state
        }, audioSwitch: ${
          this.switchingTrack ? this.switchingTrack.name : 'false'
        }`,
      );
      return;
    }
    if (frag.sn !== 'initSegment') {
      this.fragPrevious = frag;
      // A media fragment from the switched-to track has buffered: the switch
      // is complete, so publish the new active track.
      const track = this.switchingTrack;
      if (track) {
        this.bufferedTrack = track;
        this.switchingTrack = null;
        this.hls.trigger(Events.AUDIO_TRACK_SWITCHED, { ...track });
      }
    }
    this.fragBufferedComplete(frag, part);
  }
  private onError(event: Events.ERROR, data: ErrorData) {
    // Routes non-fatal errors to the appropriate recovery path; fatal errors
    // halt this controller by entering the ERROR state.
    if (data.fatal) {
      this.state = State.ERROR;
      return;
    }
    switch (data.details) {
      case ErrorDetails.FRAG_GAP:
      case ErrorDetails.FRAG_PARSING_ERROR:
      case ErrorDetails.FRAG_DECRYPT_ERROR:
      case ErrorDetails.FRAG_LOAD_ERROR:
      case ErrorDetails.FRAG_LOAD_TIMEOUT:
      case ErrorDetails.KEY_LOAD_ERROR:
      case ErrorDetails.KEY_LOAD_TIMEOUT:
        // Fragment/key failures share the base-class retry/skip logic.
        this.onFragmentOrKeyLoadError(PlaylistLevelType.AUDIO, data);
        break;
      case ErrorDetails.AUDIO_TRACK_LOAD_ERROR:
      case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:
      case ErrorDetails.LEVEL_PARSING_ERROR:
        // in case of non fatal error while loading track, if not retrying to load track, switch back to IDLE
        if (
          !data.levelRetry &&
          this.state === State.WAITING_TRACK &&
          data.context?.type === PlaylistContextType.AUDIO_TRACK
        ) {
          this.state = State.IDLE;
        }
        break;
      case ErrorDetails.BUFFER_APPEND_ERROR:
      case ErrorDetails.BUFFER_FULL_ERROR:
        // Only react to buffer errors originating from the audio buffer.
        if (!data.parent || data.parent !== 'audio') {
          return;
        }
        if (data.details === ErrorDetails.BUFFER_APPEND_ERROR) {
          this.resetLoadingState();
          return;
        }
        // Buffer full: shrink the target buffer length; on success flush all
        // buffered audio so it can be refilled within the new bounds.
        if (this.reduceLengthAndFlushBuffer(data)) {
          this.bufferedTrack = null;
          super.flushMainBuffer(0, Number.POSITIVE_INFINITY, 'audio');
        }
        break;
      case ErrorDetails.INTERNAL_EXCEPTION:
        this.recoverWorkerError(data);
        break;
      default:
        break;
    }
  }
private onBufferFlushing(
event: Events.BUFFER_FLUSHING,
{ type }: BufferFlushingData,
) {
if (type !== ElementaryStreamTypes.VIDEO) {
this.flushing = true;
}
}
private onBufferFlushed(
event: Events.BUFFER_FLUSHED,
{ type }: BufferFlushedData,
) {
if (type !== ElementaryStreamTypes.VIDEO) {
this.flushing = false;
this.bufferFlushed = true;
if (this.state === State.ENDED) {
this.state = State.IDLE;
}
const mediaBuffer = this.mediaBuffer || this.media;
if (mediaBuffer) {
this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.AUDIO);
this.tick();
}
}
}
  private _handleTransmuxComplete(transmuxResult: TransmuxerResult) {
    // Receives remuxed output from the transmuxer worker: buffers the init
    // segment and audio samples, and re-emits ID3/text payloads as events.
    const id = 'audio';
    const { hls } = this;
    const { remuxResult, chunkMeta } = transmuxResult;

    const context = this.getCurrentContext(chunkMeta);
    if (!context) {
      this.resetWhenMissingContext(chunkMeta);
      return;
    }
    const { frag, part, level } = context;
    const { details } = level;
    const { audio, text, id3, initSegment } = remuxResult;

    // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
    // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
    if (this.fragContextChanged(frag) || !details) {
      this.fragmentTracker.removeFragment(frag);
      return;
    }

    this.state = State.PARSING;
    if (this.switchingTrack && audio) {
      // Parsed audio for the switch target: conclude the switch (this may
      // flush the previously buffered track's audio).
      this.completeAudioSwitch(this.switchingTrack);
    }

    if (initSegment?.tracks) {
      const mapFragment = frag.initSegment || frag;
      this._bufferInitSegment(
        level,
        initSegment.tracks,
        mapFragment,
        chunkMeta,
      );
      hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
        frag: mapFragment,
        id,
        tracks: initSegment.tracks,
      });
      // Only flush audio from old audio tracks when PTS is known on new audio track
    }
    if (audio) {
      // Record the parsed PTS/DTS range on the part/fragment, then queue the
      // samples for appending to the SourceBuffer.
      const { startPTS, endPTS, startDTS, endDTS } = audio;
      if (part) {
        part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
          startPTS,
          endPTS,
          startDTS,
          endDTS,
        };
      }
      frag.setElementaryStreamInfo(
        ElementaryStreamTypes.AUDIO,
        startPTS,
        endPTS,
        startDTS,
        endDTS,
      );
      this.bufferFragmentData(audio, frag, part, chunkMeta);
    }

    if (id3?.samples?.length) {
      const emittedID3: FragParsingMetadataData = Object.assign(
        {
          id,
          frag,
          details,
        },
        id3,
      );
      hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
    }
    if (text) {
      const emittedText: FragParsingUserdataData = Object.assign(
        {
          id,
          frag,
          details,
        },
        text,
      );
      hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
    }
  }
  private _bufferInitSegment(
    currentLevel: Level,
    tracks: TrackSet,
    frag: Fragment,
    chunkMeta: ChunkMetadata,
  ) {
    // Announces audio codecs to the buffer controller (BUFFER_CODECS) and
    // appends the init segment bytes, then ticks immediately to keep loading.
    if (this.state !== State.PARSING) {
      return;
    }
    // delete any video track found on audio transmuxer
    if (tracks.video) {
      delete tracks.video;
    }

    // include levelCodec in audio and video tracks
    const track = tracks.audio;
    if (!track) {
      return;
    }

    track.id = 'audio';

    const variantAudioCodecs = currentLevel.audioCodec;
    this.log(
      `Init audio buffer, container:${track.container}, codecs[level/parsed]=[${variantAudioCodecs}/${track.codec}]`,
    );
    // SourceBuffer will use track.levelCodec if defined
    // (only when the variant declares exactly one audio codec — a
    // comma-separated list would be ambiguous)
    if (variantAudioCodecs && variantAudioCodecs.split(',').length === 1) {
      track.levelCodec = variantAudioCodecs;
    }
    this.hls.trigger(Events.BUFFER_CODECS, tracks);
    const initSegment = track.initSegment;
    if (initSegment?.byteLength) {
      const segment: BufferAppendingData = {
        type: 'audio',
        frag,
        part: null,
        chunkMeta,
        parent: frag.type,
        data: initSegment,
      };
      this.hls.trigger(Events.BUFFER_APPENDING, segment);
    }
    // trigger handler right now
    this.tickImmediate();
  }
  protected loadFragment(
    frag: Fragment,
    track: Level,
    targetBufferTime: number,
  ) {
    // only load if fragment is not loaded or if in audio switch
    const fragState = this.fragmentTracker.getState(frag);
    this.fragCurrent = frag;

    // we force a frag loading in audio switch as fragment tracker might not have evicted previous frags in case of quick audio switch
    if (
      this.switchingTrack ||
      fragState === FragmentState.NOT_LOADED ||
      fragState === FragmentState.PARTIAL
    ) {
      if (frag.sn === 'initSegment') {
        this._loadInitSegment(frag, track);
      } else if (track.details?.live && !this.initPTS[frag.cc]) {
        // Live stream without a video initPTS for this cc yet: defer loading
        // and align this track's playlist to the main one by PDT if they drift.
        this.log(
          `Waiting for video PTS in continuity counter ${frag.cc} of live stream before loading audio fragment ${frag.sn} of level ${this.trackId}`,
        );
        this.state = State.WAITING_INIT_PTS;
        const mainDetails = this.mainDetails;
        if (
          mainDetails &&
          mainDetails.fragments[0].start !== track.details.fragments[0].start
        ) {
          alignMediaPlaylistByPDT(track.details, mainDetails);
        }
      } else {
        this.startFragRequested = true;
        super.loadFragment(frag, track, targetBufferTime);
      }
    } else {
      this.clearTrackerIfNeeded(frag);
    }
  }
private flushAudioIfNeeded(switchingTrack: MediaPlaylist) {
if (this.media && this.bufferedTrack) {
const { name, lang, assocLang, characteristics, audioCodec, channels } =
this.bufferedTrack;
if (
!matchesOption(
{ name, lang, assocLang, characteristics, audioCodec, channels },
switchingTrack,
audioMatchPredicate,
)
) {
this.log('Switching audio track : flushing all audio');
super.flushMainBuffer(0, Number.POSITIVE_INFINITY, 'audio');
this.bufferedTrack = null;
}
}
}
private completeAudioSwitch(switchingTrack: MediaPlaylist) {
const { hls } = this;
this.flushAudioIfNeeded(switchingTrack);
this.bufferedTrack = switchingTrack;
this.switchingTrack = null;
hls.trigger(Events.AUDIO_TRACK_SWITCHED, { ...switchingTrack });
}
}
export default AudioStreamController;

View File

@@ -0,0 +1,436 @@
import BasePlaylistController from './base-playlist-controller';
import { Events } from '../events';
import { ErrorTypes, ErrorDetails } from '../errors';
import { PlaylistContextType } from '../types/loader';
import { mediaAttributesIdentical } from '../utils/media-option-attributes';
import {
audioMatchPredicate,
findClosestLevelWithAudioGroup,
findMatchingOption,
matchesOption,
} from '../utils/rendition-helper';
import type Hls from '../hls';
import type {
AudioSelectionOption,
MediaPlaylist,
} from '../types/media-playlist';
import type { HlsUrlParameters } from '../types/level';
import type {
ManifestParsedData,
AudioTracksUpdatedData,
ErrorData,
LevelLoadingData,
AudioTrackLoadedData,
LevelSwitchingData,
} from '../types/events';
/**
 * Manages the list of EXT-X-MEDIA audio renditions: filters them by the
 * active variant's AUDIO group(s), selects a track (default, preferred, or
 * API-chosen), and (re)loads the selected track's media playlist.
 */
class AudioTrackController extends BasePlaylistController {
  // All audio renditions parsed from the multivariant playlist.
  private tracks: MediaPlaylist[] = [];
  // AUDIO group-ID(s) referenced by the current level; null until known.
  private groupIds: (string | undefined)[] | null = null;
  // Subset of `tracks` in the active group(s); exposed as `audioTracks`.
  private tracksInGroup: MediaPlaylist[] = [];
  // Index into tracksInGroup of the selected track (-1 = none selected).
  private trackId: number = -1;
  private currentTrack: MediaPlaylist | null = null;
  // When true, the next selection prefers renditions flagged DEFAULT=YES.
  private selectDefaultTrack: boolean = true;

  constructor(hls: Hls) {
    super(hls, '[audio-track-controller]');
    this.registerListeners();
  }

  private registerListeners() {
    const { hls } = this;
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
    hls.on(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
    hls.on(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
    hls.on(Events.ERROR, this.onError, this);
  }

  private unregisterListeners() {
    const { hls } = this;
    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this);
    hls.off(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
    hls.off(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
    hls.off(Events.ERROR, this.onError, this);
  }

  public destroy() {
    this.unregisterListeners();
    this.tracks.length = 0;
    this.tracksInGroup.length = 0;
    this.currentTrack = null;
    super.destroy();
  }

  // Reset all selection state when a new manifest starts loading.
  protected onManifestLoading(): void {
    this.tracks = [];
    this.tracksInGroup = [];
    this.groupIds = null;
    this.currentTrack = null;
    this.trackId = -1;
    this.selectDefaultTrack = true;
  }

  protected onManifestParsed(
    event: Events.MANIFEST_PARSED,
    data: ManifestParsedData,
  ): void {
    this.tracks = data.audioTracks || [];
  }

  // Store loaded playlist details on the matching track and, if it is the
  // selected one, hand off to the base class reload scheduling.
  protected onAudioTrackLoaded(
    event: Events.AUDIO_TRACK_LOADED,
    data: AudioTrackLoadedData,
  ): void {
    const { id, groupId, details } = data;
    const trackInActiveGroup = this.tracksInGroup[id];

    if (!trackInActiveGroup || trackInActiveGroup.groupId !== groupId) {
      this.warn(
        `Audio track with id:${id} and group:${groupId} not found in active group ${trackInActiveGroup?.groupId}`,
      );
      return;
    }

    const curDetails = trackInActiveGroup.details;
    trackInActiveGroup.details = data.details;
    this.log(
      `Audio track ${id} "${trackInActiveGroup.name}" lang:${trackInActiveGroup.lang} group:${groupId} loaded [${details.startSN}-${details.endSN}]`,
    );

    if (id === this.trackId) {
      this.playlistLoaded(id, data, curDetails);
    }
  }

  protected onLevelLoading(
    event: Events.LEVEL_LOADING,
    data: LevelLoadingData,
  ): void {
    this.switchLevel(data.level);
  }

  protected onLevelSwitching(
    event: Events.LEVEL_SWITCHING,
    data: LevelSwitchingData,
  ): void {
    this.switchLevel(data.level);
  }

  // React to a variant change: if its AUDIO group-ID(s) differ from the
  // current ones, rebuild tracksInGroup and (re)select a track.
  private switchLevel(levelIndex: number) {
    const levelInfo = this.hls.levels[levelIndex];
    if (!levelInfo) {
      return;
    }
    const audioGroups = levelInfo.audioGroups || null;
    const currentGroups = this.groupIds;
    let currentTrack = this.currentTrack;
    if (
      !audioGroups ||
      currentGroups?.length !== audioGroups?.length ||
      audioGroups?.some((groupId) => currentGroups?.indexOf(groupId) === -1)
    ) {
      this.groupIds = audioGroups;
      this.trackId = -1;
      this.currentTrack = null;

      const audioTracks = this.tracks.filter(
        (track): boolean =>
          !audioGroups || audioGroups.indexOf(track.groupId) !== -1,
      );
      if (audioTracks.length) {
        // Disable selectDefaultTrack if there are no default tracks
        if (
          this.selectDefaultTrack &&
          !audioTracks.some((track) => track.default)
        ) {
          this.selectDefaultTrack = false;
        }
        // track.id should match hls.audioTracks index
        audioTracks.forEach((track, i) => {
          track.id = i;
        });
      } else if (!currentTrack && !this.tracksInGroup.length) {
        // Do not dispatch AUDIO_TRACKS_UPDATED when there were and are no tracks
        return;
      }
      this.tracksInGroup = audioTracks;

      // Find preferred track
      const audioPreference = this.hls.config.audioPreference;
      if (!currentTrack && audioPreference) {
        const groupIndex = findMatchingOption(
          audioPreference,
          audioTracks,
          audioMatchPredicate,
        );
        if (groupIndex > -1) {
          currentTrack = audioTracks[groupIndex];
        } else {
          // Fall back to matching against all tracks without the
          // codec/channels predicate.
          const allIndex = findMatchingOption(audioPreference, this.tracks);
          currentTrack = this.tracks[allIndex];
        }
      }

      // Select initial track
      let trackId = this.findTrackId(currentTrack);
      if (trackId === -1 && currentTrack) {
        // No match for the previous/preferred track; pick any eligible track.
        trackId = this.findTrackId(null);
      }

      // Dispatch events and load track if needed
      const audioTracksUpdated: AudioTracksUpdatedData = { audioTracks };
      this.log(
        `Updating audio tracks, ${
          audioTracks.length
        } track(s) found in group(s): ${audioGroups?.join(',')}`,
      );
      this.hls.trigger(Events.AUDIO_TRACKS_UPDATED, audioTracksUpdated);

      const selectedTrackId = this.trackId;
      if (trackId !== -1 && selectedTrackId === -1) {
        this.setAudioTrack(trackId);
      } else if (audioTracks.length && selectedTrackId === -1) {
        // Tracks exist but none could be selected: surface a fatal error.
        const error = new Error(
          `No audio track selected for current audio group-ID(s): ${this.groupIds?.join(
            ',',
          )} track count: ${audioTracks.length}`,
        );
        this.warn(error.message);

        this.hls.trigger(Events.ERROR, {
          type: ErrorTypes.MEDIA_ERROR,
          details: ErrorDetails.AUDIO_TRACK_LOAD_ERROR,
          fatal: true,
          error,
        });
      }
    } else if (this.shouldReloadPlaylist(currentTrack)) {
      // Retry playlist loading if no playlist is or has been loaded yet
      this.setAudioTrack(this.trackId);
    }
  }

  protected onError(event: Events.ERROR, data: ErrorData): void {
    if (data.fatal || !data.context) {
      return;
    }

    // Only handle playlist-load errors for the currently selected track in
    // one of the active groups; retry scheduling is done by the base class.
    if (
      data.context.type === PlaylistContextType.AUDIO_TRACK &&
      data.context.id === this.trackId &&
      (!this.groupIds || this.groupIds.indexOf(data.context.groupId) !== -1)
    ) {
      this.requestScheduled = -1;
      this.checkRetry(data);
    }
  }

  get allAudioTracks(): MediaPlaylist[] {
    return this.tracks;
  }

  get audioTracks(): MediaPlaylist[] {
    return this.tracksInGroup;
  }

  get audioTrack(): number {
    return this.trackId;
  }

  set audioTrack(newId: number) {
    // If audio track is selected from API then don't choose from the manifest default track
    this.selectDefaultTrack = false;
    this.setAudioTrack(newId);
  }

  /**
   * Select an audio rendition by preference object. Returns the matched track
   * (or the current one if it already matches), or null when no exact match
   * is available in the active group(s).
   */
  public setAudioOption(
    audioOption: MediaPlaylist | AudioSelectionOption | undefined,
  ): MediaPlaylist | null {
    const hls = this.hls;
    hls.config.audioPreference = audioOption;
    if (audioOption) {
      const allAudioTracks = this.allAudioTracks;
      this.selectDefaultTrack = false;
      if (allAudioTracks.length) {
        // First see if current option matches (no switch op)
        const currentTrack = this.currentTrack;
        if (
          currentTrack &&
          matchesOption(audioOption, currentTrack, audioMatchPredicate)
        ) {
          return currentTrack;
        }
        // Find option in available tracks (tracksInGroup)
        const groupIndex = findMatchingOption(
          audioOption,
          this.tracksInGroup,
          audioMatchPredicate,
        );
        if (groupIndex > -1) {
          const track = this.tracksInGroup[groupIndex];
          this.setAudioTrack(groupIndex);
          return track;
        } else if (currentTrack) {
          // Find option in nearest level audio group
          let searchIndex = hls.loadLevel;
          if (searchIndex === -1) {
            searchIndex = hls.firstAutoLevel;
          }
          const switchIndex = findClosestLevelWithAudioGroup(
            audioOption,
            hls.levels,
            allAudioTracks,
            searchIndex,
            audioMatchPredicate,
          );
          if (switchIndex === -1) {
            // could not find matching variant
            return null;
          }
          // and switch level to acheive the audio group switch
          hls.nextLoadLevel = switchIndex;
        }
        if (audioOption.channels || audioOption.audioCodec) {
          // Could not find a match with codec / channels predicate
          // Find a match without channels or codec
          const withoutCodecAndChannelsMatch = findMatchingOption(
            audioOption,
            allAudioTracks,
          );
          if (withoutCodecAndChannelsMatch > -1) {
            return allAudioTracks[withoutCodecAndChannelsMatch];
          }
        }
      }
    }
    return null;
  }

  // Switch to a track by index in tracksInGroup, firing AUDIO_TRACK_SWITCHING
  // and loading the track's playlist when it is live or not yet loaded.
  private setAudioTrack(newId: number): void {
    const tracks = this.tracksInGroup;

    // check if level idx is valid
    if (newId < 0 || newId >= tracks.length) {
      this.warn(`Invalid audio track id: ${newId}`);
      return;
    }

    // stopping live reloading timer if any
    this.clearTimer();
    this.selectDefaultTrack = false;
    const lastTrack = this.currentTrack;
    const track = tracks[newId];
    const trackLoaded = track.details && !track.details.live;
    if (newId === this.trackId && track === lastTrack && trackLoaded) {
      return;
    }
    this.log(
      `Switching to audio-track ${newId} "${track.name}" lang:${track.lang} group:${track.groupId} channels:${track.channels}`,
    );
    this.trackId = newId;
    this.currentTrack = track;
    this.hls.trigger(Events.AUDIO_TRACK_SWITCHING, { ...track });
    // Do not reload track unless live
    if (trackLoaded) {
      return;
    }
    const hlsUrlParameters = this.switchParams(
      track.url,
      lastTrack?.details,
      track.details,
    );
    this.loadPlaylist(hlsUrlParameters);
  }

  // Find the best candidate index in tracksInGroup for `currentTrack`,
  // relaxing the match criteria in stages: full option match, then identical
  // name/lang/characteristics/codec attrs, then LANGUAGE alone.
  private findTrackId(currentTrack: MediaPlaylist | null): number {
    const audioTracks = this.tracksInGroup;
    for (let i = 0; i < audioTracks.length; i++) {
      const track = audioTracks[i];
      if (this.selectDefaultTrack && !track.default) {
        continue;
      }
      if (
        !currentTrack ||
        matchesOption(currentTrack, track, audioMatchPredicate)
      ) {
        return i;
      }
    }
    if (currentTrack) {
      const { name, lang, assocLang, characteristics, audioCodec, channels } =
        currentTrack;
      for (let i = 0; i < audioTracks.length; i++) {
        const track = audioTracks[i];
        if (
          matchesOption(
            { name, lang, assocLang, characteristics, audioCodec, channels },
            track,
            audioMatchPredicate,
          )
        ) {
          return i;
        }
      }
      for (let i = 0; i < audioTracks.length; i++) {
        const track = audioTracks[i];
        if (
          mediaAttributesIdentical(currentTrack.attrs, track.attrs, [
            'LANGUAGE',
            'ASSOC-LANGUAGE',
            'CHARACTERISTICS',
          ])
        ) {
          return i;
        }
      }
      for (let i = 0; i < audioTracks.length; i++) {
        const track = audioTracks[i];
        if (
          mediaAttributesIdentical(currentTrack.attrs, track.attrs, [
            'LANGUAGE',
          ])
        ) {
          return i;
        }
      }
    }
    return -1;
  }

  // Trigger AUDIO_TRACK_LOADING for the current track, applying LL-HLS
  // delivery directives to the URL when provided.
  protected loadPlaylist(hlsUrlParameters?: HlsUrlParameters): void {
    const audioTrack = this.currentTrack;
    if (this.shouldLoadPlaylist(audioTrack) && audioTrack) {
      super.loadPlaylist();
      const id = audioTrack.id;
      const groupId = audioTrack.groupId as string;
      let url = audioTrack.url;
      if (hlsUrlParameters) {
        try {
          url = hlsUrlParameters.addDirectives(url);
        } catch (error) {
          this.warn(
            `Could not construct new URL with HLS Delivery Directives: ${error}`,
          );
        }
      }
      // track not retrieved yet, or live playlist we need to (re)load it
      this.log(
        `loading audio-track playlist ${id} "${audioTrack.name}" lang:${audioTrack.lang} group:${groupId}`,
      );
      this.clearTimer();
      this.hls.trigger(Events.AUDIO_TRACK_LOADING, {
        url,
        id,
        groupId,
        deliveryDirectives: hlsUrlParameters || null,
      });
    }
  }
}
export default AudioTrackController;

View File

@@ -0,0 +1,348 @@
import type Hls from '../hls';
import type { NetworkComponentAPI } from '../types/component-api';
import { getSkipValue, HlsSkip, HlsUrlParameters, Level } from '../types/level';
import { computeReloadInterval, mergeDetails } from '../utils/level-helper';
import { ErrorData } from '../types/events';
import { getRetryDelay, isTimeoutError } from '../utils/error-helper';
import { NetworkErrorAction } from './error-controller';
import { logger } from '../utils/logger';
import type { LevelDetails } from '../loader/level-details';
import type { MediaPlaylist } from '../types/media-playlist';
import type {
AudioTrackLoadedData,
LevelLoadedData,
TrackLoadedData,
} from '../types/events';
/**
 * Shared base for playlist-driven controllers (levels, audio tracks,
 * subtitle tracks). Owns the live-reload timer, LL-HLS delivery-directive
 * computation (blocking reload / skip / rendition reports), and retry logic.
 */
export default class BasePlaylistController implements NetworkComponentAPI {
  protected hls: Hls;
  // Handle for the scheduled live-reload timeout (-1 = none).
  protected timer: number = -1;
  // performance.now() timestamp at which the next reload is due (-1 = none).
  protected requestScheduled: number = -1;
  // Set by startLoad()/stopLoad(); gates all playlist (re)loading.
  protected canLoad: boolean = false;
  protected log: (msg: any) => void;
  protected warn: (msg: any) => void;

  constructor(hls: Hls, logPrefix: string) {
    this.log = logger.log.bind(logger, `${logPrefix}:`);
    this.warn = logger.warn.bind(logger, `${logPrefix}:`);
    this.hls = hls;
  }

  public destroy(): void {
    this.clearTimer();
    // @ts-ignore
    this.hls = this.log = this.warn = null;
  }

  protected clearTimer(): void {
    if (this.timer !== -1) {
      self.clearTimeout(this.timer);
      this.timer = -1;
    }
  }

  public startLoad(): void {
    this.canLoad = true;
    this.requestScheduled = -1;
    this.loadPlaylist();
  }

  public stopLoad(): void {
    this.canLoad = false;
    this.clearTimer();
  }

  /**
   * Build LL-HLS delivery directives for switching renditions, using the
   * previous playlist's EXT-X-RENDITION-REPORT entry matching `playlistUri`.
   * Returns undefined when no report applies.
   */
  protected switchParams(
    playlistUri: string,
    previous: LevelDetails | undefined,
    current: LevelDetails | undefined,
  ): HlsUrlParameters | undefined {
    const renditionReports = previous?.renditionReports;
    if (renditionReports) {
      let foundIndex = -1;
      for (let i = 0; i < renditionReports.length; i++) {
        const attr = renditionReports[i];
        let uri: string;
        try {
          uri = new self.URL(attr.URI, previous.url).href;
        } catch (error) {
          logger.warn(
            `Could not construct new URL for Rendition Report: ${error}`,
          );
          uri = attr.URI || '';
        }
        // Use exact match. Otherwise, the last partial match, if any, will be used
        // (Playlist URI includes a query string that the Rendition Report does not)
        if (uri === playlistUri) {
          foundIndex = i;
          break;
        } else if (uri === playlistUri.substring(0, uri.length)) {
          foundIndex = i;
        }
      }
      if (foundIndex !== -1) {
        const attr = renditionReports[foundIndex];
        // NOTE(review): parseInt without radix; a falsy parse (NaN) falls back
        // to the previous playlist's values — assumes LAST-MSN/LAST-PART are
        // never legitimately 0 here, or that the fallback is acceptable.
        const msn = parseInt(attr['LAST-MSN']) || previous?.lastPartSn;
        let part = parseInt(attr['LAST-PART']) || previous?.lastPartIndex;
        if (this.hls.config.lowLatencyMode) {
          // If the report is likely stale by more than a part, request one
          // part further ahead.
          const currentGoal = Math.min(
            previous.age - previous.partTarget,
            previous.targetduration,
          );
          if (part >= 0 && currentGoal > previous.partTarget) {
            part += 1;
          }
        }
        const skip = current && getSkipValue(current);
        return new HlsUrlParameters(msn, part >= 0 ? part : undefined, skip);
      }
    }
  }

  protected loadPlaylist(hlsUrlParameters?: HlsUrlParameters): void {
    if (this.requestScheduled === -1) {
      this.requestScheduled = self.performance.now();
    }
    // Loading is handled by the subclasses
  }

  // A playlist should be loaded when loading is enabled, it has a URL, and it
  // is either never-loaded or live.
  protected shouldLoadPlaylist(
    playlist: Level | MediaPlaylist | null | undefined,
  ): boolean {
    return (
      this.canLoad &&
      !!playlist &&
      !!playlist.url &&
      (!playlist.details || playlist.details.live)
    );
  }

  // Reload only when no reload is already scheduled or in flight.
  protected shouldReloadPlaylist(
    playlist: Level | MediaPlaylist | null | undefined,
  ): boolean {
    return (
      this.timer === -1 &&
      this.requestScheduled === -1 &&
      this.shouldLoadPlaylist(playlist)
    );
  }

  /**
   * Common post-load handling: merges live playlist updates, computes LL-HLS
   * blocking-reload directives (possibly reloading immediately), and arms the
   * timer for the next live refresh.
   */
  protected playlistLoaded(
    index: number,
    data: LevelLoadedData | AudioTrackLoadedData | TrackLoadedData,
    previousDetails?: LevelDetails,
  ) {
    const { details, stats } = data;

    // Set last updated date-time
    const now = self.performance.now();
    const elapsed = stats.loading.first
      ? Math.max(0, now - stats.loading.first)
      : 0;
    details.advancedDateTime = Date.now() - elapsed;

    // if current playlist is a live playlist, arm a timer to reload it
    if (details.live || previousDetails?.live) {
      details.reloaded(previousDetails);
      if (previousDetails) {
        this.log(
          `live playlist ${index} ${
            details.advanced
              ? 'REFRESHED ' + details.lastPartSn + '-' + details.lastPartIndex
              : details.updated
                ? 'UPDATED'
                : 'MISSED'
          }`,
        );
      }
      // Merge live playlists to adjust fragment starts and fill in delta playlist skipped segments
      if (previousDetails && details.fragments.length > 0) {
        mergeDetails(previousDetails, details);
      }
      if (!this.canLoad || !details.live) {
        return;
      }
      let deliveryDirectives: HlsUrlParameters | undefined;
      let msn: number | undefined = undefined;
      let part: number | undefined = undefined;
      if (details.canBlockReload && details.endSN && details.advanced) {
        // Load level with LL-HLS delivery directives
        const lowLatencyMode = this.hls.config.lowLatencyMode;
        const lastPartSn = details.lastPartSn;
        const endSn = details.endSN;
        const lastPartIndex = details.lastPartIndex;
        const hasParts = lastPartIndex !== -1;
        const lastPart = lastPartSn === endSn;
        // When low latency mode is disabled, we'll skip part requests once the last part index is found
        const nextSnStartIndex = lowLatencyMode ? 0 : lastPartIndex;
        if (hasParts) {
          msn = lastPart ? endSn + 1 : lastPartSn;
          part = lastPart ? nextSnStartIndex : lastPartIndex + 1;
        } else {
          msn = endSn + 1;
        }
        // Low-Latency CDN Tune-in: "age" header and time since load indicates we're behind by more than one part
        // Update directives to obtain the Playlist that has the estimated additional duration of media
        const lastAdvanced = details.age;
        const cdnAge = lastAdvanced + details.ageHeader;
        let currentGoal = Math.min(
          cdnAge - details.partTarget,
          details.targetduration * 1.5,
        );
        if (currentGoal > 0) {
          if (previousDetails && currentGoal > previousDetails.tuneInGoal) {
            // If we attempted to get the next or latest playlist update, but currentGoal increased,
            // then we either can't catchup, or the "age" header cannot be trusted.
            this.warn(
              `CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`,
            );
            currentGoal = 0;
          } else {
            // Skip ahead by whole segments (and parts) to reach the live edge.
            const segments = Math.floor(currentGoal / details.targetduration);
            msn += segments;
            if (part !== undefined) {
              const parts = Math.round(
                (currentGoal % details.targetduration) / details.partTarget,
              );
              part += parts;
            }
            this.log(
              `CDN Tune-in age: ${
                details.ageHeader
              }s last advanced ${lastAdvanced.toFixed(
                2,
              )}s goal: ${currentGoal} skip sn ${segments} to part ${part}`,
            );
          }
          details.tuneInGoal = currentGoal;
        }
        deliveryDirectives = this.getDeliveryDirectives(
          details,
          data.deliveryDirectives,
          msn,
          part,
        );
        if (lowLatencyMode || !lastPart) {
          // Blocking reload: request the next update immediately instead of
          // waiting for the timer below.
          this.loadPlaylist(deliveryDirectives);
          return;
        }
      } else if (details.canBlockReload || details.canSkipUntil) {
        deliveryDirectives = this.getDeliveryDirectives(
          details,
          data.deliveryDirectives,
          msn,
          part,
        );
      }
      // Schedule the next (non-blocking) reload relative to playback position
      // and distance from the live edge.
      const bufferInfo = this.hls.mainForwardBufferInfo;
      const position = bufferInfo ? bufferInfo.end - bufferInfo.len : 0;
      const distanceToLiveEdgeMs = (details.edge - position) * 1000;
      const reloadInterval = computeReloadInterval(
        details,
        distanceToLiveEdgeMs,
      );
      if (details.updated && now > this.requestScheduled + reloadInterval) {
        this.requestScheduled = stats.loading.start;
      }

      if (msn !== undefined && details.canBlockReload) {
        this.requestScheduled =
          stats.loading.first +
          reloadInterval -
          (details.partTarget * 1000 || 1000);
      } else if (
        this.requestScheduled === -1 ||
        this.requestScheduled + reloadInterval < now
      ) {
        this.requestScheduled = now;
      } else if (this.requestScheduled - now <= 0) {
        this.requestScheduled += reloadInterval;
      }
      let estimatedTimeUntilUpdate = this.requestScheduled - now;
      estimatedTimeUntilUpdate = Math.max(0, estimatedTimeUntilUpdate);
      this.log(
        `reload live playlist ${index} in ${Math.round(
          estimatedTimeUntilUpdate,
        )} ms`,
      );
      // this.log(
      //   `live reload ${details.updated ? 'REFRESHED' : 'MISSED'}
      //   reload in ${estimatedTimeUntilUpdate / 1000}
      //   round trip ${(stats.loading.end - stats.loading.start) / 1000}
      //   diff ${
      //     (reloadInterval -
      //       (estimatedTimeUntilUpdate +
      //         stats.loading.end -
      //         stats.loading.start)) /
      //     1000
      //   }
      //   reload interval ${reloadInterval / 1000}
      //   target duration ${details.targetduration}
      //   distance to edge ${distanceToLiveEdgeMs / 1000}`
      // );

      this.timer = self.setTimeout(
        () => this.loadPlaylist(deliveryDirectives),
        estimatedTimeUntilUpdate,
      );
    } else {
      this.clearTimer();
    }
  }

  // Build directives for the next request; on a failed delta update, retry
  // the same msn/part without skipping so a full playlist is returned.
  private getDeliveryDirectives(
    details: LevelDetails,
    previousDeliveryDirectives: HlsUrlParameters | null,
    msn?: number,
    part?: number,
  ): HlsUrlParameters {
    let skip = getSkipValue(details);
    if (previousDeliveryDirectives?.skip && details.deltaUpdateFailed) {
      msn = previousDeliveryDirectives.msn;
      part = previousDeliveryDirectives.part;
      skip = HlsSkip.No;
    }
    return new HlsUrlParameters(msn, part, skip);
  }

  /**
   * Decide whether a playlist-load error should be retried (per the error
   * controller's action/retryConfig); schedules the retry and marks the
   * error resolved. Returns true when a retry was scheduled or started.
   */
  protected checkRetry(errorEvent: ErrorData): boolean {
    const errorDetails = errorEvent.details;
    const isTimeout = isTimeoutError(errorEvent);
    const errorAction = errorEvent.errorAction;
    const { action, retryCount = 0, retryConfig } = errorAction || {};
    const retry =
      !!errorAction &&
      !!retryConfig &&
      (action === NetworkErrorAction.RetryRequest ||
        (!errorAction.resolved &&
          action === NetworkErrorAction.SendAlternateToPenaltyBox));
    if (retry) {
      this.requestScheduled = -1;
      if (retryCount >= retryConfig.maxNumRetry) {
        return false;
      }
      if (isTimeout && errorEvent.context?.deliveryDirectives) {
        // The LL-HLS request already timed out so retry immediately
        this.warn(
          `Retrying playlist loading ${retryCount + 1}/${
            retryConfig.maxNumRetry
          } after "${errorDetails}" without delivery-directives`,
        );
        this.loadPlaylist();
      } else {
        const delay = getRetryDelay(retryConfig, retryCount);
        // Schedule level/track reload
        this.timer = self.setTimeout(() => this.loadPlaylist(), delay);
        this.warn(
          `Retrying playlist loading ${retryCount + 1}/${
            retryConfig.maxNumRetry
          } after "${errorDetails}" in ${delay}ms`,
        );
      }
      // `levelRetry = true` used to inform other controllers that a retry is happening
      errorEvent.levelRetry = true;
      errorAction.resolved = true;
    }
    return retry;
  }
}

File diff suppressed because it is too large Load Diff

1197
server/node_modules/hls.js/src/controller/buffer-controller.ts generated vendored Executable file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,86 @@
import { logger } from '../utils/logger';
import type {
BufferOperation,
BufferOperationQueues,
SourceBuffers,
SourceBufferName,
} from '../types/buffer';
/**
 * Serializes operations against each SourceBuffer. MSE SourceBuffers reject
 * concurrent mutations, so each named buffer gets a FIFO queue; the head
 * operation runs until its 'updateend' fires, then the next is started.
 */
export default class BufferOperationQueue {
  private buffers: SourceBuffers;
  // One FIFO of pending operations per SourceBuffer name.
  private queues: BufferOperationQueues = {
    video: [],
    audio: [],
    audiovideo: [],
  };

  constructor(sourceBufferReference: SourceBuffers) {
    this.buffers = sourceBufferReference;
  }

  /**
   * Enqueue an operation. If the queue was empty and `pending` is not set,
   * the operation starts immediately; otherwise it runs after the current
   * operation completes.
   */
  public append(
    operation: BufferOperation,
    type: SourceBufferName,
    pending?: boolean,
  ) {
    const queue = this.queues[type];
    queue.push(operation);
    if (queue.length === 1 && !pending) {
      this.executeNext(type);
    }
  }

  /**
   * Place an abort operation at the head of the queue and run it right away,
   * ahead of anything already scheduled.
   */
  public insertAbort(operation: BufferOperation, type: SourceBufferName) {
    const queue = this.queues[type];
    queue.unshift(operation);
    this.executeNext(type);
  }

  /**
   * Returns a promise that resolves once every operation queued before it
   * has completed — i.e. when the SourceBuffer is idle.
   */
  public appendBlocker(type: SourceBufferName): Promise<{}> {
    // Fix: the original `let execute;` was an implicit `any` assigned the raw
    // `resolve` (whose Promise<{}> signature requires an argument). Use a
    // definitely-assigned `() => void` wrapper instead; the Promise executor
    // runs synchronously, so `execute` is set before it is read below.
    let execute!: () => void;
    const promise: Promise<{}> = new Promise((resolve) => {
      execute = () => resolve({});
    });
    const operation: BufferOperation = {
      execute,
      onStart: () => {},
      onComplete: () => {},
      onError: () => {},
    };
    this.append(operation, type);
    return promise;
  }

  /**
   * Start the head operation of the queue for `type`, if any. Errors thrown
   * synchronously are reported via `onError`; the operation is shifted off
   * only when the SourceBuffer is not updating (otherwise the 'updateend'
   * handler performs the shift).
   */
  public executeNext(type: SourceBufferName) {
    const queue = this.queues[type];
    if (queue.length) {
      const operation: BufferOperation = queue[0];
      try {
        // Operations are expected to result in an 'updateend' event being fired. If not, the queue will lock. Operations
        // which do not end with this event must call _onSBUpdateEnd manually
        operation.execute();
      } catch (error) {
        logger.warn(
          `[buffer-operation-queue]: Exception executing "${type}" SourceBuffer operation: ${error}`,
        );
        operation.onError(error);

        // Only shift the current operation off, otherwise the updateend handler will do this for us
        const sb = this.buffers[type];
        if (!sb?.updating) {
          this.shiftAndExecuteNext(type);
        }
      }
    }
  }

  // Remove the finished head operation and start the next one, if any.
  public shiftAndExecuteNext(type: SourceBufferName) {
    this.queues[type].shift();
    this.executeNext(type);
  }

  // Peek at the operation currently executing (head of the queue).
  public current(type: SourceBufferName) {
    return this.queues[type][0];
  }
}

View File

@@ -0,0 +1,319 @@
/*
* cap stream level to media size dimension controller
*/
import { Events } from '../events';
import type { Level } from '../types/level';
import type {
ManifestParsedData,
BufferCodecsData,
MediaAttachingData,
FPSDropLevelCappingData,
LevelsUpdatedData,
} from '../types/events';
import StreamController from './stream-controller';
import { logger } from '../utils/logger';
import type { ComponentAPI } from '../types/component-api';
import type Hls from '../hls';
// Snapshot of a level's dimensions/bitrate that has been excluded from
// capping after an FPS_DROP_LEVEL_CAPPING event.
type RestrictedLevel = { width: number; height: number; bitrate: number };
/**
 * Caps the automatically selected quality level to the rendered player size,
 * so auto-selection never picks a level larger than the display area.
 */
class CapLevelController implements ComponentAPI {
  private hls: Hls;
  // Highest allowed level index for the current player size (Infinity = uncapped).
  private autoLevelCapping: number;
  private firstLevel: number;
  private media: HTMLVideoElement | null;
  private restrictedLevels: RestrictedLevel[];
  // setInterval handle polling the player size once capping has started.
  private timer: number | undefined;
  // Cached bounding rect of the media element; null forces a re-measure.
  private clientRect: { width: number; height: number } | null;
  private streamController?: StreamController;
  constructor(hls: Hls) {
    this.hls = hls;
    this.autoLevelCapping = Number.POSITIVE_INFINITY;
    this.firstLevel = -1;
    this.media = null;
    this.restrictedLevels = [];
    this.timer = undefined;
    this.clientRect = null;
    this.registerListeners();
  }
  public setStreamController(streamController: StreamController) {
    this.streamController = streamController;
  }
  public destroy() {
    if (this.hls) {
      this.unregisterListener();
    }
    if (this.timer) {
      this.stopCapping();
    }
    this.media = null;
    this.clientRect = null;
    // @ts-ignore
    this.hls = this.streamController = null;
  }
  protected registerListeners() {
    const { hls } = this;
    hls.on(Events.FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this);
    hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
    hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
    hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this);
    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  }
  protected unregisterListener() {
    const { hls } = this;
    hls.off(Events.FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this);
    hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
    hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
    hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this);
    hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  }
  protected onFpsDropLevelCapping(
    event: Events.FPS_DROP_LEVEL_CAPPING,
    data: FPSDropLevelCappingData,
  ) {
    // Don't add a restricted level more than once
    const level = this.hls.levels[data.droppedLevel];
    if (this.isLevelAllowed(level)) {
      this.restrictedLevels.push({
        bitrate: level.bitrate,
        height: level.height,
        width: level.width,
      });
    }
  }
  protected onMediaAttaching(
    event: Events.MEDIA_ATTACHING,
    data: MediaAttachingData,
  ) {
    // Capping only applies to <video> elements; audio elements are ignored.
    this.media = data.media instanceof HTMLVideoElement ? data.media : null;
    this.clientRect = null;
    if (this.timer && this.hls.levels.length) {
      this.detectPlayerSize();
    }
  }
  protected onManifestParsed(
    event: Events.MANIFEST_PARSED,
    data: ManifestParsedData,
  ) {
    const hls = this.hls;
    this.restrictedLevels = [];
    this.firstLevel = data.firstLevel;
    if (hls.config.capLevelToPlayerSize && data.video) {
      // Start capping immediately if the manifest has signaled video codecs
      this.startCapping();
    }
  }
  private onLevelsUpdated(
    event: Events.LEVELS_UPDATED,
    data: LevelsUpdatedData,
  ) {
    // Re-evaluate the cap against the new level list if capping is active.
    if (this.timer && Number.isFinite(this.autoLevelCapping)) {
      this.detectPlayerSize();
    }
  }
  // Only activate capping when playing a video stream; otherwise, multi-bitrate audio-only streams will be restricted
  // to the first level
  protected onBufferCodecs(
    event: Events.BUFFER_CODECS,
    data: BufferCodecsData,
  ) {
    const hls = this.hls;
    if (hls.config.capLevelToPlayerSize && data.video) {
      // If the manifest did not signal a video codec capping has been deferred until we're certain video is present
      this.startCapping();
    }
  }
  protected onMediaDetaching() {
    this.stopCapping();
  }
  // Re-measures the player and applies the resulting cap to hls.autoLevelCapping.
  detectPlayerSize() {
    if (this.media) {
      if (this.mediaHeight <= 0 || this.mediaWidth <= 0) {
        // Element reports no size (e.g. detached from DOM); drop the cached
        // rect so the next call re-measures.
        this.clientRect = null;
        return;
      }
      const levels = this.hls.levels;
      if (levels.length) {
        const hls = this.hls;
        const maxLevel = this.getMaxLevel(levels.length - 1);
        if (maxLevel !== this.autoLevelCapping) {
          logger.log(
            `Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`,
          );
        }
        hls.autoLevelCapping = maxLevel;
        if (
          hls.autoLevelCapping > this.autoLevelCapping &&
          this.streamController
        ) {
          // if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
          // usually happen when the user go to the fullscreen mode.
          this.streamController.nextLevelSwitch();
        }
        this.autoLevelCapping = hls.autoLevelCapping;
      }
    }
  }
  /*
   * returns level should be the one with the dimensions equal or greater than the media (player) dimensions (so the video will be downscaled)
   */
  getMaxLevel(capLevelIndex: number): number {
    const levels = this.hls.levels;
    if (!levels.length) {
      return -1;
    }
    const validLevels = levels.filter(
      (level, index) => this.isLevelAllowed(level) && index <= capLevelIndex,
    );
    // Invalidate the cached rect so mediaWidth/mediaHeight below re-measure.
    this.clientRect = null;
    return CapLevelController.getMaxLevelByMediaSize(
      validLevels,
      this.mediaWidth,
      this.mediaHeight,
    );
  }
  // Begin polling the player size once per second (idempotent).
  startCapping() {
    if (this.timer) {
      // Don't reset capping if started twice; this can happen if the manifest signals a video codec
      return;
    }
    this.autoLevelCapping = Number.POSITIVE_INFINITY;
    self.clearInterval(this.timer);
    this.timer = self.setInterval(this.detectPlayerSize.bind(this), 1000);
    this.detectPlayerSize();
  }
  // Stop polling and reset all capping state.
  stopCapping() {
    this.restrictedLevels = [];
    this.firstLevel = -1;
    this.autoLevelCapping = Number.POSITIVE_INFINITY;
    if (this.timer) {
      self.clearInterval(this.timer);
      this.timer = undefined;
    }
  }
  // Returns the (cached) CSS-pixel dimensions of the media element.
  getDimensions(): { width: number; height: number } {
    if (this.clientRect) {
      return this.clientRect;
    }
    const media = this.media;
    const boundsRect = {
      width: 0,
      height: 0,
    };
    if (media) {
      const clientRect = media.getBoundingClientRect();
      boundsRect.width = clientRect.width;
      boundsRect.height = clientRect.height;
      if (!boundsRect.width && !boundsRect.height) {
        // When the media element has no width or height (equivalent to not being in the DOM),
        // then use its width and height attributes (media.width, media.height)
        boundsRect.width =
          clientRect.right - clientRect.left || media.width || 0;
        boundsRect.height =
          clientRect.bottom - clientRect.top || media.height || 0;
      }
    }
    this.clientRect = boundsRect;
    return boundsRect;
  }
  // Player width in device pixels (CSS pixels scaled by contentScaleFactor).
  get mediaWidth(): number {
    return this.getDimensions().width * this.contentScaleFactor;
  }
  // Player height in device pixels (CSS pixels scaled by contentScaleFactor).
  get mediaHeight(): number {
    return this.getDimensions().height * this.contentScaleFactor;
  }
  // devicePixelRatio, unless the config asks to ignore it (then 1).
  get contentScaleFactor(): number {
    let pixelRatio = 1;
    if (!this.hls.config.ignoreDevicePixelRatio) {
      try {
        pixelRatio = self.devicePixelRatio;
      } catch (e) {
        /* no-op */
      }
    }
    return pixelRatio;
  }
  // A level is allowed unless it matches a restricted (FPS-dropped) entry.
  private isLevelAllowed(level: Level): boolean {
    const restrictedLevels = this.restrictedLevels;
    return !restrictedLevels.some((restrictedLevel) => {
      return (
        level.bitrate === restrictedLevel.bitrate &&
        level.width === restrictedLevel.width &&
        level.height === restrictedLevel.height
      );
    });
  }
  /**
   * Returns the index of the first level whose dimensions cover the given
   * media size (at its greatest bandwidth), or the last level if none do.
   */
  static getMaxLevelByMediaSize(
    levels: Array<Level>,
    width: number,
    height: number,
  ): number {
    if (!levels?.length) {
      return -1;
    }
    // Levels can have the same dimensions but differing bandwidths - since levels are ordered, we can look to the next
    // to determine whether we've chosen the greatest bandwidth for the media's dimensions
    const atGreatestBandwidth = (
      curLevel: Level,
      nextLevel: Level | undefined,
    ) => {
      if (!nextLevel) {
        return true;
      }
      return (
        curLevel.width !== nextLevel.width ||
        curLevel.height !== nextLevel.height
      );
    };
    // If we run through the loop without breaking, the media's dimensions are greater than every level, so default to
    // the max level
    let maxLevelIndex = levels.length - 1;
    // Prevent changes in aspect-ratio from causing capping to toggle back and forth
    const squareSize = Math.max(width, height);
    for (let i = 0; i < levels.length; i += 1) {
      const level = levels[i];
      if (
        (level.width >= squareSize || level.height >= squareSize) &&
        atGreatestBandwidth(level, levels[i + 1])
      ) {
        maxLevelIndex = i;
        break;
      }
    }
    return maxLevelIndex;
  }
}
export default CapLevelController;

View File

@@ -0,0 +1,390 @@
import { Events } from '../events';
import Hls from '../hls';
import { Cmcd } from '@svta/common-media-library/cmcd/Cmcd';
import { CmcdObjectType } from '@svta/common-media-library/cmcd/CmcdObjectType';
import { CmcdStreamingFormat } from '@svta/common-media-library/cmcd/CmcdStreamingFormat';
import { appendCmcdHeaders } from '@svta/common-media-library/cmcd/appendCmcdHeaders';
import { appendCmcdQuery } from '@svta/common-media-library/cmcd/appendCmcdQuery';
import { uuid } from '@svta/common-media-library/utils/uuid';
import { BufferHelper } from '../utils/buffer-helper';
import { logger } from '../utils/logger';
import type { ComponentAPI } from '../types/component-api';
import type { Fragment } from '../loader/fragment';
import type { BufferCreatedData, MediaAttachedData } from '../types/events';
import type {
FragmentLoaderContext,
Loader,
LoaderCallbacks,
LoaderConfiguration,
LoaderContext,
PlaylistLoaderContext,
} from '../types/loader';
import type {
FragmentLoaderConstructor,
HlsConfig,
PlaylistLoaderConstructor,
} from '../config';
/**
* Controller to deal with Common Media Client Data (CMCD)
* @see https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf
*/
export default class CMCDController implements ComponentAPI {
  private hls: Hls;
  private config: HlsConfig;
  private media?: HTMLMediaElement;
  // CMCD session id (sid) and content id (cid) attached to every request.
  private sid?: string;
  private cid?: string;
  private useHeaders: boolean = false;
  private includeKeys?: string[];
  // Set once playback has started; drives the startup (su) key.
  private initialized: boolean = false;
  // Set when playback stalls after initialization; drives buffer-starvation (bs).
  private starved: boolean = false;
  private buffering: boolean = true;
  private audioBuffer?: SourceBuffer; // eslint-disable-line no-restricted-globals
  private videoBuffer?: SourceBuffer; // eslint-disable-line no-restricted-globals
  constructor(hls: Hls) {
    this.hls = hls;
    const config = (this.config = hls.config);
    const { cmcd } = config;
    if (cmcd != null) {
      // Wrap the configured loaders so every playlist/fragment request is
      // decorated with CMCD data.
      config.pLoader = this.createPlaylistLoader();
      config.fLoader = this.createFragmentLoader();
      this.sid = cmcd.sessionId || uuid();
      this.cid = cmcd.contentId;
      this.useHeaders = cmcd.useHeaders === true;
      this.includeKeys = cmcd.includeKeys;
      this.registerListeners();
    }
  }
  private registerListeners() {
    const hls = this.hls;
    hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.on(Events.MEDIA_DETACHED, this.onMediaDetached, this);
    hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
  }
  private unregisterListeners() {
    const hls = this.hls;
    hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.off(Events.MEDIA_DETACHED, this.onMediaDetached, this);
    hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
  }
  destroy() {
    this.unregisterListeners();
    this.onMediaDetached();
    // @ts-ignore
    this.hls = this.config = this.audioBuffer = this.videoBuffer = null;
    // @ts-ignore
    this.onWaiting = this.onPlaying = null;
  }
  private onMediaAttached(
    event: Events.MEDIA_ATTACHED,
    data: MediaAttachedData,
  ) {
    this.media = data.media;
    this.media.addEventListener('waiting', this.onWaiting);
    this.media.addEventListener('playing', this.onPlaying);
  }
  private onMediaDetached() {
    if (!this.media) {
      return;
    }
    this.media.removeEventListener('waiting', this.onWaiting);
    this.media.removeEventListener('playing', this.onPlaying);
    // @ts-ignore
    this.media = null;
  }
  private onBufferCreated(
    event: Events.BUFFER_CREATED,
    data: BufferCreatedData,
  ) {
    this.audioBuffer = data.tracks.audio?.buffer;
    this.videoBuffer = data.tracks.video?.buffer;
  }
  private onWaiting = () => {
    if (this.initialized) {
      this.starved = true;
    }
    this.buffering = true;
  };
  private onPlaying = () => {
    if (!this.initialized) {
      this.initialized = true;
    }
    this.buffering = false;
  };
  /**
   * Create baseline CMCD data
   */
  private createData(): Cmcd {
    return {
      v: 1,
      sf: CmcdStreamingFormat.HLS,
      sid: this.sid,
      cid: this.cid,
      pr: this.media?.playbackRate,
      mtp: this.hls.bandwidthEstimate / 1000,
    };
  }
  /**
   * Apply CMCD data to a request.
   */
  private apply(context: LoaderContext, data: Cmcd = {}) {
    // apply baseline data
    Object.assign(data, this.createData());
    const isVideo =
      data.ot === CmcdObjectType.INIT ||
      data.ot === CmcdObjectType.VIDEO ||
      data.ot === CmcdObjectType.MUXED;
    // Report buffer starvation once, on the next video-object request.
    if (this.starved && isVideo) {
      data.bs = true;
      data.su = true;
      this.starved = false;
    }
    if (data.su == null) {
      data.su = this.buffering;
    }
    // TODO: Implement rtp, nrr, nor, dl
    // Optionally filter the payload down to the configured key allow-list.
    const { includeKeys } = this;
    if (includeKeys) {
      data = Object.keys(data).reduce((acc, key) => {
        includeKeys.includes(key) && (acc[key] = data[key]);
        return acc;
      }, {});
    }
    if (this.useHeaders) {
      if (!context.headers) {
        context.headers = {};
      }
      appendCmcdHeaders(context.headers, data);
    } else {
      context.url = appendCmcdQuery(context.url, data);
    }
  }
  /**
   * Apply CMCD data to a manifest request.
   */
  private applyPlaylistData = (context: PlaylistLoaderContext) => {
    try {
      this.apply(context, {
        ot: CmcdObjectType.MANIFEST,
        su: !this.initialized,
      });
    } catch (error) {
      logger.warn('Could not generate manifest CMCD data.', error);
    }
  };
  /**
   * Apply CMCD data to a segment request
   */
  private applyFragmentData = (context: FragmentLoaderContext) => {
    try {
      const fragment = context.frag;
      const level = this.hls.levels[fragment.level];
      const ot = this.getObjectType(fragment);
      const data: Cmcd = {
        d: fragment.duration * 1000,
        ot,
      };
      if (
        ot === CmcdObjectType.VIDEO ||
        ot === CmcdObjectType.AUDIO ||
        ot === CmcdObjectType.MUXED
      ) {
        data.br = level.bitrate / 1000;
        data.tb = this.getTopBandwidth(ot) / 1000;
        data.bl = this.getBufferLength(ot);
      }
      this.apply(context, data);
    } catch (error) {
      logger.warn('Could not generate segment CMCD data.', error);
    }
  };
  /**
   * The CMCD object type.
   */
  private getObjectType(fragment: Fragment): CmcdObjectType | undefined {
    const { type } = fragment;
    if (type === 'subtitle') {
      return CmcdObjectType.TIMED_TEXT;
    }
    if (fragment.sn === 'initSegment') {
      return CmcdObjectType.INIT;
    }
    if (type === 'audio') {
      return CmcdObjectType.AUDIO;
    }
    if (type === 'main') {
      // Without separate audio tracks, the main playlist carries muxed A/V.
      if (!this.hls.audioTracks.length) {
        return CmcdObjectType.MUXED;
      }
      return CmcdObjectType.VIDEO;
    }
    return undefined;
  }
  /**
   * Get the highest bitrate.
   */
  private getTopBandwidth(type: CmcdObjectType) {
    let bitrate: number = 0;
    let levels;
    const hls = this.hls;
    if (type === CmcdObjectType.AUDIO) {
      levels = hls.audioTracks;
    } else {
      // Only consider levels up to the ABR cap when one is in effect.
      const max = hls.maxAutoLevel;
      const len = max > -1 ? max + 1 : hls.levels.length;
      levels = hls.levels.slice(0, len);
    }
    for (const level of levels) {
      if (level.bitrate > bitrate) {
        bitrate = level.bitrate;
      }
    }
    return bitrate > 0 ? bitrate : NaN;
  }
  /**
   * Get the buffer length for a media type in milliseconds
   */
  private getBufferLength(type: CmcdObjectType) {
    const media = this.hls.media;
    const buffer =
      type === CmcdObjectType.AUDIO ? this.audioBuffer : this.videoBuffer;
    if (!buffer || !media) {
      return NaN;
    }
    const info = BufferHelper.bufferInfo(
      buffer,
      media.currentTime,
      this.config.maxBufferHole,
    );
    return info.len * 1000;
  }
  /**
   * Create a playlist loader
   */
  private createPlaylistLoader(): PlaylistLoaderConstructor | undefined {
    const { pLoader } = this.config;
    const apply = this.applyPlaylistData;
    const Ctor = pLoader || (this.config.loader as PlaylistLoaderConstructor);
    return class CmcdPlaylistLoader {
      private loader: Loader<PlaylistLoaderContext>;
      constructor(config: HlsConfig) {
        this.loader = new Ctor(config);
      }
      get stats() {
        return this.loader.stats;
      }
      get context() {
        return this.loader.context;
      }
      destroy() {
        this.loader.destroy();
      }
      abort() {
        this.loader.abort();
      }
      load(
        context: PlaylistLoaderContext,
        config: LoaderConfiguration,
        callbacks: LoaderCallbacks<PlaylistLoaderContext>,
      ) {
        apply(context);
        this.loader.load(context, config, callbacks);
      }
    };
  }
  /**
   * Create a fragment loader
   */
  private createFragmentLoader(): FragmentLoaderConstructor | undefined {
    const { fLoader } = this.config;
    const apply = this.applyFragmentData;
    const Ctor = fLoader || (this.config.loader as FragmentLoaderConstructor);
    return class CmcdFragmentLoader {
      private loader: Loader<FragmentLoaderContext>;
      constructor(config: HlsConfig) {
        this.loader = new Ctor(config);
      }
      get stats() {
        return this.loader.stats;
      }
      get context() {
        return this.loader.context;
      }
      destroy() {
        this.loader.destroy();
      }
      abort() {
        this.loader.abort();
      }
      load(
        context: FragmentLoaderContext,
        config: LoaderConfiguration,
        callbacks: LoaderCallbacks<FragmentLoaderContext>,
      ) {
        apply(context);
        this.loader.load(context, config, callbacks);
      }
    };
  }
}

View File

@@ -0,0 +1,602 @@
import { Events } from '../events';
import { Level } from '../types/level';
import { reassignFragmentLevelIndexes } from '../utils/level-helper';
import { AttrList } from '../utils/attr-list';
import { ErrorActionFlags, NetworkErrorAction } from './error-controller';
import { logger } from '../utils/logger';
import {
PlaylistContextType,
type Loader,
type LoaderCallbacks,
type LoaderConfiguration,
type LoaderContext,
type LoaderResponse,
type LoaderStats,
} from '../types/loader';
import type Hls from '../hls';
import type { NetworkComponentAPI } from '../types/component-api';
import type {
SteeringManifestLoadedData,
ErrorData,
ManifestLoadedData,
ManifestParsedData,
} from '../types/events';
import type { RetryConfig } from '../config';
import type { MediaAttributes, MediaPlaylist } from '../types/media-playlist';
// Parsed Content Steering manifest: TTL is the reload interval in seconds,
// PATHWAY-PRIORITY lists pathway ids in preference order.
export type SteeringManifest = {
  VERSION: 1;
  TTL: number;
  'RELOAD-URI'?: string;
  'PATHWAY-PRIORITY': string[];
  'PATHWAY-CLONES'?: PathwayClone[];
};
// Describes a pathway synthesized from an existing one (BASE-ID) under a new
// ID, with URI rewriting rules applied to its variants/renditions.
export type PathwayClone = {
  'BASE-ID': string;
  ID: string;
  'URI-REPLACEMENT': UriReplacement;
};
// URI rewriting rules for cloned pathways: per-stable-id full-URI overrides
// take precedence; otherwise HOST substitution and query PARAMS apply.
export type UriReplacement = {
  HOST?: string;
  PARAMS?: { [queryParameter: string]: string };
  'PER-VARIANT-URIS'?: { [stableVariantId: string]: string };
  'PER-RENDITION-URIS'?: { [stableRenditionId: string]: string };
};
// How long (ms) an errored Pathway stays excluded from selection (5 minutes).
const PATHWAY_PENALTY_DURATION_MS = 300000;
/**
 * Loads and applies HLS Content Steering manifests: selects a Pathway,
 * penalizes failing Pathways, and clones Pathways as instructed by the
 * steering server.
 */
export default class ContentSteeringController implements NetworkComponentAPI {
  private readonly hls: Hls;
  private log: (msg: any) => void;
  private loader: Loader<LoaderContext> | null = null;
  // Steering manifest URI (from the multivariant playlist or RELOAD-URI).
  private uri: string | null = null;
  // Currently selected Pathway id ('.' is the implicit default Pathway).
  private pathwayId: string = '.';
  private pathwayPriority: string[] | null = null;
  // Reload interval (TTL) in seconds.
  private timeToLoad: number = 300;
  private reloadTimer: number = -1;
  // performance.now() of the last successful steering load (0 = never loaded).
  private updated: number = 0;
  private started: boolean = false;
  private enabled: boolean = true;
  private levels: Level[] | null = null;
  private audioTracks: MediaPlaylist[] | null = null;
  private subtitleTracks: MediaPlaylist[] | null = null;
  // Pathway id -> performance.now() at which it was penalized.
  private penalizedPathways: { [pathwayId: string]: number } = {};
  constructor(hls: Hls) {
    this.hls = hls;
    this.log = logger.log.bind(logger, `[content-steering]:`);
    this.registerListeners();
  }
  private registerListeners() {
    const hls = this.hls;
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
    hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.on(Events.ERROR, this.onError, this);
  }
  private unregisterListeners() {
    const hls = this.hls;
    if (!hls) {
      return;
    }
    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
    hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.off(Events.ERROR, this.onError, this);
  }
  // Begin (or resume) steering manifest loading, honoring the remaining TTL
  // from the last successful load.
  startLoad() {
    this.started = true;
    this.clearTimeout();
    if (this.enabled && this.uri) {
      if (this.updated) {
        const ttl = this.timeToLoad * 1000 - (performance.now() - this.updated);
        if (ttl > 0) {
          this.scheduleRefresh(this.uri, ttl);
          return;
        }
      }
      this.loadSteeringManifest(this.uri);
    }
  }
  stopLoad() {
    this.started = false;
    if (this.loader) {
      this.loader.destroy();
      this.loader = null;
    }
    this.clearTimeout();
  }
  clearTimeout() {
    if (this.reloadTimer !== -1) {
      self.clearTimeout(this.reloadTimer);
      this.reloadTimer = -1;
    }
  }
  destroy() {
    this.unregisterListeners();
    this.stopLoad();
    // @ts-ignore
    this.hls = null;
    this.levels = this.audioTracks = this.subtitleTracks = null;
  }
  removeLevel(levelToRemove: Level) {
    const levels = this.levels;
    if (levels) {
      this.levels = levels.filter((level) => level !== levelToRemove);
    }
  }
  // Reset all steering state when a new manifest begins loading.
  private onManifestLoading() {
    this.stopLoad();
    this.enabled = true;
    this.timeToLoad = 300;
    this.updated = 0;
    this.uri = null;
    this.pathwayId = '.';
    this.levels = this.audioTracks = this.subtitleTracks = null;
  }
  private onManifestLoaded(
    event: Events.MANIFEST_LOADED,
    data: ManifestLoadedData,
  ) {
    const { contentSteering } = data;
    if (contentSteering === null) {
      return;
    }
    this.pathwayId = contentSteering.pathwayId;
    this.uri = contentSteering.uri;
    if (this.started) {
      this.startLoad();
    }
  }
  private onManifestParsed(
    event: Events.MANIFEST_PARSED,
    data: ManifestParsedData,
  ) {
    this.audioTracks = data.audioTracks;
    this.subtitleTracks = data.subtitleTracks;
  }
  // Penalize the Pathway associated with the error and attempt to switch to
  // the next non-penalized Pathway in priority order.
  private onError(event: Events.ERROR, data: ErrorData) {
    const { errorAction } = data;
    if (
      errorAction?.action === NetworkErrorAction.SendAlternateToPenaltyBox &&
      errorAction.flags === ErrorActionFlags.MoveAllAlternatesMatchingHost
    ) {
      const levels = this.levels;
      let pathwayPriority = this.pathwayPriority;
      let errorPathway = this.pathwayId;
      if (data.context) {
        const { groupId, pathwayId, type } = data.context;
        if (groupId && levels) {
          errorPathway = this.getPathwayForGroupId(groupId, type, errorPathway);
        } else if (pathwayId) {
          errorPathway = pathwayId;
        }
      }
      if (!(errorPathway in this.penalizedPathways)) {
        this.penalizedPathways[errorPathway] = performance.now();
      }
      if (!pathwayPriority && levels) {
        // If PATHWAY-PRIORITY was not provided, list pathways for error handling
        pathwayPriority = levels.reduce((pathways, level) => {
          if (pathways.indexOf(level.pathwayId) === -1) {
            pathways.push(level.pathwayId);
          }
          return pathways;
        }, [] as string[]);
      }
      if (pathwayPriority && pathwayPriority.length > 1) {
        this.updatePathwayPriority(pathwayPriority);
        // Resolved only if the switch actually moved off the errored Pathway.
        errorAction.resolved = this.pathwayId !== errorPathway;
      }
      if (!errorAction.resolved) {
        logger.warn(
          `Could not resolve ${data.details} ("${
            data.error.message
          }") with content-steering for Pathway: ${errorPathway} levels: ${
            levels ? levels.length : levels
          } priorities: ${JSON.stringify(
            pathwayPriority,
          )} penalized: ${JSON.stringify(this.penalizedPathways)}`,
        );
      }
    }
  }
  // Keeps only the levels of the active Pathway, falling back to the first
  // level's Pathway when the configured one has no levels.
  public filterParsedLevels(levels: Level[]): Level[] {
    // Filter levels to only include those that are in the initial pathway
    this.levels = levels;
    let pathwayLevels = this.getLevelsForPathway(this.pathwayId);
    if (pathwayLevels.length === 0) {
      const pathwayId = levels[0].pathwayId;
      this.log(
        `No levels found in Pathway ${this.pathwayId}. Setting initial Pathway to "${pathwayId}"`,
      );
      pathwayLevels = this.getLevelsForPathway(pathwayId);
      this.pathwayId = pathwayId;
    }
    if (pathwayLevels.length !== levels.length) {
      this.log(
        `Found ${pathwayLevels.length}/${levels.length} levels in Pathway "${this.pathwayId}"`,
      );
    }
    return pathwayLevels;
  }
  private getLevelsForPathway(pathwayId: string): Level[] {
    if (this.levels === null) {
      return [];
    }
    return this.levels.filter((level) => pathwayId === level.pathwayId);
  }
  // Switch to the first priority Pathway that is not penalized and has levels,
  // preserving the currently selected level index across the switch.
  private updatePathwayPriority(pathwayPriority: string[]) {
    this.pathwayPriority = pathwayPriority;
    let levels: Level[] | undefined;
    // Evaluate if we should remove the pathway from the penalized list
    const penalizedPathways = this.penalizedPathways;
    const now = performance.now();
    Object.keys(penalizedPathways).forEach((pathwayId) => {
      if (now - penalizedPathways[pathwayId] > PATHWAY_PENALTY_DURATION_MS) {
        delete penalizedPathways[pathwayId];
      }
    });
    for (let i = 0; i < pathwayPriority.length; i++) {
      const pathwayId = pathwayPriority[i];
      if (pathwayId in penalizedPathways) {
        continue;
      }
      if (pathwayId === this.pathwayId) {
        return;
      }
      const selectedIndex = this.hls.nextLoadLevel;
      const selectedLevel: Level = this.hls.levels[selectedIndex];
      levels = this.getLevelsForPathway(pathwayId);
      if (levels.length > 0) {
        this.log(`Setting Pathway to "${pathwayId}"`);
        this.pathwayId = pathwayId;
        reassignFragmentLevelIndexes(levels);
        this.hls.trigger(Events.LEVELS_UPDATED, { levels });
        // Set LevelController's level to trigger LEVEL_SWITCHING which loads playlist if needed
        const levelAfterChange = this.hls.levels[selectedIndex];
        if (selectedLevel && levelAfterChange && this.levels) {
          if (
            levelAfterChange.attrs['STABLE-VARIANT-ID'] !==
              selectedLevel.attrs['STABLE-VARIANT-ID'] &&
            levelAfterChange.bitrate !== selectedLevel.bitrate
          ) {
            this.log(
              `Unstable Pathways change from bitrate ${selectedLevel.bitrate} to ${levelAfterChange.bitrate}`,
            );
          }
          this.hls.nextLoadLevel = selectedIndex;
        }
        break;
      }
    }
  }
  // Finds the Pathway owning the given audio/subtitle group id, preferring
  // levels of the default Pathway.
  private getPathwayForGroupId(
    groupId: string,
    type: PlaylistContextType,
    defaultPathway: string,
  ): string {
    const levels = this.getLevelsForPathway(defaultPathway).concat(
      this.levels || [],
    );
    for (let i = 0; i < levels.length; i++) {
      if (
        (type === PlaylistContextType.AUDIO_TRACK &&
          levels[i].hasAudioGroup(groupId)) ||
        (type === PlaylistContextType.SUBTITLE_TRACK &&
          levels[i].hasSubtitleGroup(groupId))
      ) {
        return levels[i].pathwayId;
      }
    }
    return defaultPathway;
  }
  // Materialize PATHWAY-CLONES: duplicate each base Pathway's levels and
  // rendition groups under the clone id with rewritten URIs.
  private clonePathways(pathwayClones: PathwayClone[]) {
    const levels = this.levels;
    if (!levels) {
      return;
    }
    const audioGroupCloneMap: Record<string, string> = {};
    const subtitleGroupCloneMap: Record<string, string> = {};
    pathwayClones.forEach((pathwayClone) => {
      const {
        ID: cloneId,
        'BASE-ID': baseId,
        'URI-REPLACEMENT': uriReplacement,
      } = pathwayClone;
      // Skip clones that already exist in the level list.
      if (levels.some((level) => level.pathwayId === cloneId)) {
        return;
      }
      const clonedVariants = this.getLevelsForPathway(baseId).map(
        (baseLevel) => {
          const attributes = new AttrList(baseLevel.attrs);
          attributes['PATHWAY-ID'] = cloneId;
          const clonedAudioGroupId: string | undefined =
            attributes.AUDIO && `${attributes.AUDIO}_clone_${cloneId}`;
          const clonedSubtitleGroupId: string | undefined =
            attributes.SUBTITLES && `${attributes.SUBTITLES}_clone_${cloneId}`;
          if (clonedAudioGroupId) {
            audioGroupCloneMap[attributes.AUDIO] = clonedAudioGroupId;
            attributes.AUDIO = clonedAudioGroupId;
          }
          if (clonedSubtitleGroupId) {
            subtitleGroupCloneMap[attributes.SUBTITLES] = clonedSubtitleGroupId;
            attributes.SUBTITLES = clonedSubtitleGroupId;
          }
          const url = performUriReplacement(
            baseLevel.uri,
            attributes['STABLE-VARIANT-ID'],
            'PER-VARIANT-URIS',
            uriReplacement,
          );
          const clonedLevel = new Level({
            attrs: attributes,
            audioCodec: baseLevel.audioCodec,
            bitrate: baseLevel.bitrate,
            height: baseLevel.height,
            name: baseLevel.name,
            url,
            videoCodec: baseLevel.videoCodec,
            width: baseLevel.width,
          });
          if (baseLevel.audioGroups) {
            for (let i = 1; i < baseLevel.audioGroups.length; i++) {
              clonedLevel.addGroupId(
                'audio',
                `${baseLevel.audioGroups[i]}_clone_${cloneId}`,
              );
            }
          }
          if (baseLevel.subtitleGroups) {
            for (let i = 1; i < baseLevel.subtitleGroups.length; i++) {
              clonedLevel.addGroupId(
                'text',
                `${baseLevel.subtitleGroups[i]}_clone_${cloneId}`,
              );
            }
          }
          return clonedLevel;
        },
      );
      levels.push(...clonedVariants);
      cloneRenditionGroups(
        this.audioTracks,
        audioGroupCloneMap,
        uriReplacement,
        cloneId,
      );
      cloneRenditionGroups(
        this.subtitleTracks,
        subtitleGroupCloneMap,
        uriReplacement,
        cloneId,
      );
    });
  }
  // Fetch and apply the steering manifest at `uri`, then schedule the next
  // refresh per its TTL.
  private loadSteeringManifest(uri: string) {
    const config = this.hls.config;
    const Loader = config.loader;
    if (this.loader) {
      this.loader.destroy();
    }
    this.loader = new Loader(config) as Loader<LoaderContext>;
    let url: URL;
    try {
      url = new self.URL(uri);
    } catch (error) {
      this.enabled = false;
      this.log(`Failed to parse Steering Manifest URI: ${uri}`);
      return;
    }
    if (url.protocol !== 'data:') {
      // Report current pathway and throughput to the steering server.
      const throughput =
        (this.hls.bandwidthEstimate || config.abrEwmaDefaultEstimate) | 0;
      url.searchParams.set('_HLS_pathway', this.pathwayId);
      url.searchParams.set('_HLS_throughput', '' + throughput);
    }
    const context: LoaderContext = {
      responseType: 'json',
      url: url.href,
    };
    const loadPolicy = config.steeringManifestLoadPolicy.default;
    const legacyRetryCompatibility: RetryConfig | Record<string, void> =
      loadPolicy.errorRetry || loadPolicy.timeoutRetry || {};
    const loaderConfig: LoaderConfiguration = {
      loadPolicy,
      timeout: loadPolicy.maxLoadTimeMs,
      maxRetry: legacyRetryCompatibility.maxNumRetry || 0,
      retryDelay: legacyRetryCompatibility.retryDelayMs || 0,
      maxRetryDelay: legacyRetryCompatibility.maxRetryDelayMs || 0,
    };
    const callbacks: LoaderCallbacks<LoaderContext> = {
      onSuccess: (
        response: LoaderResponse,
        stats: LoaderStats,
        context: LoaderContext,
        networkDetails: any,
      ) => {
        this.log(`Loaded steering manifest: "${url}"`);
        const steeringData = response.data as SteeringManifest;
        if (steeringData.VERSION !== 1) {
          this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
          return;
        }
        this.updated = performance.now();
        this.timeToLoad = steeringData.TTL;
        const {
          'RELOAD-URI': reloadUri,
          'PATHWAY-CLONES': pathwayClones,
          'PATHWAY-PRIORITY': pathwayPriority,
        } = steeringData;
        if (reloadUri) {
          try {
            // RELOAD-URI may be relative to the steering manifest URL.
            this.uri = new self.URL(reloadUri, url).href;
          } catch (error) {
            this.enabled = false;
            this.log(
              `Failed to parse Steering Manifest RELOAD-URI: ${reloadUri}`,
            );
            return;
          }
        }
        this.scheduleRefresh(this.uri || context.url);
        if (pathwayClones) {
          this.clonePathways(pathwayClones);
        }
        const loadedSteeringData: SteeringManifestLoadedData = {
          steeringManifest: steeringData,
          url: url.toString(),
        };
        this.hls.trigger(Events.STEERING_MANIFEST_LOADED, loadedSteeringData);
        if (pathwayPriority) {
          this.updatePathwayPriority(pathwayPriority);
        }
      },
      onError: (
        error: { code: number; text: string },
        context: LoaderContext,
        networkDetails: any,
        stats: LoaderStats,
      ) => {
        this.log(
          `Error loading steering manifest: ${error.code} ${error.text} (${context.url})`,
        );
        this.stopLoad();
        if (error.code === 410) {
          // 410 Gone: steering is permanently disabled for this session.
          this.enabled = false;
          this.log(`Steering manifest ${context.url} no longer available`);
          return;
        }
        let ttl = this.timeToLoad * 1000;
        if (error.code === 429) {
          const loader = this.loader;
          if (typeof loader?.getResponseHeader === 'function') {
            const retryAfter = loader.getResponseHeader('Retry-After');
            if (retryAfter) {
              ttl = parseFloat(retryAfter) * 1000;
            }
          }
          // NOTE(review): on 429 this returns without scheduling a refresh,
          // even though a Retry-After TTL was just computed — confirm intended.
          this.log(`Steering manifest ${context.url} rate limited`);
          return;
        }
        this.scheduleRefresh(this.uri || context.url, ttl);
      },
      onTimeout: (
        stats: LoaderStats,
        context: LoaderContext,
        networkDetails: any,
      ) => {
        this.log(`Timeout loading steering manifest (${context.url})`);
        this.scheduleRefresh(this.uri || context.url);
      },
    };
    this.log(`Requesting steering manifest: ${url}`);
    this.loader.load(context, loaderConfig, callbacks);
  }
  // Arrange the next steering manifest load after `ttlMs`; if playback has
  // ended, keep deferring by the TTL instead of loading.
  private scheduleRefresh(uri: string, ttlMs: number = this.timeToLoad * 1000) {
    this.clearTimeout();
    this.reloadTimer = self.setTimeout(() => {
      const media = this.hls?.media;
      if (media && !media.ended) {
        this.loadSteeringManifest(uri);
        return;
      }
      this.scheduleRefresh(uri, this.timeToLoad * 1000);
    }, ttlMs);
  }
}
/**
 * Appends cloned rendition tracks (audio or subtitles) for every group listed
 * in `groupCloneMap`, applying the clone's URI-REPLACEMENT rules and tagging
 * each clone with the clone Pathway id. Mutates `tracks` in place; a null
 * track list is a no-op.
 */
function cloneRenditionGroups(
  tracks: MediaPlaylist[] | null,
  groupCloneMap: Record<string, string>,
  uriReplacement: UriReplacement,
  cloneId: string,
) {
  if (!tracks) {
    return;
  }
  for (const sourceGroupId of Object.keys(groupCloneMap)) {
    const clones: MediaPlaylist[] = [];
    for (const track of tracks) {
      if (track.groupId !== sourceGroupId) {
        continue;
      }
      const clonedTrack = Object.assign({}, track);
      // Details must be re-fetched for the rewritten URI.
      clonedTrack.details = undefined;
      clonedTrack.attrs = new AttrList(clonedTrack.attrs) as MediaAttributes;
      clonedTrack.url = clonedTrack.attrs.URI = performUriReplacement(
        track.url,
        track.attrs['STABLE-RENDITION-ID'],
        'PER-RENDITION-URIS',
        uriReplacement,
      );
      clonedTrack.groupId = clonedTrack.attrs['GROUP-ID'] =
        groupCloneMap[sourceGroupId];
      clonedTrack.attrs['PATHWAY-ID'] = cloneId;
      clones.push(clonedTrack);
    }
    tracks.push(...clones);
  }
}
/**
 * Apply Content Steering URI replacement to a playlist URI.
 * Precedence: a per-variant/per-rendition override (looked up by the stable
 * ID) wins outright; otherwise only the HOST is substituted. PARAMS are then
 * merged into the query string in sorted key order in both cases.
 * @param uri - The original playlist URI
 * @param stableId - STABLE-VARIANT-ID / STABLE-RENDITION-ID used to look up a
 *   full per-option URI override
 * @param perOptionKey - Which override table of the steering manifest to use
 * @param uriReplacement - The URI-REPLACEMENT object from the steering manifest
 * @returns the fully-resolved replacement URI
 */
function performUriReplacement(
  uri: string,
  stableId: string | undefined,
  perOptionKey: 'PER-VARIANT-URIS' | 'PER-RENDITION-URIS',
  uriReplacement: UriReplacement,
): string {
  const perOptionUris = uriReplacement[perOptionKey];
  const override = stableId ? perOptionUris?.[stableId] : undefined;
  const url = new self.URL(override || uri);
  // HOST substitution only applies when no full-URI override matched.
  if (uriReplacement.HOST && !override) {
    url.host = uriReplacement.HOST;
  }
  const params = uriReplacement.PARAMS;
  if (params) {
    for (const key of Object.keys(params).sort()) {
      if (key) {
        url.searchParams.set(key, params[key]);
      }
    }
  }
  return url.href;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,515 @@
import { Events } from '../events';
import { ErrorDetails, ErrorTypes } from '../errors';
import { PlaylistContextType, PlaylistLevelType } from '../types/loader';
import {
getRetryConfig,
isTimeoutError,
shouldRetry,
} from '../utils/error-helper';
import { findFragmentByPTS } from './fragment-finders';
import { HdcpLevel, HdcpLevels } from '../types/level';
import { logger } from '../utils/logger';
import type Hls from '../hls';
import type { RetryConfig } from '../config';
import type { NetworkComponentAPI } from '../types/component-api';
import type { ErrorData } from '../types/events';
import type { Fragment } from '../loader/fragment';
import type { LevelDetails } from '../loader/level-details';
/**
 * Recovery action attached to a non-fatal error (ErrorData.errorAction) by
 * ErrorController; consumed by the stream/playlist controllers and by
 * ErrorController.onErrorOut to drive retries and fail-over.
 */
export const enum NetworkErrorAction {
  DoNothing = 0,
  SendEndCallback = 1, // Reserved for future use
  SendAlternateToPenaltyBox = 2,
  RemoveAlternatePermanently = 3, // Reserved for future use
  InsertDiscontinuity = 4, // Reserved for future use
  RetryRequest = 5,
}
/**
 * Bit flags qualifying a NetworkErrorAction (values are combinable, though
 * the code in this file sets at most one at a time).
 */
export const enum ErrorActionFlags {
  None = 0,
  MoveAllAlternatesMatchingHost = 1,
  MoveAllAlternatesMatchingHDCP = 1 << 1,
  SwitchToSDR = 1 << 2, // Reserved for future use
}
/**
 * The resolution plan for one error occurrence.
 * `resolved` is set once a handler has acted on the plan; unresolved
 * SendAlternateToPenaltyBox actions escalate the error to fatal (see
 * ErrorController.onErrorOut).
 */
export type IErrorAction = {
  action: NetworkErrorAction;
  flags: ErrorActionFlags;
  // Retry bookkeeping (present when a retry policy applies to this error).
  retryCount?: number;
  retryConfig?: RetryConfig;
  // HDCP level that triggered an output-restricted error, if any.
  hdcpLevel?: HdcpLevel;
  // Level index to switch to when the action is a level switch.
  nextAutoLevel?: number;
  resolved?: boolean;
};
// Record of errors attributed to a single rendition.
// NOTE(review): `penalizedRenditions` is declared but not read/written in the
// visible code of this class — presumably used elsewhere or reserved; verify.
type PenalizedRendition = {
  lastErrorPerfMs: number;
  errors: ErrorData[];
  details?: LevelDetails;
};
// Keyed by rendition index (presumably level index — confirm at call sites).
type PenalizedRenditions = { [key: number]: PenalizedRendition };
/**
 * ErrorController inspects every non-fatal ERROR event and attaches an
 * IErrorAction describing how to recover: retry the request, switch to
 * another level, or send the current alternate "to the penalty box"
 * (redundant fail-over / pathway switch). `onErrorOut` then executes actions
 * that were not resolved by other controllers and escalates to fatal when no
 * recovery is possible.
 */
export default class ErrorController implements NetworkComponentAPI {
  private readonly hls: Hls;
  // Consecutive playlist-error count; reset on manifest load and level update.
  private playlistError: number = 0;
  // See PenalizedRenditions note above; cleared on manifest load and destroy.
  private penalizedRenditions: PenalizedRenditions = {};
  private log: (msg: any) => void;
  private warn: (msg: any) => void;
  private error: (msg: any) => void;
  constructor(hls: Hls) {
    this.hls = hls;
    this.log = logger.log.bind(logger, `[info]:`);
    this.warn = logger.warn.bind(logger, `[warning]:`);
    this.error = logger.error.bind(logger, `[error]:`);
    this.registerListeners();
  }
  private registerListeners() {
    const hls = this.hls;
    hls.on(Events.ERROR, this.onError, this);
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  }
  private unregisterListeners() {
    const hls = this.hls;
    if (!hls) {
      return;
    }
    hls.off(Events.ERROR, this.onError, this);
    // NOTE(review): onErrorOut is removed here but never registered in
    // registerListeners — presumably attached externally by the Hls core;
    // verify at the call site.
    hls.off(Events.ERROR, this.onErrorOut, this);
    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  }
  destroy() {
    this.unregisterListeners();
    // @ts-ignore
    this.hls = null;
    this.penalizedRenditions = {};
  }
  startLoad(startPosition: number): void {}
  stopLoad(): void {
    this.playlistError = 0;
  }
  // Resolve the variant (main) level index associated with a fragment:
  // main fragments carry it directly; media-option fragments use loadLevel.
  private getVariantLevelIndex(frag: Fragment | undefined): number {
    return frag?.type === PlaylistLevelType.MAIN
      ? frag.level
      : this.hls.loadLevel;
  }
  private onManifestLoading() {
    this.playlistError = 0;
    this.penalizedRenditions = {};
  }
  private onLevelUpdated() {
    this.playlistError = 0;
  }
  // Classify each non-fatal error and attach the recovery plan. Fatal errors
  // are left untouched.
  private onError(event: Events.ERROR, data: ErrorData) {
    if (data.fatal) {
      return;
    }
    const hls = this.hls;
    const context = data.context;
    switch (data.details) {
      case ErrorDetails.FRAG_LOAD_ERROR:
      case ErrorDetails.FRAG_LOAD_TIMEOUT:
      case ErrorDetails.KEY_LOAD_ERROR:
      case ErrorDetails.KEY_LOAD_TIMEOUT:
        data.errorAction = this.getFragRetryOrSwitchAction(data);
        return;
      case ErrorDetails.FRAG_PARSING_ERROR:
        // ignore empty segment errors marked as gap
        if (data.frag?.gap) {
          data.errorAction = {
            action: NetworkErrorAction.DoNothing,
            flags: ErrorActionFlags.None,
          };
          return;
        }
        // falls through
      case ErrorDetails.FRAG_GAP:
      case ErrorDetails.FRAG_DECRYPT_ERROR: {
        // Switch level if possible, otherwise allow retry count to reach max error retries
        data.errorAction = this.getFragRetryOrSwitchAction(data);
        data.errorAction.action = NetworkErrorAction.SendAlternateToPenaltyBox;
        return;
      }
      case ErrorDetails.LEVEL_EMPTY_ERROR:
      case ErrorDetails.LEVEL_PARSING_ERROR:
        {
          // Only retry when empty and live
          const levelIndex =
            data.parent === PlaylistLevelType.MAIN
              ? (data.level as number)
              : hls.loadLevel;
          if (
            data.details === ErrorDetails.LEVEL_EMPTY_ERROR &&
            !!data.context?.levelDetails?.live
          ) {
            data.errorAction = this.getPlaylistRetryOrSwitchAction(
              data,
              levelIndex,
            );
          } else {
            // Escalate to fatal if not retrying or switching
            data.levelRetry = false;
            data.errorAction = this.getLevelSwitchAction(data, levelIndex);
          }
        }
        return;
      case ErrorDetails.LEVEL_LOAD_ERROR:
      case ErrorDetails.LEVEL_LOAD_TIMEOUT:
        if (typeof context?.level === 'number') {
          data.errorAction = this.getPlaylistRetryOrSwitchAction(
            data,
            context.level,
          );
        }
        return;
      case ErrorDetails.AUDIO_TRACK_LOAD_ERROR:
      case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:
      case ErrorDetails.SUBTITLE_LOAD_ERROR:
      case ErrorDetails.SUBTITLE_TRACK_LOAD_TIMEOUT:
        if (context) {
          const level = hls.levels[hls.loadLevel];
          if (
            level &&
            ((context.type === PlaylistContextType.AUDIO_TRACK &&
              level.hasAudioGroup(context.groupId)) ||
              (context.type === PlaylistContextType.SUBTITLE_TRACK &&
                level.hasSubtitleGroup(context.groupId)))
          ) {
            // Perform Pathway switch or Redundant failover if possible for fastest recovery
            // otherwise allow playlist retry count to reach max error retries
            data.errorAction = this.getPlaylistRetryOrSwitchAction(
              data,
              hls.loadLevel,
            );
            data.errorAction.action =
              NetworkErrorAction.SendAlternateToPenaltyBox;
            data.errorAction.flags =
              ErrorActionFlags.MoveAllAlternatesMatchingHost;
            return;
          }
        }
        return;
      case ErrorDetails.KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED:
        {
          // Restrict HDCP when the current level advertises a level to avoid;
          // otherwise treat like any other key-system error (level switch).
          const level = hls.levels[hls.loadLevel];
          const restrictedHdcpLevel = level?.attrs['HDCP-LEVEL'];
          if (restrictedHdcpLevel) {
            data.errorAction = {
              action: NetworkErrorAction.SendAlternateToPenaltyBox,
              flags: ErrorActionFlags.MoveAllAlternatesMatchingHDCP,
              hdcpLevel: restrictedHdcpLevel,
            };
          } else {
            this.keySystemError(data);
          }
        }
        return;
      case ErrorDetails.BUFFER_ADD_CODEC_ERROR:
      case ErrorDetails.REMUX_ALLOC_ERROR:
      case ErrorDetails.BUFFER_APPEND_ERROR:
        data.errorAction = this.getLevelSwitchAction(
          data,
          data.level ?? hls.loadLevel,
        );
        return;
      case ErrorDetails.INTERNAL_EXCEPTION:
      case ErrorDetails.BUFFER_APPENDING_ERROR:
      case ErrorDetails.BUFFER_FULL_ERROR:
      case ErrorDetails.LEVEL_SWITCH_ERROR:
      case ErrorDetails.BUFFER_STALLED_ERROR:
      case ErrorDetails.BUFFER_SEEK_OVER_HOLE:
      case ErrorDetails.BUFFER_NUDGE_ON_STALL:
        // Handled elsewhere or benign: no recovery action needed here.
        data.errorAction = {
          action: NetworkErrorAction.DoNothing,
          flags: ErrorActionFlags.None,
        };
        return;
    }
    if (data.type === ErrorTypes.KEY_SYSTEM_ERROR) {
      this.keySystemError(data);
    }
  }
  // Key-system errors cannot be retried on the same level: switch away and
  // escalate to fatal if switching fails.
  private keySystemError(data: ErrorData) {
    const levelIndex = this.getVariantLevelIndex(data.frag);
    // Do not retry level. Escalate to fatal if switching levels fails.
    data.levelRetry = false;
    data.errorAction = this.getLevelSwitchAction(data, levelIndex);
  }
  // Retry the playlist request per playlistLoadPolicy while retries remain,
  // otherwise fall back to a level switch (carrying retry info along).
  private getPlaylistRetryOrSwitchAction(
    data: ErrorData,
    levelIndex: number | null | undefined,
  ): IErrorAction {
    const hls = this.hls;
    const retryConfig = getRetryConfig(hls.config.playlistLoadPolicy, data);
    const retryCount = this.playlistError++;
    const retry = shouldRetry(
      retryConfig,
      retryCount,
      isTimeoutError(data),
      data.response,
    );
    if (retry) {
      return {
        action: NetworkErrorAction.RetryRequest,
        flags: ErrorActionFlags.None,
        retryConfig,
        retryCount,
      };
    }
    const errorAction = this.getLevelSwitchAction(data, levelIndex);
    if (retryConfig) {
      errorAction.retryConfig = retryConfig;
      errorAction.retryCount = retryCount;
    }
    return errorAction;
  }
  // Retry the fragment/key request per frag/keyLoadPolicy while retries
  // remain, otherwise fall back to a level switch.
  private getFragRetryOrSwitchAction(data: ErrorData): IErrorAction {
    const hls = this.hls;
    // Share fragment error count across media options (main, audio, subs)
    // This allows for level based rendition switching when media option assets fail
    const variantLevelIndex = this.getVariantLevelIndex(data.frag);
    const level = hls.levels[variantLevelIndex];
    const { fragLoadPolicy, keyLoadPolicy } = hls.config;
    const retryConfig = getRetryConfig(
      data.details.startsWith('key') ? keyLoadPolicy : fragLoadPolicy,
      data,
    );
    const fragmentErrors = hls.levels.reduce(
      (acc, level) => acc + level.fragmentError,
      0,
    );
    // Switch levels when out of retried or level index out of bounds
    if (level) {
      if (data.details !== ErrorDetails.FRAG_GAP) {
        level.fragmentError++;
      }
      const retry = shouldRetry(
        retryConfig,
        fragmentErrors,
        isTimeoutError(data),
        data.response,
      );
      if (retry) {
        return {
          action: NetworkErrorAction.RetryRequest,
          flags: ErrorActionFlags.None,
          retryConfig,
          retryCount: fragmentErrors,
        };
      }
    }
    // Reach max retry count, or Missing level reference
    // Switch to valid index
    const errorAction = this.getLevelSwitchAction(data, variantLevelIndex);
    // Add retry details to allow skipping of FRAG_PARSING_ERROR
    if (retryConfig) {
      errorAction.retryConfig = retryConfig;
      errorAction.retryCount = fragmentErrors;
    }
    return errorAction;
  }
  /**
   * Pick the next level to switch to after an error on `levelIndex`.
   * Scans all levels (wrapping from loadLevel), skipping levels that are out
   * of the auto-level bounds, already errored, share the failing audio/subs
   * group, have a GAP at the same position, or do not satisfy the
   * audio/video-codec-alternate constraints derived from the error. If no
   * candidate is found, resolves with a penalty-box host fail-over instead.
   */
  private getLevelSwitchAction(
    data: ErrorData,
    levelIndex: number | null | undefined,
  ): IErrorAction {
    const hls = this.hls;
    if (levelIndex === null || levelIndex === undefined) {
      levelIndex = hls.loadLevel;
    }
    const level = this.hls.levels[levelIndex];
    if (level) {
      const errorDetails = data.details;
      level.loadError++;
      if (errorDetails === ErrorDetails.BUFFER_APPEND_ERROR) {
        level.fragmentError++;
      }
      // Search for next level to retry
      let nextLevel = -1;
      const { levels, loadLevel, minAutoLevel, maxAutoLevel } = hls;
      if (!hls.autoLevelEnabled) {
        hls.loadLevel = -1;
      }
      const fragErrorType = data.frag?.type;
      // Find alternate audio codec if available on audio codec error
      const isAudioCodecError =
        (fragErrorType === PlaylistLevelType.AUDIO &&
          errorDetails === ErrorDetails.FRAG_PARSING_ERROR) ||
        (data.sourceBufferName === 'audio' &&
          (errorDetails === ErrorDetails.BUFFER_ADD_CODEC_ERROR ||
            errorDetails === ErrorDetails.BUFFER_APPEND_ERROR));
      const findAudioCodecAlternate =
        isAudioCodecError &&
        levels.some(({ audioCodec }) => level.audioCodec !== audioCodec);
      // Find alternate video codec if available on video codec error
      const isVideoCodecError =
        data.sourceBufferName === 'video' &&
        (errorDetails === ErrorDetails.BUFFER_ADD_CODEC_ERROR ||
          errorDetails === ErrorDetails.BUFFER_APPEND_ERROR);
      const findVideoCodecAlternate =
        isVideoCodecError &&
        levels.some(
          ({ codecSet, audioCodec }) =>
            level.codecSet !== codecSet && level.audioCodec === audioCodec,
        );
      const { type: playlistErrorType, groupId: playlistErrorGroupId } =
        data.context ?? {};
      for (let i = levels.length; i--; ) {
        const candidate = (i + loadLevel) % levels.length;
        if (
          candidate !== loadLevel &&
          candidate >= minAutoLevel &&
          candidate <= maxAutoLevel &&
          levels[candidate].loadError === 0
        ) {
          const levelCandidate = levels[candidate];
          // Skip level switch if GAP tag is found in next level at same position
          if (
            errorDetails === ErrorDetails.FRAG_GAP &&
            fragErrorType === PlaylistLevelType.MAIN &&
            data.frag
          ) {
            const levelDetails = levels[candidate].details;
            if (levelDetails) {
              const fragCandidate = findFragmentByPTS(
                data.frag,
                levelDetails.fragments,
                data.frag.start,
              );
              if (fragCandidate?.gap) {
                continue;
              }
            }
          } else if (
            (playlistErrorType === PlaylistContextType.AUDIO_TRACK &&
              levelCandidate.hasAudioGroup(playlistErrorGroupId)) ||
            (playlistErrorType === PlaylistContextType.SUBTITLE_TRACK &&
              levelCandidate.hasSubtitleGroup(playlistErrorGroupId))
          ) {
            // For audio/subs playlist errors find another group ID or fallthrough to redundant fail-over
            continue;
          } else if (
            (fragErrorType === PlaylistLevelType.AUDIO &&
              level.audioGroups?.some((groupId) =>
                levelCandidate.hasAudioGroup(groupId),
              )) ||
            (fragErrorType === PlaylistLevelType.SUBTITLE &&
              level.subtitleGroups?.some((groupId) =>
                levelCandidate.hasSubtitleGroup(groupId),
              )) ||
            (findAudioCodecAlternate &&
              level.audioCodec === levelCandidate.audioCodec) ||
            (!findAudioCodecAlternate &&
              level.audioCodec !== levelCandidate.audioCodec) ||
            (findVideoCodecAlternate &&
              level.codecSet === levelCandidate.codecSet)
          ) {
            // For video/audio/subs frag errors find another group ID or fallthrough to redundant fail-over
            continue;
          }
          nextLevel = candidate;
          break;
        }
      }
      if (nextLevel > -1 && hls.loadLevel !== nextLevel) {
        data.levelRetry = true;
        this.playlistError = 0;
        return {
          action: NetworkErrorAction.SendAlternateToPenaltyBox,
          flags: ErrorActionFlags.None,
          nextAutoLevel: nextLevel,
        };
      }
    }
    // No levels to switch / Manual level selection / Level not found
    // Resolve with Pathway switch, Redundant fail-over, or stay on lowest Level
    return {
      action: NetworkErrorAction.SendAlternateToPenaltyBox,
      flags: ErrorActionFlags.MoveAllAlternatesMatchingHost,
    };
  }
  /**
   * Final ERROR handler: executes penalty-box actions that other controllers
   * did not resolve, escalates unresolved errors to fatal (stopping load),
   * and triggers media-error recovery when the MediaSource ended mid-append.
   */
  public onErrorOut(event: Events.ERROR, data: ErrorData) {
    switch (data.errorAction?.action) {
      case NetworkErrorAction.DoNothing:
        break;
      case NetworkErrorAction.SendAlternateToPenaltyBox:
        this.sendAlternateToPenaltyBox(data);
        if (
          !data.errorAction.resolved &&
          data.details !== ErrorDetails.FRAG_GAP
        ) {
          data.fatal = true;
        } else if (/MediaSource readyState: ended/.test(data.error.message)) {
          this.warn(
            `MediaSource ended after "${data.sourceBufferName}" sourceBuffer append error. Attempting to recover from media error.`,
          );
          this.hls.recoverMediaError();
        }
        break;
      case NetworkErrorAction.RetryRequest:
        // handled by stream and playlist/level controllers
        break;
    }
    if (data.fatal) {
      this.hls.stopLoad();
      return;
    }
  }
  // Execute a penalty-box action: apply HDCP restriction and/or switch to the
  // pre-computed nextAutoLevel.
  private sendAlternateToPenaltyBox(data: ErrorData) {
    const hls = this.hls;
    const errorAction = data.errorAction;
    if (!errorAction) {
      return;
    }
    const { flags, hdcpLevel, nextAutoLevel } = errorAction;
    switch (flags) {
      case ErrorActionFlags.None:
        this.switchLevel(data, nextAutoLevel);
        break;
      case ErrorActionFlags.MoveAllAlternatesMatchingHDCP:
        if (hdcpLevel) {
          // Cap playback one HDCP level below the restricted one.
          hls.maxHdcpLevel = HdcpLevels[HdcpLevels.indexOf(hdcpLevel) - 1];
          errorAction.resolved = true;
        }
        this.warn(
          `Restricting playback to HDCP-LEVEL of "${hls.maxHdcpLevel}" or lower`,
        );
        break;
    }
    // If not resolved by previous actions try to switch to next level
    if (!errorAction.resolved) {
      this.switchLevel(data, nextAutoLevel);
    }
  }
  // Apply the level switch chosen by getLevelSwitchAction and mark the action
  // resolved.
  private switchLevel(data: ErrorData, levelIndex: number | undefined) {
    if (levelIndex !== undefined && data.errorAction) {
      this.warn(`switching to level ${levelIndex} after ${data.details}`);
      this.hls.nextAutoLevel = levelIndex;
      data.errorAction.resolved = true;
      // Stream controller is responsible for this but won't switch on false start
      this.hls.nextLoadLevel = this.hls.nextAutoLevel;
    }
  }
}

View File

@@ -0,0 +1,141 @@
import { Events } from '../events';
import { logger } from '../utils/logger';
import type { ComponentAPI } from '../types/component-api';
import type Hls from '../hls';
import type { MediaAttachingData } from '../types/events';
import StreamController from './stream-controller';
/**
 * FPSController samples decoded/dropped frame counts on an interval (when
 * `capLevelOnFPSDrop` is enabled) and, when the dropped-frame ratio exceeds
 * `fpsDroppedMonitoringThreshold`, caps auto-level selection one level below
 * the current one and asks the stream controller to switch.
 */
class FPSController implements ComponentAPI {
  private hls: Hls;
  // True when media.getVideoPlaybackQuality() exists; otherwise the webkit
  // frame counters are used as a fallback.
  private isVideoPlaybackQualityAvailable: boolean = false;
  private timer?: number;
  private media: HTMLVideoElement | null = null;
  // performance.now() timestamp of the previous sample (undefined until the
  // first checkFPS call with decoded frames).
  private lastTime: any;
  private lastDroppedFrames: number = 0;
  private lastDecodedFrames: number = 0;
  // stream controller must be provided as a dependency!
  private streamController!: StreamController;
  constructor(hls: Hls) {
    this.hls = hls;
    this.registerListeners();
  }
  public setStreamController(streamController: StreamController) {
    this.streamController = streamController;
  }
  protected registerListeners() {
    this.hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  }
  protected unregisterListeners() {
    this.hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  }
  destroy() {
    if (this.timer) {
      clearInterval(this.timer);
    }
    this.unregisterListeners();
    this.isVideoPlaybackQualityAvailable = false;
    this.media = null;
  }
  // Start (or restart) the FPS monitoring interval when media attaches and
  // FPS-drop level capping is enabled.
  protected onMediaAttaching(
    event: Events.MEDIA_ATTACHING,
    data: MediaAttachingData,
  ) {
    const config = this.hls.config;
    if (config.capLevelOnFPSDrop) {
      const media =
        data.media instanceof self.HTMLVideoElement ? data.media : null;
      this.media = media;
      if (media && typeof media.getVideoPlaybackQuality === 'function') {
        this.isVideoPlaybackQualityAvailable = true;
      }
      self.clearInterval(this.timer);
      this.timer = self.setInterval(
        this.checkFPSInterval.bind(this),
        config.fpsDroppedMonitoringPeriod,
      );
    }
  }
  /**
   * Compare the current frame counters against the previous sample, emit
   * FPS_DROP, and cap the auto level when the drop ratio is too high.
   * @param video - the monitored element (unused here; kept for call-site
   *   compatibility)
   * @param decodedFrames - cumulative decoded frame count
   * @param droppedFrames - cumulative dropped frame count
   */
  checkFPS(
    video: HTMLVideoElement,
    decodedFrames: number,
    droppedFrames: number,
  ) {
    const currentTime = performance.now();
    if (decodedFrames) {
      if (this.lastTime) {
        const currentPeriod = currentTime - this.lastTime;
        const currentDropped = droppedFrames - this.lastDroppedFrames;
        const currentDecoded = decodedFrames - this.lastDecodedFrames;
        const droppedFPS = (1000 * currentDropped) / currentPeriod;
        const hls = this.hls;
        hls.trigger(Events.FPS_DROP, {
          currentDropped: currentDropped,
          currentDecoded: currentDecoded,
          totalDroppedFrames: droppedFrames,
        });
        if (droppedFPS > 0) {
          // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
          if (
            currentDropped >
            hls.config.fpsDroppedMonitoringThreshold * currentDecoded
          ) {
            let currentLevel = hls.currentLevel;
            logger.warn(
              'drop FPS ratio greater than max allowed value for currentLevel: ' +
                currentLevel,
            );
            // Only cap below the current level, and never above an existing cap.
            if (
              currentLevel > 0 &&
              (hls.autoLevelCapping === -1 ||
                hls.autoLevelCapping >= currentLevel)
            ) {
              currentLevel = currentLevel - 1;
              hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
                level: currentLevel,
                droppedLevel: hls.currentLevel,
              });
              hls.autoLevelCapping = currentLevel;
              this.streamController.nextLevelSwitch();
            }
          }
        }
      }
      this.lastTime = currentTime;
      this.lastDroppedFrames = droppedFrames;
      this.lastDecodedFrames = decodedFrames;
    }
  }
  // Interval callback: read frame counters via the standard API when
  // available, otherwise fall back to the webkit-prefixed counters.
  checkFPSInterval() {
    const video = this.media;
    if (video) {
      if (this.isVideoPlaybackQualityAvailable) {
        const videoPlaybackQuality = video.getVideoPlaybackQuality();
        this.checkFPS(
          video,
          videoPlaybackQuality.totalVideoFrames,
          videoPlaybackQuality.droppedVideoFrames,
        );
      } else {
        // HTMLVideoElement doesn't include the webkit types
        this.checkFPS(
          video,
          (video as any).webkitDecodedFrameCount as number,
          (video as any).webkitDroppedFrameCount as number,
        );
      }
    }
  }
}
export default FPSController;

View File

@@ -0,0 +1,218 @@
import BinarySearch from '../utils/binary-search';
import { Fragment } from '../loader/fragment';
/**
 * Returns the first fragment whose tolerated endPdt window exceeds the given
 * PDT, or null when the PDT falls outside the playlist's date range.
 * @param fragments - The array of candidate fragments
 * @param PDTValue - The PDT value which must be exceeded
 * @param maxFragLookUpTolerance - Seconds of slack applied per candidate when
 *   testing contiguity
 */
export function findFragmentByPDT(
  fragments: Array<Fragment>,
  PDTValue: number | null,
  maxFragLookUpTolerance: number,
): Fragment | null {
  if (
    PDTValue === null ||
    !Array.isArray(fragments) ||
    !fragments.length ||
    !Number.isFinite(PDTValue)
  ) {
    return null;
  }
  // Out of range: before the first fragment's PDT ...
  if (PDTValue < (fragments[0].programDateTime || 0)) {
    return null;
  }
  // ... or at/after the last fragment's end PDT.
  if (PDTValue >= (fragments[fragments.length - 1].endProgramDateTime || 0)) {
    return null;
  }
  const tolerance = maxFragLookUpTolerance || 0;
  for (const frag of fragments) {
    if (pdtWithinToleranceTest(PDTValue, tolerance, frag)) {
      return frag;
    }
  }
  return null;
}
/**
 * Finds a fragment based on the SN of the previous fragment; or based on the needs of the current buffer.
 * This method compensates for small buffer gaps by applying a tolerance to the start of any candidate fragment, thus
 * breaking any traps which would cause the same fragment to be continuously selected within a small range.
 * @param fragPrevious - The last frag successfully appended
 * @param fragments - The array of candidate fragments
 * @param bufferEnd - The end of the contiguous buffered range the playhead is currently within
 * @param maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous
 * @param nextFragLookupTolerance - Tolerance used by the fast-start-switch check for the fragment following fragPrevious
 * @returns a matching fragment or null
 */
export function findFragmentByPTS(
  fragPrevious: Fragment | null,
  fragments: Array<Fragment>,
  bufferEnd: number = 0,
  maxFragLookUpTolerance: number = 0,
  nextFragLookupTolerance: number = 0.005,
): Fragment | null {
  let fragNext: Fragment | null = null;
  if (fragPrevious) {
    // Candidate: the fragment directly following fragPrevious by sequence number.
    fragNext =
      fragments[
        (fragPrevious.sn as number) - (fragments[0].sn as number) + 1
      ] || null;
    // check for buffer-end rounding error
    const bufferEdgeError = fragPrevious.endDTS - bufferEnd;
    if (bufferEdgeError > 0 && bufferEdgeError < 0.0000015) {
      bufferEnd += 0.0000015;
    }
  } else if (bufferEnd === 0 && fragments[0].start === 0) {
    // Nothing buffered yet and the playlist starts at zero: take the first fragment.
    fragNext = fragments[0];
  }
  // Prefer the next fragment if it's within tolerance
  if (
    fragNext &&
    (((!fragPrevious || fragPrevious.level === fragNext.level) &&
      fragmentWithinToleranceTest(
        bufferEnd,
        maxFragLookUpTolerance,
        fragNext,
      ) === 0) ||
      fragmentWithinFastStartSwitch(
        fragNext,
        fragPrevious,
        Math.min(nextFragLookupTolerance, maxFragLookUpTolerance),
      ))
  ) {
    return fragNext;
  }
  // We might be seeking past the tolerance so find the best match
  const foundFragment = BinarySearch.search(
    fragments,
    fragmentWithinToleranceTest.bind(null, bufferEnd, maxFragLookUpTolerance),
  );
  if (foundFragment && (foundFragment !== fragPrevious || !fragNext)) {
    return foundFragment;
  }
  // If no match was found return the next fragment after fragPrevious, or null
  return fragNext;
}
// Fast-start heuristic: after appending the very first fragment (start 0) of a
// lower level, allow immediately selecting the next fragment of a higher
// level when its start lies within the previous fragment's advertised EXTINF
// duration (plus a small tolerance).
function fragmentWithinFastStartSwitch(
  fragNext: Fragment,
  fragPrevious: Fragment | null,
  nextFragLookupTolerance: number,
): boolean {
  if (
    !fragPrevious ||
    fragPrevious.start !== 0 ||
    fragPrevious.level >= fragNext.level ||
    (fragPrevious.endPTS || 0) <= 0
  ) {
    return false;
  }
  // Sum the EXTINF durations on the previous fragment, seeded with the
  // lookup tolerance.
  let firstDuration = nextFragLookupTolerance;
  for (const tag of fragPrevious.tagList) {
    if (tag[0] === 'INF') {
      firstDuration += parseFloat(tag[1]);
    }
  }
  return fragNext.start <= firstDuration;
}
/**
 * Comparator used by findFragmentByPTS's BinarySearch to match a fragment to
 * the current buffer end.
 * A candidate matches immediately when bufferEnd lies inside
 * [start, start + duration). Otherwise a tolerance — capped at the fragment's
 * own duration (+deltaPTS) so tiny segments are never skipped — shrinks the
 * fragment from both ends before deciding whether the candidate is too early
 * or too late. This copes with cases like bufferEnd = 9.991 against
 * frag[0] = [0,10] / frag[1] = [10,20].
 * @param bufferEnd - The end of the current buffered range the playhead is within
 * @param maxFragLookUpTolerance - Maximum contiguity tolerance in seconds
 * @param candidate - The fragment to test
 * @returns 0 if it matches, 1 if too low, -1 if too high
 */
export function fragmentWithinToleranceTest(
  bufferEnd = 0,
  maxFragLookUpTolerance = 0,
  candidate: Fragment,
) {
  const fragStart = candidate.start;
  const fragEnd = fragStart + candidate.duration;
  // Eager exact hit: bufferEnd inside the fragment, no tolerance applied.
  if (fragStart <= bufferEnd && fragEnd > bufferEnd) {
    return 0;
  }
  // Cap the tolerance at the fragment's duration so very small segments
  // still register as a match rather than being skipped over.
  const tolerance = Math.min(
    maxFragLookUpTolerance,
    candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0),
  );
  if (fragEnd - tolerance <= bufferEnd) {
    // Candidate ends at/before the buffer edge: look later.
    return 1;
  }
  if (fragStart && fragStart - tolerance > bufferEnd) {
    // Candidate starts after the buffer edge (never signal -1 for the first
    // fragment at start 0, where a negative tolerance could mislead).
    return -1;
  }
  return 0;
}
/**
 * Predicate used by findFragmentByPDT: does this candidate's end
 * program-date-time (Unix ms), shrunk by the lookup tolerance, still lie
 * beyond the target PDT?
 * The tolerance is capped at the fragment's own duration (+deltaPTS) and
 * converted from seconds to milliseconds.
 * @param pdtBufferEnd - Unix time (ms) representing the end of the current buffered range
 * @param maxFragLookUpTolerance - Contiguity tolerance in seconds
 * @param candidate - The fragment to test
 * @returns true if contiguous, false otherwise
 */
export function pdtWithinToleranceTest(
  pdtBufferEnd: number,
  maxFragLookUpTolerance: number,
  candidate: Fragment,
): boolean {
  const toleranceMs =
    1000 *
    Math.min(
      maxFragLookUpTolerance,
      candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0),
    );
  // endProgramDateTime can be null; treat it as zero.
  return (candidate.endProgramDateTime || 0) - toleranceMs > pdtBufferEnd;
}
/**
 * Binary-search for a fragment carrying the given discontinuity counter
 * (fragments are ordered by `cc`); null when no fragment matches.
 */
export function findFragWithCC(
  fragments: Fragment[],
  cc: number,
): Fragment | null {
  return BinarySearch.search(fragments, (candidate) =>
    candidate.cc < cc ? 1 : candidate.cc > cc ? -1 : 0,
  );
}

View File

@@ -0,0 +1,508 @@
import { Events } from '../events';
import { Fragment, Part } from '../loader/fragment';
import { PlaylistLevelType } from '../types/loader';
import type { SourceBufferName } from '../types/buffer';
import type {
FragmentBufferedRange,
FragmentEntity,
FragmentTimeRange,
} from '../types/fragment-tracker';
import type { ComponentAPI } from '../types/component-api';
import type {
BufferAppendedData,
FragBufferedData,
FragLoadedData,
} from '../types/events';
import type Hls from '../hls';
/**
 * Lifecycle state of a tracked fragment within the media buffer:
 * NOT_LOADED → APPENDING → OK (fully buffered) or PARTIAL (only part of its
 * range ended up buffered / parts were evicted).
 */
export const enum FragmentState {
  NOT_LOADED = 'NOT_LOADED',
  APPENDING = 'APPENDING',
  PARTIAL = 'PARTIAL',
  OK = 'OK',
}
export class FragmentTracker implements ComponentAPI {
  // Appended Parts per playlist type (main/audio/subtitle).
  private activePartLists: { [key in PlaylistLevelType]?: Part[] } =
    Object.create(null);
  // Per playlist type, the entity of a fragment flagged with endList
  // (playlist ended with EXT-X-ENDLIST).
  private endListFragments: { [key in PlaylistLevelType]?: FragmentEntity } =
    Object.create(null);
  // All tracked fragments, keyed by a per-fragment string key
  // (presumably derived from type/level/sn — confirm against getFragmentKey).
  private fragments: Partial<Record<string, FragmentEntity>> =
    Object.create(null);
  // Latest TimeRanges snapshot per SourceBuffer name; null after destroy().
  private timeRanges:
    | {
        [key in SourceBufferName]?: TimeRanges;
      }
    | null = Object.create(null);
  // Seconds of slack used when testing whether a fragment lies inside a
  // buffered range (see getBufferedTimes).
  private bufferPadding: number = 0.2;
  private hls: Hls;
  // Set once any GAP-tagged fragment has been force-tracked (see fragBuffered).
  private hasGaps: boolean = false;
  constructor(hls: Hls) {
    this.hls = hls;
    this._registerListeners();
  }
private _registerListeners() {
const { hls } = this;
hls.on(Events.BUFFER_APPENDED, this.onBufferAppended, this);
hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
hls.on(Events.FRAG_LOADED, this.onFragLoaded, this);
}
private _unregisterListeners() {
const { hls } = this;
hls.off(Events.BUFFER_APPENDED, this.onBufferAppended, this);
hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
hls.off(Events.FRAG_LOADED, this.onFragLoaded, this);
}
public destroy() {
this._unregisterListeners();
// @ts-ignore
this.fragments =
// @ts-ignore
this.activePartLists =
// @ts-ignore
this.endListFragments =
this.timeRanges =
null;
}
/**
* Return a Fragment or Part with an appended range that matches the position and levelType
* Otherwise, return null
*/
public getAppendedFrag(
position: number,
levelType: PlaylistLevelType,
): Fragment | Part | null {
const activeParts = this.activePartLists[levelType];
if (activeParts) {
for (let i = activeParts.length; i--; ) {
const activePart = activeParts[i];
if (!activePart) {
break;
}
const appendedPTS = activePart.end;
if (
activePart.start <= position &&
appendedPTS !== null &&
position <= appendedPTS
) {
return activePart;
}
}
}
return this.getBufferedFrag(position, levelType);
}
/**
* Return a buffered Fragment that matches the position and levelType.
* A buffered Fragment is one whose loading, parsing and appending is done (completed or "partial" meaning aborted).
* If not found any Fragment, return null
*/
public getBufferedFrag(
position: number,
levelType: PlaylistLevelType,
): Fragment | null {
const { fragments } = this;
const keys = Object.keys(fragments);
for (let i = keys.length; i--; ) {
const fragmentEntity = fragments[keys[i]];
if (fragmentEntity?.body.type === levelType && fragmentEntity.buffered) {
const frag = fragmentEntity.body;
if (frag.start <= position && position <= frag.end) {
return frag;
}
}
}
return null;
}
  /**
   * Partial fragments effected by coded frame eviction will be removed
   * The browser will unload parts of the buffer to free up memory for new buffer data
   * Fragments will need to be reloaded when the buffer is freed up, removing partial fragments will allow them to reload(since there might be parts that are still playable)
   * @param elementaryStream - SourceBuffer name whose TimeRanges changed
   * @param timeRange - the SourceBuffer's current buffered TimeRanges
   * @param playlistType - playlist type used when pruning never-buffered entries
   * @param appendedPart - the part just appended; fragments at/after its sn are exempt
   */
  public detectEvictedFragments(
    elementaryStream: SourceBufferName,
    timeRange: TimeRanges,
    playlistType: PlaylistLevelType,
    appendedPart?: Part | null,
  ) {
    // Keep the latest TimeRanges snapshot for this SourceBuffer.
    if (this.timeRanges) {
      this.timeRanges[elementaryStream] = timeRange;
    }
    // Check if any flagged fragments have been unloaded
    // excluding anything newer than appendedPartSn
    const appendedPartSn = (appendedPart?.fragment.sn || -1) as number;
    Object.keys(this.fragments).forEach((key) => {
      const fragmentEntity = this.fragments[key];
      if (!fragmentEntity) {
        return;
      }
      // Skip fragments at or beyond the sequence number just appended.
      if (appendedPartSn >= (fragmentEntity.body.sn as number)) {
        return;
      }
      // Entries never buffered nor loaded carry no range data; drop them
      // (for the matching playlist type) so they can be reloaded.
      if (!fragmentEntity.buffered && !fragmentEntity.loaded) {
        if (fragmentEntity.body.type === playlistType) {
          this.removeFragment(fragmentEntity.body);
        }
        return;
      }
      const esData = fragmentEntity.range[elementaryStream];
      if (!esData) {
        return;
      }
      // If any of the fragment's appended time ranges has been evicted,
      // untrack the fragment (stop at the first eviction found).
      esData.time.some((time: FragmentTimeRange) => {
        const isNotBuffered = !this.isTimeBuffered(
          time.startPTS,
          time.endPTS,
          timeRange,
        );
        if (isNotBuffered) {
          // Unregister partial fragment as it needs to load again to be reused
          this.removeFragment(fragmentEntity.body);
        }
        return isNotBuffered;
      });
    });
  }
  /**
   * Checks if the fragment passed in is loaded in the buffer properly
   * Partially loaded fragments will be registered as a partial fragment
   * @param data - FRAG_BUFFERED event payload (fragment, optional part, stats)
   */
  public detectPartialFragments(data: FragBufferedData) {
    const timeRanges = this.timeRanges;
    const { frag, part } = data;
    // Init segments have no media time range; already-buffered gap fragments
    // need no re-evaluation.
    if (!timeRanges || frag.sn === 'initSegment') {
      return;
    }
    const fragKey = getFragmentKey(frag);
    const fragmentEntity = this.fragments[fragKey];
    if (!fragmentEntity || (fragmentEntity.buffered && frag.gap)) {
      return;
    }
    // A fragment without relurl is a hint (not a real media resource).
    const isFragHint = !frag.relurl;
    // Record the buffered time ranges per SourceBuffer this fragment
    // contributed to.
    Object.keys(timeRanges).forEach((elementaryStream: SourceBufferName) => {
      const streamInfo = frag.elementaryStreams[elementaryStream];
      if (!streamInfo) {
        return;
      }
      const timeRange = timeRanges[elementaryStream] as TimeRanges;
      const partial = isFragHint || streamInfo.partial === true;
      fragmentEntity.range[elementaryStream] = this.getBufferedTimes(
        frag,
        part,
        partial,
        timeRange,
      );
    });
    // Loaded payload is no longer needed once buffered state is recorded.
    fragmentEntity.loaded = null;
    if (Object.keys(fragmentEntity.range).length) {
      fragmentEntity.buffered = true;
      const endList = (fragmentEntity.body.endList =
        frag.endList || fragmentEntity.body.endList);
      if (endList) {
        this.endListFragments[fragmentEntity.body.type] = fragmentEntity;
      }
      if (!isPartial(fragmentEntity)) {
        // Remove older fragment parts from lookup after frag is tracked as buffered
        this.removeParts((frag.sn as number) - 1, frag.type);
      }
    } else {
      // remove fragment if nothing was appended
      this.removeFragment(fragmentEntity.body);
    }
  }
private removeParts(snToKeep: number, levelType: PlaylistLevelType) {
const activeParts = this.activePartLists[levelType];
if (!activeParts) {
return;
}
this.activePartLists[levelType] = activeParts.filter(
(part) => (part.fragment.sn as number) >= snToKeep,
);
}
/**
 * Marks a fragment as buffered. When `force` is set and the fragment is
 * not yet tracked, a tracking entry is created for it first (registering
 * any GAP state on the tracker).
 */
public fragBuffered(frag: Fragment, force?: true) {
  const key = getFragmentKey(frag);
  let entity = this.fragments[key];
  if (!entity && force) {
    entity = this.fragments[key] = {
      body: frag,
      appendedPTS: null,
      loaded: null,
      buffered: false,
      range: Object.create(null),
    };
    if (frag.gap) {
      this.hasGaps = true;
    }
  }
  if (entity) {
    entity.loaded = null;
    entity.buffered = true;
  }
}
/**
 * Intersects the fragment's PTS interval with the source buffer's
 * TimeRanges, returning the playable sections and whether the fragment is
 * only partially buffered. Tolerance of `bufferPadding` is applied at each
 * buffered-range edge.
 * NOTE(review): the `part` parameter is not referenced in this body —
 * confirm whether it is still needed.
 */
private getBufferedTimes(
  fragment: Fragment,
  part: Part | null,
  partial: boolean,
  timeRange: TimeRanges,
): FragmentBufferedRange {
  const buffered: FragmentBufferedRange = {
    time: [],
    partial,
  };
  const startPTS = fragment.start;
  const endPTS = fragment.end;
  // Fall back to the nominal start/end when drift-adjusted bounds are absent
  const minEndPTS = fragment.minEndPTS || endPTS;
  const maxStartPTS = fragment.maxStartPTS || startPTS;
  for (let i = 0; i < timeRange.length; i++) {
    const startTime = timeRange.start(i) - this.bufferPadding;
    const endTime = timeRange.end(i) + this.bufferPadding;
    if (maxStartPTS >= startTime && minEndPTS <= endTime) {
      // Fragment is entirely contained in buffer
      // No need to check the other timeRange times since it's completely playable
      buffered.time.push({
        startPTS: Math.max(startPTS, timeRange.start(i)),
        endPTS: Math.min(endPTS, timeRange.end(i)),
      });
      break;
    } else if (startPTS < endTime && endPTS > startTime) {
      const start = Math.max(startPTS, timeRange.start(i));
      const end = Math.min(endPTS, timeRange.end(i));
      if (end > start) {
        buffered.partial = true;
        // Check for intersection with buffer
        // Get playable sections of the fragment
        buffered.time.push({
          startPTS: start,
          endPTS: end,
        });
      }
    } else if (endPTS <= startTime) {
      // No need to check the rest of the timeRange as it is in order
      break;
    }
  }
  return buffered;
}
/**
 * Returns the partially-buffered fragment whose padded interval contains
 * `time`, preferring the one with the greatest distance from both interval
 * edges. Returns null when no partial fragment covers `time`.
 */
public getPartialFragment(time: number): Fragment | null {
  const { bufferPadding, fragments } = this;
  let best: Fragment | null = null;
  let bestPadding = 0;
  for (const key of Object.keys(fragments)) {
    const entity = fragments[key];
    if (!entity || !isPartial(entity)) {
      continue;
    }
    const startTime = entity.body.start - bufferPadding;
    const endTime = entity.body.end + bufferPadding;
    if (time < startTime || time > endTime) {
      continue;
    }
    // Use the fragment that has the most padding from start and end time
    const padding = Math.min(time - startTime, endTime - time);
    if (bestPadding <= padding) {
      best = entity.body;
      bestPadding = padding;
    }
  }
  return best;
}
/**
 * True when the ENDLIST fragment of the given playlist type has been
 * appended to the buffer, fully or partially.
 */
public isEndListAppended(type: PlaylistLevelType): boolean {
  const entity = this.endListFragments[type];
  if (entity === undefined) {
    return false;
  }
  return entity.buffered || isPartial(entity);
}
/**
 * Reports the tracker's knowledge of a fragment: NOT_LOADED when untracked,
 * APPENDING while loaded but not yet buffered, then PARTIAL or OK.
 */
public getState(fragment: Fragment): FragmentState {
  const entity = this.fragments[getFragmentKey(fragment)];
  if (!entity) {
    return FragmentState.NOT_LOADED;
  }
  if (!entity.buffered) {
    return FragmentState.APPENDING;
  }
  return isPartial(entity) ? FragmentState.PARTIAL : FragmentState.OK;
}
/**
 * True when [startPTS, endPTS] lies entirely inside a single buffered
 * range, with `bufferPadding` of tolerance at each edge.
 */
private isTimeBuffered(
  startPTS: number,
  endPTS: number,
  timeRange: TimeRanges,
): boolean {
  for (let i = 0; i < timeRange.length; i++) {
    const rangeStart = timeRange.start(i) - this.bufferPadding;
    const rangeEnd = timeRange.end(i) + this.bufferPadding;
    if (startPTS >= rangeStart && endPTS <= rangeEnd) {
      return true;
    }
    if (endPTS <= rangeStart) {
      // Ranges are ordered; no later range can contain the interval
      return false;
    }
  }
  return false;
}
/**
 * Registers a freshly loaded fragment. Init segments (sn is not a number)
 * and bitrate-test fragments are ignored as they never enter the timeline.
 */
private onFragLoaded(event: Events.FRAG_LOADED, data: FragLoadedData) {
  const { frag, part } = data;
  if (frag.sn === 'initSegment' || frag.bitrateTest) {
    return;
  }
  // Fragment entity `loaded` FragLoadedData is null when loading parts
  this.fragments[getFragmentKey(frag)] = {
    body: frag,
    appendedPTS: null,
    loaded: part ? null : data,
    buffered: false,
    range: Object.create(null),
  };
}
/**
 * Records the latest buffered TimeRanges after an append, tracks the
 * appended part (when any), and evicts tracked fragments that no longer
 * intersect the buffer.
 */
private onBufferAppended(
  event: Events.BUFFER_APPENDED,
  data: BufferAppendedData,
) {
  const { frag, part, timeRanges } = data;
  if (frag.sn === 'initSegment') {
    return;
  }
  const playlistType = frag.type;
  if (part) {
    let activeParts = this.activePartLists[playlistType];
    if (!activeParts) {
      activeParts = this.activePartLists[playlistType] = [];
    }
    activeParts.push(part);
  }
  // Store the latest timeRanges loaded in the buffer
  this.timeRanges = timeRanges;
  for (const elementaryStream of Object.keys(
    timeRanges,
  ) as SourceBufferName[]) {
    this.detectEvictedFragments(
      elementaryStream,
      timeRanges[elementaryStream] as TimeRanges,
      playlistType,
      part,
    );
  }
}
// FRAG_BUFFERED handler: resolve the fragment's partial/full buffered state
private onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
  this.detectPartialFragments(data);
}
/** True when the given fragment is currently tracked. */
private hasFragment(fragment: Fragment): boolean {
  return Boolean(this.fragments[getFragmentKey(fragment)]);
}
/** True when any parts are tracked for the given playlist type. */
public hasParts(type: PlaylistLevelType): boolean {
  const parts = this.activePartLists[type];
  return !!parts && parts.length > 0;
}
/**
 * Removes tracked fragments of `playlistType` overlapping [start, end).
 * With `withGapOnly`, only GAP fragments are considered; buffered
 * fragments are always eligible, unbuffered ones only when
 * `unbufferedOnly` is set.
 */
public removeFragmentsInRange(
  start: number,
  end: number,
  playlistType: PlaylistLevelType,
  withGapOnly?: boolean,
  unbufferedOnly?: boolean,
) {
  if (withGapOnly && !this.hasGaps) {
    return;
  }
  for (const key of Object.keys(this.fragments)) {
    const entity = this.fragments[key];
    if (!entity) {
      continue;
    }
    const frag = entity.body;
    if (frag.type !== playlistType) {
      continue;
    }
    if (withGapOnly && !frag.gap) {
      continue;
    }
    const overlaps = frag.start < end && frag.end > start;
    if (overlaps && (entity.buffered || unbufferedOnly)) {
      this.removeFragment(frag);
    }
  }
}
/**
 * Drops a fragment from all tracking structures and resets its load stats
 * so it can be requested again. Its parts and any ENDLIST registration are
 * removed as well.
 */
public removeFragment(fragment: Fragment) {
  const key = getFragmentKey(fragment);
  fragment.stats.loaded = 0;
  fragment.clearElementaryStreamInfo();
  const parts = this.activePartLists[fragment.type];
  if (parts) {
    const sn = fragment.sn;
    this.activePartLists[fragment.type] = parts.filter(
      (part) => part.fragment.sn !== sn,
    );
  }
  delete this.fragments[key];
  if (fragment.endList) {
    delete this.endListFragments[fragment.type];
  }
}
// Resets the tracker, forgetting all fragments, parts, ENDLIST state and gaps
public removeAllFragments() {
  this.fragments = Object.create(null);
  this.endListFragments = Object.create(null);
  this.activePartLists = Object.create(null);
  this.hasGaps = false;
}
}
/**
 * True when a buffered fragment is only partially usable: it is a GAP
 * fragment, or any of its tracked elementary-stream ranges was appended
 * partially.
 *
 * The comparisons against `true` coerce the optional chains to a strict
 * boolean; previously the expression could evaluate to `undefined` (when
 * buffered with no partial flags set), contradicting the declared
 * `boolean` return type under strictNullChecks.
 */
function isPartial(fragmentEntity: FragmentEntity): boolean {
  return (
    fragmentEntity.buffered &&
    (fragmentEntity.body.gap === true ||
      fragmentEntity.range.video?.partial === true ||
      fragmentEntity.range.audio?.partial === true ||
      fragmentEntity.range.audiovideo?.partial === true)
  );
}
/** Builds the tracker lookup key for a fragment: "<type>_<level>_<sn>". */
function getFragmentKey(fragment: Fragment): string {
  return [fragment.type, fragment.level, fragment.sn].join('_');
}

View File

@@ -0,0 +1,372 @@
import type { BufferInfo } from '../utils/buffer-helper';
import { BufferHelper } from '../utils/buffer-helper';
import { ErrorTypes, ErrorDetails } from '../errors';
import { PlaylistLevelType } from '../types/loader';
import { Events } from '../events';
import { logger } from '../utils/logger';
import type Hls from '../hls';
import type { HlsConfig } from '../config';
import type { Fragment } from '../loader/fragment';
import type { FragmentTracker } from './fragment-tracker';
// Minimum stall duration before a BUFFER_STALLED_ERROR is reported (ms)
export const STALL_MINIMUM_DURATION_MS = 250;
// Largest start gap (seconds) the controller will jump over on its own
export const MAX_START_GAP_JUMP = 2.0;
// Amount added past currentTime when stepping over a buffer hole (seconds)
export const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
// Offset past a buffered range start used as the hole-skip target (seconds)
export const SKIP_BUFFER_RANGE_START = 0.05;
export default class GapController {
  private config: HlsConfig;
  private media: HTMLMediaElement | null = null;
  private fragmentTracker: FragmentTracker;
  private hls: Hls;
  // Number of nudge attempts made during the current stall
  private nudgeRetry: number = 0;
  // True once BUFFER_STALLED_ERROR has been reported for the current stall
  private stallReported: boolean = false;
  // performance.now() timestamp at which the current stall began; null when not stalled
  private stalled: number | null = null;
  // True once the playhead has been observed advancing
  private moved: boolean = false;
  // Last observed media.seeking value, used to detect seek begin/end edges
  private seeking: boolean = false;

  // NOTE(review): constructor parameters are implicitly 'any'; their types
  // are only enforced via the field assignments below
  constructor(config, media, fragmentTracker, hls) {
    this.config = config;
    this.media = media;
    this.fragmentTracker = fragmentTracker;
    this.hls = hls;
  }

  public destroy() {
    this.media = null;
    // @ts-ignore
    this.hls = this.fragmentTracker = null;
  }

  /**
   * Checks if the playhead is stuck within a gap, and if so, attempts to free it.
   * A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
   *
   * @param lastCurrentTime - Previously read playhead position
   */
  public poll(lastCurrentTime: number, activeFrag: Fragment | null) {
    const { config, media, stalled } = this;
    if (media === null) {
      return;
    }
    const { currentTime, seeking } = media;
    const seeked = this.seeking && !seeking;
    const beginSeek = !this.seeking && seeking;
    this.seeking = seeking;
    // The playhead is moving, no-op
    if (currentTime !== lastCurrentTime) {
      this.moved = true;
      if (!seeking) {
        this.nudgeRetry = 0;
      }
      if (stalled !== null) {
        // The playhead is now moving, but was previously stalled
        if (this.stallReported) {
          const stalledDuration = self.performance.now() - stalled;
          logger.warn(
            `playback not stuck anymore @${currentTime}, after ${Math.round(
              stalledDuration,
            )}ms`,
          );
          this.stallReported = false;
        }
        this.stalled = null;
      }
      return;
    }
    // Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek
    if (beginSeek || seeked) {
      this.stalled = null;
      return;
    }
    // The playhead should not be moving
    if (
      (media.paused && !seeking) ||
      media.ended ||
      media.playbackRate === 0 ||
      !BufferHelper.getBuffered(media).length
    ) {
      this.nudgeRetry = 0;
      return;
    }
    const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
    const nextStart = bufferInfo.nextStart || 0;
    if (seeking) {
      // Waiting for seeking in a buffered range to complete
      const hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP;
      // Next buffered range is too far ahead to jump to while still seeking
      const noBufferGap =
        !nextStart ||
        (activeFrag && activeFrag.start <= currentTime) ||
        (nextStart - currentTime > MAX_START_GAP_JUMP &&
          !this.fragmentTracker.getPartialFragment(currentTime));
      if (hasEnoughBuffer || noBufferGap) {
        return;
      }
      // Reset moved state when seeking to a point in or before a gap
      this.moved = false;
    }
    // Skip start gaps if we haven't played, but the last poll detected the start of a stall
    // The addition poll gives the browser a chance to jump the gap for us
    if (!this.moved && this.stalled !== null) {
      // There is no playable buffer (seeked, waiting for buffer)
      const isBuffered = bufferInfo.len > 0;
      if (!isBuffered && !nextStart) {
        return;
      }
      // Jump start gaps within jump threshold
      const startJump =
        Math.max(nextStart, bufferInfo.start || 0) - currentTime;
      // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
      // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
      // that begins over 1 target duration after the video start position.
      const level = this.hls.levels
        ? this.hls.levels[this.hls.currentLevel]
        : null;
      const isLive = level?.details?.live;
      const maxStartGapJump = isLive
        ? level!.details!.targetduration * 2
        : MAX_START_GAP_JUMP;
      const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
      if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
        if (!media.paused) {
          this._trySkipBufferHole(partialOrGap);
        }
        return;
      }
    }
    // Start tracking stall time
    const tnow = self.performance.now();
    if (stalled === null) {
      this.stalled = tnow;
      return;
    }
    const stalledDuration = tnow - stalled;
    if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
      // Report stalling after trying to fix
      this._reportStall(bufferInfo);
      // _reportStall may trigger error handling that detaches media
      if (!this.media) {
        return;
      }
    }
    const bufferedWithHoles = BufferHelper.bufferInfo(
      media,
      currentTime,
      config.maxBufferHole,
    );
    this._tryFixBufferStall(bufferedWithHoles, stalledDuration);
  }

  /**
   * Detects and attempts to fix known buffer stalling issues.
   * @param bufferInfo - The properties of the current buffer.
   * @param stalledDurationMs - The amount of time Hls.js has been stalling for.
   * @private
   */
  private _tryFixBufferStall(
    bufferInfo: BufferInfo,
    stalledDurationMs: number,
  ) {
    const { config, fragmentTracker, media } = this;
    if (media === null) {
      return;
    }
    const currentTime = media.currentTime;
    const partial = fragmentTracker.getPartialFragment(currentTime);
    if (partial) {
      // Try to skip over the buffer hole caused by a partial fragment
      // This method isn't limited by the size of the gap between buffered ranges
      const targetTime = this._trySkipBufferHole(partial);
      // we return here in this case, meaning
      // the branch below only executes when we haven't seeked to a new position
      if (targetTime || !this.media) {
        return;
      }
    }
    // if we haven't had to skip over a buffer hole of a partial fragment
    // we may just have to "nudge" the playlist as the browser decoding/rendering engine
    // needs to cross some sort of threshold covering all source-buffers content
    // to start playing properly.
    if (
      (bufferInfo.len > config.maxBufferHole ||
        (bufferInfo.nextStart &&
          bufferInfo.nextStart - currentTime < config.maxBufferHole)) &&
      stalledDurationMs > config.highBufferWatchdogPeriod * 1000
    ) {
      logger.warn('Trying to nudge playhead over buffer-hole');
      // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
      // We only try to jump the hole if it's under the configured size
      // Reset stalled so to rearm watchdog timer
      this.stalled = null;
      this._tryNudgeBuffer();
    }
  }

  /**
   * Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
   * @param bufferLen - The playhead distance from the end of the current buffer segment.
   * @private
   */
  private _reportStall(bufferInfo: BufferInfo) {
    const { hls, media, stallReported } = this;
    if (!stallReported && media) {
      // Report stalled error once
      this.stallReported = true;
      const error = new Error(
        `Playback stalling at @${
          media.currentTime
        } due to low buffer (${JSON.stringify(bufferInfo)})`,
      );
      logger.warn(error.message);
      hls.trigger(Events.ERROR, {
        type: ErrorTypes.MEDIA_ERROR,
        details: ErrorDetails.BUFFER_STALLED_ERROR,
        fatal: false,
        error,
        buffer: bufferInfo.len,
      });
    }
  }

  /**
   * Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
   * @param partial - The partial fragment found at the current time (where playback is stalling).
   * @returns the new playhead position, or 0 when no skip was performed.
   * @private
   */
  private _trySkipBufferHole(partial: Fragment | null): number {
    const { config, hls, media } = this;
    if (media === null) {
      return 0;
    }
    // Check if currentTime is between unbuffered regions of partial fragments
    const currentTime = media.currentTime;
    const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
    const startTime =
      currentTime < bufferInfo.start ? bufferInfo.start : bufferInfo.nextStart;
    if (startTime) {
      const bufferStarved = bufferInfo.len <= config.maxBufferHole;
      const waiting =
        bufferInfo.len > 0 && bufferInfo.len < 1 && media.readyState < 3;
      const gapLength = startTime - currentTime;
      if (gapLength > 0 && (bufferStarved || waiting)) {
        // Only allow large gaps to be skipped if it is a start gap, or all fragments in skip range are partial
        if (gapLength > config.maxBufferHole) {
          const { fragmentTracker } = this;
          let startGap = false;
          if (currentTime === 0) {
            const startFrag = fragmentTracker.getAppendedFrag(
              0,
              PlaylistLevelType.MAIN,
            );
            if (startFrag && startTime < startFrag.end) {
              startGap = true;
            }
          }
          if (!startGap) {
            const startProvisioned =
              partial ||
              fragmentTracker.getAppendedFrag(
                currentTime,
                PlaylistLevelType.MAIN,
              );
            if (startProvisioned) {
              // Walk fragment-by-fragment to the next buffered range; bail
              // if any stretch of the gap is not covered by a known fragment
              let moreToLoad = false;
              let pos = startProvisioned.end;
              while (pos < startTime) {
                const provisioned = fragmentTracker.getPartialFragment(pos);
                if (provisioned) {
                  pos += provisioned.duration;
                } else {
                  moreToLoad = true;
                  break;
                }
              }
              if (moreToLoad) {
                return 0;
              }
            }
          }
        }
        const targetTime = Math.max(
          startTime + SKIP_BUFFER_RANGE_START,
          currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS,
        );
        logger.warn(
          `skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`,
        );
        this.moved = true;
        this.stalled = null;
        media.currentTime = targetTime;
        if (partial && !partial.gap) {
          const error = new Error(
            `fragment loaded with buffer holes, seeking from ${currentTime} to ${targetTime}`,
          );
          hls.trigger(Events.ERROR, {
            type: ErrorTypes.MEDIA_ERROR,
            details: ErrorDetails.BUFFER_SEEK_OVER_HOLE,
            fatal: false,
            error,
            reason: error.message,
            frag: partial,
          });
        }
        return targetTime;
      }
    }
    return 0;
  }

  /**
   * Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
   * Gives up with a fatal BUFFER_STALLED_ERROR after `nudgeMaxRetry` attempts.
   * @private
   */
  private _tryNudgeBuffer() {
    const { config, hls, media, nudgeRetry } = this;
    if (media === null) {
      return;
    }
    const currentTime = media.currentTime;
    this.nudgeRetry++;
    if (nudgeRetry < config.nudgeMaxRetry) {
      const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
      // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
      const error = new Error(
        `Nudging 'currentTime' from ${currentTime} to ${targetTime}`,
      );
      logger.warn(error.message);
      media.currentTime = targetTime;
      hls.trigger(Events.ERROR, {
        type: ErrorTypes.MEDIA_ERROR,
        details: ErrorDetails.BUFFER_NUDGE_ON_STALL,
        error,
        fatal: false,
      });
    } else {
      const error = new Error(
        `Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`,
      );
      logger.error(error.message);
      hls.trigger(Events.ERROR, {
        type: ErrorTypes.MEDIA_ERROR,
        details: ErrorDetails.BUFFER_STALLED_ERROR,
        error,
        fatal: true,
      });
    }
  }
}

View File

@@ -0,0 +1,423 @@
import { Events } from '../events';
import {
sendAddTrackEvent,
clearCurrentCues,
removeCuesInRange,
} from '../utils/texttrack-utils';
import * as ID3 from '../demux/id3';
import {
DateRange,
isDateRangeCueAttribute,
isSCTE35Attribute,
} from '../loader/date-range';
import { MetadataSchema } from '../types/demuxer';
import type {
BufferFlushingData,
FragParsingMetadataData,
LevelUpdatedData,
MediaAttachedData,
} from '../types/events';
import type { ComponentAPI } from '../types/component-api';
import type Hls from '../hls';
declare global {
interface Window {
WebKitDataCue: VTTCue | void;
}
}
// Smallest duration given to a cue whose computed duration is non-positive
const MIN_CUE_DURATION = 0.25;

/**
 * Resolves the cue constructor available in the current global scope:
 * VTTCue where supported, TextTrackCue otherwise, or undefined outside
 * browser/worker environments.
 */
function getCueClass(): typeof VTTCue | typeof TextTrackCue | undefined {
  if (typeof self !== 'undefined') {
    return self.VTTCue || self.TextTrackCue;
  }
  return undefined;
}
/**
 * Creates a metadata cue carrying `data` (and optionally `type`) as expando
 * fields on the cue object. When the runtime rejects those extra fields,
 * falls back to serializing the payload into the cue text instead.
 */
function createCueWithDataFields(
  Cue: typeof VTTCue | typeof TextTrackCue,
  startTime: number,
  endTime: number,
  data: Object,
  type?: string,
): VTTCue | TextTrackCue | undefined {
  let cue = new Cue(startTime, endTime, '');
  try {
    (cue as any).value = data;
    if (type) {
      (cue as any).type = type;
    }
  } catch (e) {
    // Some engines expose cues as sealed host objects; encode the payload
    // into the cue text instead
    const payload = type ? { type, ...data } : data;
    cue = new Cue(startTime, endTime, JSON.stringify(payload));
  }
  return cue;
}
// VTTCue latest draft allows an infinite duration, fallback
// to MAX_VALUE if necessary
// (probed once at module load by constructing a throwaway cue)
const MAX_CUE_ENDTIME = (() => {
  const Cue = getCueClass();
  try {
    Cue && new Cue(0, Number.POSITIVE_INFINITY, '');
  } catch (e) {
    return Number.MAX_VALUE;
  }
  return Number.POSITIVE_INFINITY;
})();
/**
 * Converts an absolute date to a media-timeline position in seconds,
 * given the playlist's epoch-seconds `offset`.
 */
function dateRangeDateToTimelineSeconds(date: Date, offset: number): number {
  const epochSeconds = date.getTime() / 1000;
  return epochSeconds - offset;
}
/**
 * Decodes a hex string into an ArrayBuffer, one byte per hex pair.
 * Accepts an optional "0x" prefix and optional spaces between bytes
 * (e.g. "0xDEADBEEF" or "DE AD BE EF").
 *
 * The parameter was previously untyped (implicit `any`); it is now
 * `string`, with an explicit numeric mapping that preserves the original
 * "0x"-prefixed Number() coercion behavior.
 *
 * @param str - hex string to decode
 * @returns buffer containing the decoded bytes
 */
function hexToArrayBuffer(str: string): ArrayBuffer {
  const bytes = str
    .replace(/^0x/, '')
    .replace(/([\da-fA-F]{2}) ?/g, '0x$1 ')
    .replace(/ +$/, '')
    .split(' ')
    .map(Number);
  return Uint8Array.from(bytes).buffer;
}
class ID3TrackController implements ComponentAPI {
  private hls: Hls;
  // Hidden 'metadata' TextTrack used to surface ID3/emsg/DateRange cues
  private id3Track: TextTrack | null = null;
  private media: HTMLMediaElement | null = null;
  // Cues already appended per DateRange id, so updated playlist tags can
  // refresh existing cues instead of duplicating them
  private dateRangeCuesAppended: Record<
    string,
    {
      cues: Record<string, VTTCue | TextTrackCue>;
      dateRange: DateRange;
      durationKnown: boolean;
    }
  > = {};

  constructor(hls) {
    this.hls = hls;
    this._registerListeners();
  }

  destroy() {
    this._unregisterListeners();
    this.id3Track = null;
    this.media = null;
    this.dateRangeCuesAppended = {};
    // @ts-ignore
    this.hls = null;
  }

  private _registerListeners() {
    const { hls } = this;
    hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
    hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
    hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  }

  private _unregisterListeners() {
    const { hls } = this;
    hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.off(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
    hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
    hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  }

  // Add ID3 metatadata text track.
  protected onMediaAttached(
    event: Events.MEDIA_ATTACHED,
    data: MediaAttachedData,
  ): void {
    this.media = data.media;
  }

  protected onMediaDetaching(): void {
    if (!this.id3Track) {
      return;
    }
    clearCurrentCues(this.id3Track);
    this.id3Track = null;
    this.media = null;
    this.dateRangeCuesAppended = {};
  }

  private onManifestLoading() {
    this.dateRangeCuesAppended = {};
  }

  // Creates (or reuses) the hidden metadata track on the media element
  createTrack(media: HTMLMediaElement): TextTrack {
    const track = this.getID3Track(media.textTracks) as TextTrack;
    track.mode = 'hidden';
    return track;
  }

  // Finds an existing 'metadata'/'id3' track, or adds a new one
  getID3Track(textTracks: TextTrackList): TextTrack | void {
    if (!this.media) {
      return;
    }
    for (let i = 0; i < textTracks.length; i++) {
      const textTrack: TextTrack = textTracks[i];
      if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
        // send 'addtrack' when reusing the textTrack for metadata,
        // same as what we do for captions
        sendAddTrackEvent(textTrack, this.media);
        return textTrack;
      }
    }
    return this.media.addTextTrack('metadata', 'id3');
  }

  // Converts parsed ID3/emsg samples into TextTrack cues
  onFragParsingMetadata(
    event: Events.FRAG_PARSING_METADATA,
    data: FragParsingMetadataData,
  ) {
    if (!this.media) {
      return;
    }
    const {
      hls: {
        config: { enableEmsgMetadataCues, enableID3MetadataCues },
      },
    } = this;
    if (!enableEmsgMetadataCues && !enableID3MetadataCues) {
      return;
    }
    const { samples } = data;
    // create track dynamically
    if (!this.id3Track) {
      this.id3Track = this.createTrack(this.media);
    }
    const Cue = getCueClass();
    if (!Cue) {
      return;
    }
    for (let i = 0; i < samples.length; i++) {
      const type = samples[i].type;
      // NOTE(review): `!enableID3MetadataCues` also skips emsg samples when
      // enableEmsgMetadataCues is true — verify this filter is intentional
      if (
        (type === MetadataSchema.emsg && !enableEmsgMetadataCues) ||
        !enableID3MetadataCues
      ) {
        continue;
      }
      const frames = ID3.getID3Frames(samples[i].data);
      if (frames) {
        const startTime = samples[i].pts;
        let endTime: number = startTime + samples[i].duration;
        if (endTime > MAX_CUE_ENDTIME) {
          endTime = MAX_CUE_ENDTIME;
        }
        // Give zero/negative-duration samples a minimal visible duration
        const timeDiff = endTime - startTime;
        if (timeDiff <= 0) {
          endTime = startTime + MIN_CUE_DURATION;
        }
        for (let j = 0; j < frames.length; j++) {
          const frame = frames[j];
          // Safari doesn't put the timestamp frame in the TextTrack
          if (!ID3.isTimeStampFrame(frame)) {
            // add a bounds to any unbounded cues
            this.updateId3CueEnds(startTime, type);
            const cue = createCueWithDataFields(
              Cue,
              startTime,
              endTime,
              frame,
              type,
            );
            if (cue) {
              this.id3Track.addCue(cue);
            }
          }
        }
      }
    }
  }

  // Closes any still-open (endTime === MAX_CUE_ENDTIME) cues of the same
  // schema at the point a newer cue starts
  updateId3CueEnds(startTime: number, type: MetadataSchema) {
    const cues = this.id3Track?.cues;
    if (cues) {
      for (let i = cues.length; i--; ) {
        const cue = cues[i] as any;
        if (
          cue.type === type &&
          cue.startTime < startTime &&
          cue.endTime === MAX_CUE_ENDTIME
        ) {
          cue.endTime = startTime;
        }
      }
    }
  }

  // Removes metadata cues covering a flushed buffer range, filtered by the
  // flushed buffer type (audio → ID3 cues, video → emsg cues)
  onBufferFlushing(
    event: Events.BUFFER_FLUSHING,
    { startOffset, endOffset, type }: BufferFlushingData,
  ) {
    const { id3Track, hls } = this;
    if (!hls) {
      return;
    }
    const {
      config: { enableEmsgMetadataCues, enableID3MetadataCues },
    } = hls;
    if (id3Track && (enableEmsgMetadataCues || enableID3MetadataCues)) {
      let predicate;
      if (type === 'audio') {
        predicate = (cue) =>
          (cue as any).type === MetadataSchema.audioId3 &&
          enableID3MetadataCues;
      } else if (type === 'video') {
        predicate = (cue) =>
          (cue as any).type === MetadataSchema.emsg && enableEmsgMetadataCues;
      } else {
        predicate = (cue) =>
          ((cue as any).type === MetadataSchema.audioId3 &&
            enableID3MetadataCues) ||
          ((cue as any).type === MetadataSchema.emsg && enableEmsgMetadataCues);
      }
      removeCuesInRange(id3Track, startOffset, endOffset, predicate);
    }
  }

  // Syncs DateRange cues with the latest playlist: removes cues for dropped
  // DateRanges, then creates/updates one cue per DateRange attribute
  onLevelUpdated(event: Events.LEVEL_UPDATED, { details }: LevelUpdatedData) {
    if (
      !this.media ||
      !details.hasProgramDateTime ||
      !this.hls.config.enableDateRangeMetadataCues
    ) {
      return;
    }
    const { dateRangeCuesAppended, id3Track } = this;
    const { dateRanges } = details;
    const ids = Object.keys(dateRanges);
    // Remove cues from track not found in details.dateRanges
    if (id3Track) {
      const idsToRemove = Object.keys(dateRangeCuesAppended).filter(
        (id) => !ids.includes(id),
      );
      for (let i = idsToRemove.length; i--; ) {
        const id = idsToRemove[i];
        Object.keys(dateRangeCuesAppended[id].cues).forEach((key) => {
          id3Track.removeCue(dateRangeCuesAppended[id].cues[key]);
        });
        delete dateRangeCuesAppended[id];
      }
    }
    // Exit if the playlist does not have Date Ranges or does not have Program Date Time
    const lastFragment = details.fragments[details.fragments.length - 1];
    if (ids.length === 0 || !Number.isFinite(lastFragment?.programDateTime)) {
      return;
    }
    if (!this.id3Track) {
      this.id3Track = this.createTrack(this.media);
    }
    // Epoch-seconds offset mapping wall-clock dates onto the media timeline
    const dateTimeOffset =
      (lastFragment.programDateTime as number) / 1000 - lastFragment.start;
    const Cue = getCueClass();
    for (let i = 0; i < ids.length; i++) {
      const id = ids[i];
      const dateRange = dateRanges[id];
      const startTime = dateRangeDateToTimelineSeconds(
        dateRange.startDate,
        dateTimeOffset,
      );
      // Process DateRanges to determine end-time (known DURATION, END-DATE, or END-ON-NEXT)
      const appendedDateRangeCues = dateRangeCuesAppended[id];
      const cues = appendedDateRangeCues?.cues || {};
      let durationKnown = appendedDateRangeCues?.durationKnown || false;
      let endTime = MAX_CUE_ENDTIME;
      const endDate = dateRange.endDate;
      if (endDate) {
        endTime = dateRangeDateToTimelineSeconds(endDate, dateTimeOffset);
        durationKnown = true;
      } else if (dateRange.endOnNext && !durationKnown) {
        // END-ON-NEXT: end at the start of the next DateRange of the same class
        const nextDateRangeWithSameClass = ids.reduce(
          (candidateDateRange: DateRange | null, id) => {
            if (id !== dateRange.id) {
              const otherDateRange = dateRanges[id];
              if (
                otherDateRange.class === dateRange.class &&
                otherDateRange.startDate > dateRange.startDate &&
                (!candidateDateRange ||
                  dateRange.startDate < candidateDateRange.startDate)
              ) {
                return otherDateRange;
              }
            }
            return candidateDateRange;
          },
          null,
        );
        if (nextDateRangeWithSameClass) {
          endTime = dateRangeDateToTimelineSeconds(
            nextDateRangeWithSameClass.startDate,
            dateTimeOffset,
          );
          durationKnown = true;
        }
      }
      // Create TextTrack Cues for each MetadataGroup Item (select DateRange attribute)
      // This is to emulate Safari HLS playback handling of DateRange tags
      const attributes = Object.keys(dateRange.attr);
      for (let j = 0; j < attributes.length; j++) {
        const key = attributes[j];
        if (!isDateRangeCueAttribute(key)) {
          continue;
        }
        const cue = cues[key];
        if (cue) {
          // Existing cue: only extend it when the duration became known
          if (durationKnown && !appendedDateRangeCues.durationKnown) {
            cue.endTime = endTime;
          }
        } else if (Cue) {
          let data = dateRange.attr[key];
          if (isSCTE35Attribute(key)) {
            data = hexToArrayBuffer(data);
          }
          const cue = createCueWithDataFields(
            Cue,
            startTime,
            endTime,
            { key, data },
            MetadataSchema.dateRange,
          );
          if (cue) {
            cue.id = id;
            this.id3Track.addCue(cue);
            cues[key] = cue;
          }
        }
      }
      // Keep track of processed DateRanges by ID for updating cues with new DateRange tag attributes
      dateRangeCuesAppended[id] = {
        cues,
        dateRange,
        durationKnown,
      };
    }
  }
}
export default ID3TrackController;

View File

@@ -0,0 +1,262 @@
import { LevelDetails } from '../loader/level-details';
import { ErrorDetails } from '../errors';
import { Events } from '../events';
import type {
ErrorData,
LevelUpdatedData,
MediaAttachingData,
} from '../types/events';
import { logger } from '../utils/logger';
import type { ComponentAPI } from '../types/component-api';
import type Hls from '../hls';
import type { HlsConfig } from '../config';
export default class LatencyController implements ComponentAPI {
private hls: Hls;
private readonly config: HlsConfig;
private media: HTMLMediaElement | null = null;
private levelDetails: LevelDetails | null = null;
private currentTime: number = 0;
private stallCount: number = 0;
private _latency: number | null = null;
private timeupdateHandler = () => this.timeupdate();
constructor(hls: Hls) {
this.hls = hls;
this.config = hls.config;
this.registerListeners();
}
get latency(): number {
return this._latency || 0;
}
get maxLatency(): number {
const { config, levelDetails } = this;
if (config.liveMaxLatencyDuration !== undefined) {
return config.liveMaxLatencyDuration;
}
return levelDetails
? config.liveMaxLatencyDurationCount * levelDetails.targetduration
: 0;
}
get targetLatency(): number | null {
const { levelDetails } = this;
if (levelDetails === null) {
return null;
}
const { holdBack, partHoldBack, targetduration } = levelDetails;
const { liveSyncDuration, liveSyncDurationCount, lowLatencyMode } =
this.config;
const userConfig = this.hls.userConfig;
let targetLatency = lowLatencyMode ? partHoldBack || holdBack : holdBack;
if (
userConfig.liveSyncDuration ||
userConfig.liveSyncDurationCount ||
targetLatency === 0
) {
targetLatency =
liveSyncDuration !== undefined
? liveSyncDuration
: liveSyncDurationCount * targetduration;
}
const maxLiveSyncOnStallIncrease = targetduration;
const liveSyncOnStallIncrease = 1.0;
return (
targetLatency +
Math.min(
this.stallCount * liveSyncOnStallIncrease,
maxLiveSyncOnStallIncrease,
)
);
}
get liveSyncPosition(): number | null {
const liveEdge = this.estimateLiveEdge();
const targetLatency = this.targetLatency;
const levelDetails = this.levelDetails;
if (liveEdge === null || targetLatency === null || levelDetails === null) {
return null;
}
const edge = levelDetails.edge;
const syncPosition = liveEdge - targetLatency - this.edgeStalled;
const min = edge - levelDetails.totalduration;
const max =
edge -
((this.config.lowLatencyMode && levelDetails.partTarget) ||
levelDetails.targetduration);
return Math.min(Math.max(min, syncPosition), max);
}
get drift(): number {
const { levelDetails } = this;
if (levelDetails === null) {
return 1;
}
return levelDetails.drift;
}
get edgeStalled(): number {
const { levelDetails } = this;
if (levelDetails === null) {
return 0;
}
const maxLevelUpdateAge =
((this.config.lowLatencyMode && levelDetails.partTarget) ||
levelDetails.targetduration) * 3;
return Math.max(levelDetails.age - maxLevelUpdateAge, 0);
}
private get forwardBufferLength(): number {
const { media, levelDetails } = this;
if (!media || !levelDetails) {
return 0;
}
const bufferedRanges = media.buffered.length;
return (
(bufferedRanges
? media.buffered.end(bufferedRanges - 1)
: levelDetails.edge) - this.currentTime
);
}
public destroy(): void {
this.unregisterListeners();
this.onMediaDetaching();
this.levelDetails = null;
// @ts-ignore
this.hls = this.timeupdateHandler = null;
}
private registerListeners() {
this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
this.hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
this.hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
this.hls.on(Events.ERROR, this.onError, this);
}
private unregisterListeners() {
this.hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
this.hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
this.hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
this.hls.off(Events.ERROR, this.onError, this);
}
private onMediaAttached(
event: Events.MEDIA_ATTACHED,
data: MediaAttachingData,
) {
this.media = data.media;
this.media.addEventListener('timeupdate', this.timeupdateHandler);
}
private onMediaDetaching() {
if (this.media) {
this.media.removeEventListener('timeupdate', this.timeupdateHandler);
this.media = null;
}
}
private onManifestLoading() {
this.levelDetails = null;
this._latency = null;
this.stallCount = 0;
}
private onLevelUpdated(
event: Events.LEVEL_UPDATED,
{ details }: LevelUpdatedData,
) {
this.levelDetails = details;
if (details.advanced) {
this.timeupdate();
}
if (!details.live && this.media) {
this.media.removeEventListener('timeupdate', this.timeupdateHandler);
}
}
// Count buffer stalls. On live streams the warning signals that the
// target latency is being adjusted in response to the stall.
private onError(event: Events.ERROR, data: ErrorData) {
  if (data.details === ErrorDetails.BUFFER_STALLED_ERROR) {
    this.stallCount++;
    if (this.levelDetails?.live) {
      logger.warn(
        '[playback-rate-controller]: Stall detected, adjusting target latency',
      );
    }
  }
}
// Recomputes live latency from the playhead position and, in low-latency
// live playback, nudges media.playbackRate to converge on the target
// latency. No-ops outside low-latency live mode or when rate capping is
// disabled (maxLiveSyncPlaybackRate === 1).
private timeupdate() {
  const { media, levelDetails } = this;
  if (!media || !levelDetails) {
    return;
  }
  this.currentTime = media.currentTime;
  const latency = this.computeLatency();
  if (latency === null) {
    return;
  }
  this._latency = latency;
  // Adapt playbackRate to meet target latency in low-latency mode
  const { lowLatencyMode, maxLiveSyncPlaybackRate } = this.config;
  if (
    !lowLatencyMode ||
    maxLiveSyncPlaybackRate === 1 ||
    !levelDetails.live
  ) {
    return;
  }
  const targetLatency = this.targetLatency;
  if (targetLatency === null) {
    return;
  }
  const distanceFromTarget = latency - targetLatency;
  // Only adjust playbackRate when within one target duration of targetLatency
  // and more than one second from under-buffering.
  // Playback further than one target duration from target can be considered DVR playback.
  const liveMinLatencyDuration = Math.min(
    this.maxLatency,
    targetLatency + levelDetails.targetduration,
  );
  const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
  if (
    inLiveRange &&
    distanceFromTarget > 0.05 &&
    this.forwardBufferLength > 1
  ) {
    // Sigmoid maps distance-from-target (plus edge-stall penalty) into a
    // rate below 2, quantized to 0.05 steps to avoid constant tiny changes,
    // then clamped to [1, max].
    const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
    const rate =
      Math.round(
        (2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled))) *
          20,
      ) / 20;
    media.playbackRate = Math.min(max, Math.max(1, rate));
  } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
    // Back at/near target: restore normal speed (0 means paused — leave it).
    media.playbackRate = 1;
  }
}
// Best estimate of the live edge: the playlist edge advanced by its age.
private estimateLiveEdge(): number | null {
  const { levelDetails } = this;
  return levelDetails === null ? null : levelDetails.edge + levelDetails.age;
}
// Latency is the estimated live edge minus the current playback position.
private computeLatency(): number | null {
  const liveEdge = this.estimateLiveEdge();
  return liveEdge === null ? null : liveEdge - this.currentTime;
}
}

View File

@@ -0,0 +1,693 @@
import {
ManifestLoadedData,
ManifestParsedData,
LevelLoadedData,
ErrorData,
LevelSwitchingData,
LevelsUpdatedData,
ManifestLoadingData,
FragBufferedData,
} from '../types/events';
import { Level, VideoRangeValues, isVideoRange } from '../types/level';
import { Events } from '../events';
import { ErrorTypes, ErrorDetails } from '../errors';
import {
areCodecsMediaSourceSupported,
codecsSetSelectionPreferenceValue,
convertAVC1ToAVCOTI,
getCodecCompatibleName,
videoCodecPreferenceValue,
} from '../utils/codecs';
import BasePlaylistController from './base-playlist-controller';
import { PlaylistContextType, PlaylistLevelType } from '../types/loader';
import ContentSteeringController from './content-steering-controller';
import { reassignFragmentLevelIndexes } from '../utils/level-helper';
import { hlsDefaultConfig } from '../config';
import type Hls from '../hls';
import type { HlsUrlParameters, LevelParsed } from '../types/level';
import type { MediaPlaylist } from '../types/media-playlist';
// Module-level UA-sniff cache, lazily computed (via ||=) in onManifestLoaded.
let chromeOrFirefox: boolean;
/**
 * Builds and owns the list of quality levels (variant streams) parsed from
 * the multivariant playlist: filters out unsupported codecs, folds redundant
 * streams into Pathways for content steering, sorts levels, drives level
 * switching, and (re)loads the active level playlist.
 */
export default class LevelController extends BasePlaylistController {
  // Codec-filtered levels, sorted lowest to highest (filterAndSortMediaOptions).
  private _levels: Level[] = [];
  // Index (into sorted _levels) of the first variant listed in the manifest.
  private _firstLevel: number = -1;
  // Last maxAutoLevel broadcast via MAX_AUTO_LEVEL_UPDATED (checkMaxAutoUpdated).
  private _maxAutoLevel: number = -1;
  // Explicit start level; when set it overrides config.startLevel.
  private _startLevel?: number;
  private currentLevel: Level | null = null;
  private currentLevelIndex: number = -1;
  // -1 while ABR selects the level; otherwise the user-forced level index.
  private manualLevelIndex: number = -1;
  private steering: ContentSteeringController | null;
  public onParsedComplete!: Function;
  constructor(
    hls: Hls,
    contentSteeringController: ContentSteeringController | null,
  ) {
    super(hls, '[level-controller]');
    this.steering = contentSteeringController;
    this._registerListeners();
  }
  // Subscribe to the hls.js events this controller reacts to.
  private _registerListeners() {
    const { hls } = this;
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
    hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
    hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
    hls.on(Events.ERROR, this.onError, this);
  }
  // Mirror of _registerListeners.
  private _unregisterListeners() {
    const { hls } = this;
    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
    hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
    hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
    hls.off(Events.ERROR, this.onError, this);
  }
  public destroy() {
    this._unregisterListeners();
    this.steering = null;
    this.resetLevels();
    super.destroy();
  }
  public stopLoad(): void {
    const levels = this._levels;
    // clean up live level details to force reload them, and reset load errors
    levels.forEach((level) => {
      level.loadError = 0;
      level.fragmentError = 0;
    });
    super.stopLoad();
  }
  // Drop all level state (used on new manifest load and on destroy).
  private resetLevels() {
    this._startLevel = undefined;
    this.manualLevelIndex = -1;
    this.currentLevelIndex = -1;
    this.currentLevel = null;
    this._levels = [];
    this._maxAutoLevel = -1;
  }
  private onManifestLoading(
    event: Events.MANIFEST_LOADING,
    data: ManifestLoadingData,
  ) {
    this.resetLevels();
  }
  /**
   * Converts the parsed variant list into Level objects: clears unsupported
   * audio codecs, normalizes codec strings, drops variants with unsupported
   * codecs, and deduplicates redundant streams (same bitrate/resolution/codec
   * signature) by assigning synthetic Pathway IDs.
   */
  protected onManifestLoaded(
    event: Events.MANIFEST_LOADED,
    data: ManifestLoadedData,
  ) {
    const preferManagedMediaSource = this.hls.config.preferManagedMediaSource;
    const levels: Level[] = [];
    // Keyed by pathway+bitrate+resolution+codec signature to detect redundant streams.
    const redundantSet: { [key: string]: Level } = {};
    // Count of redundant variants per key, used to synthesize Pathway IDs.
    const generatePathwaySet: { [key: string]: number } = {};
    let resolutionFound = false;
    let videoCodecFound = false;
    let audioCodecFound = false;
    data.levels.forEach((levelParsed: LevelParsed) => {
      const attributes = levelParsed.attrs;
      // erase audio codec info if browser does not support mp4a.40.34.
      // demuxer will autodetect codec and fallback to mpeg/audio
      let { audioCodec, videoCodec } = levelParsed;
      // NOTE(review): when audioCodec is undefined, `undefined !== -1` is
      // true, so this branch is also entered; harmless since it only clears
      // audioCodec — confirm intended.
      if (audioCodec?.indexOf('mp4a.40.34') !== -1) {
        chromeOrFirefox ||= /chrome|firefox/i.test(navigator.userAgent);
        if (chromeOrFirefox) {
          levelParsed.audioCodec = audioCodec = undefined;
        }
      }
      if (audioCodec) {
        levelParsed.audioCodec = audioCodec = getCodecCompatibleName(
          audioCodec,
          preferManagedMediaSource,
        );
      }
      if (videoCodec?.indexOf('avc1') === 0) {
        videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
      }
      // only keep levels with supported audio/video codecs
      const { width, height, unknownCodecs } = levelParsed;
      resolutionFound ||= !!(width && height);
      videoCodecFound ||= !!videoCodec;
      audioCodecFound ||= !!audioCodec;
      if (
        unknownCodecs?.length ||
        (audioCodec &&
          !areCodecsMediaSourceSupported(
            audioCodec,
            'audio',
            preferManagedMediaSource,
          )) ||
        (videoCodec &&
          !areCodecsMediaSourceSupported(
            videoCodec,
            'video',
            preferManagedMediaSource,
          ))
      ) {
        return;
      }
      const {
        CODECS,
        'FRAME-RATE': FRAMERATE,
        'HDCP-LEVEL': HDCP,
        'PATHWAY-ID': PATHWAY,
        RESOLUTION,
        'VIDEO-RANGE': VIDEO_RANGE,
      } = attributes;
      const contentSteeringPrefix = `${PATHWAY || '.'}-`;
      const levelKey = `${contentSteeringPrefix}${levelParsed.bitrate}-${RESOLUTION}-${FRAMERATE}-${CODECS}-${VIDEO_RANGE}-${HDCP}`;
      if (!redundantSet[levelKey]) {
        const level = new Level(levelParsed);
        redundantSet[levelKey] = level;
        generatePathwaySet[levelKey] = 1;
        levels.push(level);
      } else if (
        redundantSet[levelKey].uri !== levelParsed.url &&
        !levelParsed.attrs['PATHWAY-ID']
      ) {
        // Assign Pathway IDs to Redundant Streams (default Pathways is ".". Redundant Streams "..", "...", and so on.)
        // Content Steering controller to handles Pathway fallback on error
        const pathwayCount = (generatePathwaySet[levelKey] += 1);
        levelParsed.attrs['PATHWAY-ID'] = new Array(pathwayCount + 1).join('.');
        const level = new Level(levelParsed);
        redundantSet[levelKey] = level;
        levels.push(level);
      } else {
        // Same signature and same URI: merge rendition group ids instead.
        redundantSet[levelKey].addGroupId('audio', attributes.AUDIO);
        redundantSet[levelKey].addGroupId('text', attributes.SUBTITLES);
      }
    });
    this.filterAndSortMediaOptions(
      levels,
      data,
      resolutionFound,
      videoCodecFound,
      audioCodecFound,
    );
  }
  /**
   * Second stage of manifest processing: removes audio-only/invalid levels
   * when proper video variants exist, filters unsupported audio tracks,
   * sorts levels, applies content steering, seeds the bandwidth estimate
   * from the first variant, and fires MANIFEST_PARSED.
   */
  private filterAndSortMediaOptions(
    filteredLevels: Level[],
    data: ManifestLoadedData,
    resolutionFound: boolean,
    videoCodecFound: boolean,
    audioCodecFound: boolean,
  ) {
    let audioTracks: MediaPlaylist[] = [];
    let subtitleTracks: MediaPlaylist[] = [];
    let levels = filteredLevels;
    // remove audio-only and invalid video-range levels if we also have levels with video codecs or RESOLUTION signalled
    if ((resolutionFound || videoCodecFound) && audioCodecFound) {
      levels = levels.filter(
        ({ videoCodec, videoRange, width, height }) =>
          (!!videoCodec || !!(width && height)) && isVideoRange(videoRange),
      );
    }
    if (levels.length === 0) {
      // Dispatch error after MANIFEST_LOADED is done propagating
      Promise.resolve().then(() => {
        if (this.hls) {
          if (data.levels.length) {
            this.warn(
              `One or more CODECS in variant not supported: ${JSON.stringify(
                data.levels[0].attrs,
              )}`,
            );
          }
          const error = new Error(
            'no level with compatible codecs found in manifest',
          );
          this.hls.trigger(Events.ERROR, {
            type: ErrorTypes.MEDIA_ERROR,
            details: ErrorDetails.MANIFEST_INCOMPATIBLE_CODECS_ERROR,
            fatal: true,
            url: data.url,
            error,
            reason: error.message,
          });
        }
      });
      return;
    }
    if (data.audioTracks) {
      const { preferManagedMediaSource } = this.hls.config;
      audioTracks = data.audioTracks.filter(
        (track) =>
          !track.audioCodec ||
          areCodecsMediaSourceSupported(
            track.audioCodec,
            'audio',
            preferManagedMediaSource,
          ),
      );
      // Assign ids after filtering as array indices by group-id
      assignTrackIdsByGroup(audioTracks);
    }
    if (data.subtitles) {
      subtitleTracks = data.subtitles;
      assignTrackIdsByGroup(subtitleTracks);
    }
    // start bitrate is the first bitrate of the manifest
    const unsortedLevels = levels.slice(0);
    // sort levels from lowest to highest
    levels.sort((a, b) => {
      // Compare, in order: HDCP level, height (when resolutions are
      // signalled), frame rate, video range, codec preference, codec-set
      // preference (same-URI only), then average bitrate.
      if (a.attrs['HDCP-LEVEL'] !== b.attrs['HDCP-LEVEL']) {
        return (a.attrs['HDCP-LEVEL'] || '') > (b.attrs['HDCP-LEVEL'] || '')
          ? 1
          : -1;
      }
      // sort on height before bitrate for cap-level-controller
      if (resolutionFound && a.height !== b.height) {
        return a.height - b.height;
      }
      if (a.frameRate !== b.frameRate) {
        return a.frameRate - b.frameRate;
      }
      if (a.videoRange !== b.videoRange) {
        return (
          VideoRangeValues.indexOf(a.videoRange) -
          VideoRangeValues.indexOf(b.videoRange)
        );
      }
      if (a.videoCodec !== b.videoCodec) {
        const valueA = videoCodecPreferenceValue(a.videoCodec);
        const valueB = videoCodecPreferenceValue(b.videoCodec);
        if (valueA !== valueB) {
          return valueB - valueA;
        }
      }
      if (a.uri === b.uri && a.codecSet !== b.codecSet) {
        const valueA = codecsSetSelectionPreferenceValue(a.codecSet);
        const valueB = codecsSetSelectionPreferenceValue(b.codecSet);
        if (valueA !== valueB) {
          return valueB - valueA;
        }
      }
      if (a.averageBitrate !== b.averageBitrate) {
        return a.averageBitrate - b.averageBitrate;
      }
      return 0;
    });
    let firstLevelInPlaylist = unsortedLevels[0];
    if (this.steering) {
      // Steering may drop levels; keep "first level" aligned with the
      // pathway that survived filtering.
      levels = this.steering.filterParsedLevels(levels);
      if (levels.length !== unsortedLevels.length) {
        for (let i = 0; i < unsortedLevels.length; i++) {
          if (unsortedLevels[i].pathwayId === levels[0].pathwayId) {
            firstLevelInPlaylist = unsortedLevels[i];
            break;
          }
        }
      }
    }
    this._levels = levels;
    // find index of first level in sorted levels
    for (let i = 0; i < levels.length; i++) {
      if (levels[i] === firstLevelInPlaylist) {
        this._firstLevel = i;
        const firstLevelBitrate = firstLevelInPlaylist.bitrate;
        const bandwidthEstimate = this.hls.bandwidthEstimate;
        this.log(
          `manifest loaded, ${levels.length} level(s) found, first bitrate: ${firstLevelBitrate}`,
        );
        // Update default bwe to first variant bitrate as long it has not been configured or set
        if (this.hls.userConfig?.abrEwmaDefaultEstimate === undefined) {
          const startingBwEstimate = Math.min(
            firstLevelBitrate,
            this.hls.config.abrEwmaDefaultEstimateMax,
          );
          if (
            startingBwEstimate > bandwidthEstimate &&
            bandwidthEstimate === hlsDefaultConfig.abrEwmaDefaultEstimate
          ) {
            this.hls.bandwidthEstimate = startingBwEstimate;
          }
        }
        break;
      }
    }
    // Audio is only alternate if manifest include a URI along with the audio group tag,
    // and this is not an audio-only stream where levels contain audio-only
    const audioOnly = audioCodecFound && !videoCodecFound;
    const edata: ManifestParsedData = {
      levels,
      audioTracks,
      subtitleTracks,
      sessionData: data.sessionData,
      sessionKeys: data.sessionKeys,
      firstLevel: this._firstLevel,
      stats: data.stats,
      audio: audioCodecFound,
      video: videoCodecFound,
      altAudio: !audioOnly && audioTracks.some((t) => !!t.url),
    };
    this.hls.trigger(Events.MANIFEST_PARSED, edata);
    // Initiate loading after all controllers have received MANIFEST_PARSED
    if (this.hls.config.autoStartLoad || this.hls.forceStartLoad) {
      this.hls.startLoad(this.hls.config.startPosition);
    }
  }
  // null until a manifest has been parsed into at least one level.
  get levels(): Level[] | null {
    if (this._levels.length === 0) {
      return null;
    }
    return this._levels;
  }
  get level(): number {
    return this.currentLevelIndex;
  }
  /**
   * Switches to the given level index. Out-of-range indices trigger
   * LEVEL_SWITCH_ERROR (fatal when negative; otherwise clamped). Fires
   * LEVEL_SWITCHING and (re)loads the level playlist when needed.
   */
  set level(newLevel: number) {
    const levels = this._levels;
    if (levels.length === 0) {
      return;
    }
    // check if level idx is valid
    if (newLevel < 0 || newLevel >= levels.length) {
      // invalid level id given, trigger error
      const error = new Error('invalid level idx');
      const fatal = newLevel < 0;
      this.hls.trigger(Events.ERROR, {
        type: ErrorTypes.OTHER_ERROR,
        details: ErrorDetails.LEVEL_SWITCH_ERROR,
        level: newLevel,
        fatal,
        error,
        reason: error.message,
      });
      if (fatal) {
        return;
      }
      newLevel = Math.min(newLevel, levels.length - 1);
    }
    const lastLevelIndex = this.currentLevelIndex;
    const lastLevel = this.currentLevel;
    const lastPathwayId = lastLevel ? lastLevel.attrs['PATHWAY-ID'] : undefined;
    const level = levels[newLevel];
    const pathwayId = level.attrs['PATHWAY-ID'];
    this.currentLevelIndex = newLevel;
    this.currentLevel = level;
    // No-op when re-selecting the already-loaded level on the same pathway.
    if (
      lastLevelIndex === newLevel &&
      level.details &&
      lastLevel &&
      lastPathwayId === pathwayId
    ) {
      return;
    }
    this.log(
      `Switching to level ${newLevel} (${
        level.height ? level.height + 'p ' : ''
      }${level.videoRange ? level.videoRange + ' ' : ''}${
        level.codecSet ? level.codecSet + ' ' : ''
      }@${level.bitrate})${
        pathwayId ? ' with Pathway ' + pathwayId : ''
      } from level ${lastLevelIndex}${
        lastPathwayId ? ' with Pathway ' + lastPathwayId : ''
      }`,
    );
    const levelSwitchingData: LevelSwitchingData = {
      level: newLevel,
      attrs: level.attrs,
      details: level.details,
      bitrate: level.bitrate,
      averageBitrate: level.averageBitrate,
      maxBitrate: level.maxBitrate,
      realBitrate: level.realBitrate,
      width: level.width,
      height: level.height,
      codecSet: level.codecSet,
      audioCodec: level.audioCodec,
      videoCodec: level.videoCodec,
      audioGroups: level.audioGroups,
      subtitleGroups: level.subtitleGroups,
      loaded: level.loaded,
      loadError: level.loadError,
      fragmentError: level.fragmentError,
      name: level.name,
      id: level.id,
      uri: level.uri,
      url: level.url,
      urlId: 0,
      audioGroupIds: level.audioGroupIds,
      textGroupIds: level.textGroupIds,
    };
    this.hls.trigger(Events.LEVEL_SWITCHING, levelSwitchingData);
    // check if we need to load playlist for this level
    const levelDetails = level.details;
    if (!levelDetails || levelDetails.live) {
      // level not retrieved yet, or live playlist we need to (re)load it
      const hlsUrlParameters = this.switchParams(
        level.uri,
        lastLevel?.details,
        levelDetails,
      );
      this.loadPlaylist(hlsUrlParameters);
    }
  }
  get manualLevel(): number {
    return this.manualLevelIndex;
  }
  // -1 re-enables automatic (ABR) level selection.
  set manualLevel(newLevel) {
    this.manualLevelIndex = newLevel;
    if (this._startLevel === undefined) {
      this._startLevel = newLevel;
    }
    if (newLevel !== -1) {
      this.level = newLevel;
    }
  }
  get firstLevel(): number {
    return this._firstLevel;
  }
  set firstLevel(newLevel) {
    this._firstLevel = newLevel;
  }
  get startLevel(): number {
    // Setting hls.startLevel (this._startLevel) overrides config.startLevel
    if (this._startLevel === undefined) {
      const configStartLevel = this.hls.config.startLevel;
      if (configStartLevel !== undefined) {
        return configStartLevel;
      }
      return this.hls.firstAutoLevel;
    }
    return this._startLevel;
  }
  set startLevel(newLevel: number) {
    this._startLevel = newLevel;
  }
  // Retry level-playlist load errors that concern the current level.
  protected onError(event: Events.ERROR, data: ErrorData) {
    if (data.fatal || !data.context) {
      return;
    }
    if (
      data.context.type === PlaylistContextType.LEVEL &&
      data.context.level === this.level
    ) {
      this.checkRetry(data);
    }
  }
  // reset errors on the successful load of a fragment
  protected onFragBuffered(
    event: Events.FRAG_BUFFERED,
    { frag }: FragBufferedData,
  ) {
    if (frag !== undefined && frag.type === PlaylistLevelType.MAIN) {
      const el = frag.elementaryStreams;
      // Ignore fragments that contributed no elementary streams.
      if (!Object.keys(el).some((type) => !!el[type])) {
        return;
      }
      const level = this._levels[frag.level];
      if (level?.loadError) {
        this.log(
          `Resetting level error count of ${level.loadError} on frag buffered`,
        );
        level.loadError = 0;
      }
    }
  }
  protected onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
    const { level, details } = data;
    const curLevel = this._levels[level];
    if (!curLevel) {
      this.warn(`Invalid level index ${level}`);
      if (data.deliveryDirectives?.skip) {
        details.deltaUpdateFailed = true;
      }
      return;
    }
    // only process level loaded events matching with expected level
    if (level === this.currentLevelIndex) {
      // reset level load error counter on successful level loaded only if there is no issues with fragments
      if (curLevel.fragmentError === 0) {
        curLevel.loadError = 0;
      }
      this.playlistLoaded(level, data, curLevel.details);
    } else if (data.deliveryDirectives?.skip) {
      // received a delta playlist update that cannot be merged
      details.deltaUpdateFailed = true;
    }
  }
  /**
   * Kicks off loading of the current level's media playlist, appending HLS
   * delivery directives (blocking reload / delta updates) when provided.
   */
  protected loadPlaylist(hlsUrlParameters?: HlsUrlParameters) {
    super.loadPlaylist();
    const currentLevelIndex = this.currentLevelIndex;
    const currentLevel = this.currentLevel;
    if (currentLevel && this.shouldLoadPlaylist(currentLevel)) {
      let url = currentLevel.uri;
      if (hlsUrlParameters) {
        try {
          url = hlsUrlParameters.addDirectives(url);
        } catch (error) {
          this.warn(
            `Could not construct new URL with HLS Delivery Directives: ${error}`,
          );
        }
      }
      const pathwayId = currentLevel.attrs['PATHWAY-ID'];
      this.log(
        `Loading level index ${currentLevelIndex}${
          hlsUrlParameters?.msn !== undefined
            ? ' at sn ' +
              hlsUrlParameters.msn +
              ' part ' +
              hlsUrlParameters.part
            : ''
        } with${pathwayId ? ' Pathway ' + pathwayId : ''} ${url}`,
      );
      // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
      // console.log('New video quality level audio group id:', levelObject.attrs.AUDIO, level);
      this.clearTimer();
      this.hls.trigger(Events.LEVEL_LOADING, {
        url,
        level: currentLevelIndex,
        pathwayId: currentLevel.attrs['PATHWAY-ID'],
        id: 0, // Deprecated Level urlId
        deliveryDirectives: hlsUrlParameters || null,
      });
    }
  }
  // Next level to load: the manual selection if set, else ABR's choice.
  get nextLoadLevel() {
    if (this.manualLevelIndex !== -1) {
      return this.manualLevelIndex;
    } else {
      return this.hls.nextAutoLevel;
    }
  }
  set nextLoadLevel(nextLevel) {
    this.level = nextLevel;
    if (this.manualLevelIndex === -1) {
      this.hls.nextAutoLevel = nextLevel;
    }
  }
  /**
   * Removes one level (e.g. after repeated errors), detaching it from
   * steering, invalidating its fragments' level indexes, and re-mapping the
   * remaining fragments before broadcasting LEVELS_UPDATED.
   */
  removeLevel(levelIndex: number) {
    const levels = this._levels.filter((level, index) => {
      if (index !== levelIndex) {
        return true;
      }
      if (this.steering) {
        this.steering.removeLevel(level);
      }
      if (level === this.currentLevel) {
        this.currentLevel = null;
        this.currentLevelIndex = -1;
        if (level.details) {
          level.details.fragments.forEach((f) => (f.level = -1));
        }
      }
      return false;
    });
    reassignFragmentLevelIndexes(levels);
    this._levels = levels;
    if (this.currentLevelIndex > -1 && this.currentLevel?.details) {
      this.currentLevelIndex = this.currentLevel.details.fragments[0].level;
    }
    this.hls.trigger(Events.LEVELS_UPDATED, { levels });
  }
  private onLevelsUpdated(
    event: Events.LEVELS_UPDATED,
    { levels }: LevelsUpdatedData,
  ) {
    this._levels = levels;
  }
  // Broadcast MAX_AUTO_LEVEL_UPDATED whenever the ABR ceiling changed.
  public checkMaxAutoUpdated() {
    const { autoLevelCapping, maxAutoLevel, maxHdcpLevel } = this.hls;
    if (this._maxAutoLevel !== maxAutoLevel) {
      this._maxAutoLevel = maxAutoLevel;
      this.hls.trigger(Events.MAX_AUTO_LEVEL_UPDATED, {
        autoLevelCapping,
        levels: this.levels,
        maxAutoLevel,
        minAutoLevel: this.hls.minAutoLevel,
        maxHdcpLevel,
      });
    }
  }
}
/**
 * Assigns each track an `id` equal to its zero-based position within its
 * GROUP-ID; tracks without a groupId share the '' group.
 */
function assignTrackIdsByGroup(tracks: MediaPlaylist[]): void {
  const nextIdByGroup: { [groupId: string]: number } = {};
  for (const track of tracks) {
    const key = track.groupId || '';
    const id = nextIdByGroup[key] || 0;
    track.id = id;
    nextIdByGroup[key] = id + 1;
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,551 @@
import { Events } from '../events';
import { Bufferable, BufferHelper } from '../utils/buffer-helper';
import { findFragmentByPTS } from './fragment-finders';
import { alignMediaPlaylistByPDT } from '../utils/discontinuities';
import { addSliding } from '../utils/level-helper';
import { FragmentState } from './fragment-tracker';
import BaseStreamController, { State } from './base-stream-controller';
import { PlaylistLevelType } from '../types/loader';
import { Level } from '../types/level';
import { subtitleOptionsIdentical } from '../utils/media-option-attributes';
import { ErrorDetails, ErrorTypes } from '../errors';
import type { NetworkComponentAPI } from '../types/component-api';
import type Hls from '../hls';
import type { FragmentTracker } from './fragment-tracker';
import type KeyLoader from '../loader/key-loader';
import type { LevelDetails } from '../loader/level-details';
import type { Fragment } from '../loader/fragment';
import type {
ErrorData,
FragLoadedData,
SubtitleFragProcessed,
SubtitleTracksUpdatedData,
TrackLoadedData,
TrackSwitchedData,
BufferFlushingData,
LevelLoadedData,
FragBufferedData,
} from '../types/events';
const TICK_INTERVAL = 500; // how often to tick in ms
// Simple start/end span (seconds) used to track buffered subtitle ranges
// per track; consumed by BufferHelper.bufferedInfo and BufferableInstance.
interface TimeRange {
  start: number;
  end: number;
}
export class SubtitleStreamController
extends BaseStreamController
implements NetworkComponentAPI
{
private currentTrackId: number = -1;
private tracksBuffered: Array<TimeRange[]> = [];
private mainDetails: LevelDetails | null = null;
constructor(
hls: Hls,
fragmentTracker: FragmentTracker,
keyLoader: KeyLoader,
) {
super(
hls,
fragmentTracker,
keyLoader,
'[subtitle-stream-controller]',
PlaylistLevelType.SUBTITLE,
);
this._registerListeners();
}
protected onHandlerDestroying() {
this._unregisterListeners();
super.onHandlerDestroying();
this.mainDetails = null;
}
private _registerListeners() {
const { hls } = this;
hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
hls.on(Events.ERROR, this.onError, this);
hls.on(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
hls.on(Events.SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this);
hls.on(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
hls.on(Events.SUBTITLE_FRAG_PROCESSED, this.onSubtitleFragProcessed, this);
hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
}
private _unregisterListeners() {
const { hls } = this;
hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
hls.off(Events.ERROR, this.onError, this);
hls.off(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
hls.off(Events.SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this);
hls.off(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
hls.off(Events.SUBTITLE_FRAG_PROCESSED, this.onSubtitleFragProcessed, this);
hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
}
startLoad(startPosition: number) {
this.stopLoad();
this.state = State.IDLE;
this.setInterval(TICK_INTERVAL);
this.nextLoadPosition =
this.startPosition =
this.lastCurrentTime =
startPosition;
this.tick();
}
onManifestLoading() {
this.mainDetails = null;
this.fragmentTracker.removeAllFragments();
}
onMediaDetaching(): void {
this.tracksBuffered = [];
super.onMediaDetaching();
}
onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
this.mainDetails = data.details;
}
onSubtitleFragProcessed(
event: Events.SUBTITLE_FRAG_PROCESSED,
data: SubtitleFragProcessed,
) {
const { frag, success } = data;
this.fragPrevious = frag;
this.state = State.IDLE;
if (!success) {
return;
}
const buffered = this.tracksBuffered[this.currentTrackId];
if (!buffered) {
return;
}
// Create/update a buffered array matching the interface used by BufferHelper.bufferedInfo
// so we can re-use the logic used to detect how much has been buffered
let timeRange: TimeRange | undefined;
const fragStart = frag.start;
for (let i = 0; i < buffered.length; i++) {
if (fragStart >= buffered[i].start && fragStart <= buffered[i].end) {
timeRange = buffered[i];
break;
}
}
const fragEnd = frag.start + frag.duration;
if (timeRange) {
timeRange.end = fragEnd;
} else {
timeRange = {
start: fragStart,
end: fragEnd,
};
buffered.push(timeRange);
}
this.fragmentTracker.fragBuffered(frag);
this.fragBufferedComplete(frag, null);
}
onBufferFlushing(event: Events.BUFFER_FLUSHING, data: BufferFlushingData) {
const { startOffset, endOffset } = data;
if (startOffset === 0 && endOffset !== Number.POSITIVE_INFINITY) {
const endOffsetSubtitles = endOffset - 1;
if (endOffsetSubtitles <= 0) {
return;
}
data.endOffsetSubtitles = Math.max(0, endOffsetSubtitles);
this.tracksBuffered.forEach((buffered) => {
for (let i = 0; i < buffered.length; ) {
if (buffered[i].end <= endOffsetSubtitles) {
buffered.shift();
continue;
} else if (buffered[i].start < endOffsetSubtitles) {
buffered[i].start = endOffsetSubtitles;
} else {
break;
}
i++;
}
});
this.fragmentTracker.removeFragmentsInRange(
startOffset,
endOffsetSubtitles,
PlaylistLevelType.SUBTITLE,
);
}
}
onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
if (!this.loadedmetadata && data.frag.type === PlaylistLevelType.MAIN) {
if (this.media?.buffered.length) {
this.loadedmetadata = true;
}
}
}
// If something goes wrong, proceed to next frag, if we were processing one.
onError(event: Events.ERROR, data: ErrorData) {
const frag = data.frag;
if (frag?.type === PlaylistLevelType.SUBTITLE) {
if (data.details === ErrorDetails.FRAG_GAP) {
this.fragmentTracker.fragBuffered(frag, true);
}
if (this.fragCurrent) {
this.fragCurrent.abortRequests();
}
if (this.state !== State.STOPPED) {
this.state = State.IDLE;
}
}
}
// Got all new subtitle levels.
onSubtitleTracksUpdated(
event: Events.SUBTITLE_TRACKS_UPDATED,
{ subtitleTracks }: SubtitleTracksUpdatedData,
) {
if (this.levels && subtitleOptionsIdentical(this.levels, subtitleTracks)) {
this.levels = subtitleTracks.map(
(mediaPlaylist) => new Level(mediaPlaylist),
);
return;
}
this.tracksBuffered = [];
this.levels = subtitleTracks.map((mediaPlaylist) => {
const level = new Level(mediaPlaylist);
this.tracksBuffered[level.id] = [];
return level;
});
this.fragmentTracker.removeFragmentsInRange(
0,
Number.POSITIVE_INFINITY,
PlaylistLevelType.SUBTITLE,
);
this.fragPrevious = null;
this.mediaBuffer = null;
}
onSubtitleTrackSwitch(
event: Events.SUBTITLE_TRACK_SWITCH,
data: TrackSwitchedData,
) {
this.currentTrackId = data.id;
if (!this.levels?.length || this.currentTrackId === -1) {
this.clearInterval();
return;
}
// Check if track has the necessary details to load fragments
const currentTrack = this.levels[this.currentTrackId];
if (currentTrack?.details) {
this.mediaBuffer = this.mediaBufferTimeRanges;
} else {
this.mediaBuffer = null;
}
if (currentTrack) {
this.setInterval(TICK_INTERVAL);
}
}
// Got a new set of subtitle fragments.
onSubtitleTrackLoaded(
event: Events.SUBTITLE_TRACK_LOADED,
data: TrackLoadedData,
) {
const { currentTrackId, levels } = this;
const { details: newDetails, id: trackId } = data;
if (!levels) {
this.warn(`Subtitle tracks were reset while loading level ${trackId}`);
return;
}
const track: Level = levels[trackId];
if (trackId >= levels.length || !track) {
return;
}
this.log(
`Subtitle track ${trackId} loaded [${newDetails.startSN},${
newDetails.endSN
}]${
newDetails.lastPartSn
? `[part-${newDetails.lastPartSn}-${newDetails.lastPartIndex}]`
: ''
},duration:${newDetails.totalduration}`,
);
this.mediaBuffer = this.mediaBufferTimeRanges;
let sliding = 0;
if (newDetails.live || track.details?.live) {
const mainDetails = this.mainDetails;
if (newDetails.deltaUpdateFailed || !mainDetails) {
return;
}
const mainSlidingStartFragment = mainDetails.fragments[0];
if (!track.details) {
if (newDetails.hasProgramDateTime && mainDetails.hasProgramDateTime) {
alignMediaPlaylistByPDT(newDetails, mainDetails);
sliding = newDetails.fragments[0].start;
} else if (mainSlidingStartFragment) {
// line up live playlist with main so that fragments in range are loaded
sliding = mainSlidingStartFragment.start;
addSliding(newDetails, sliding);
}
} else {
sliding = this.alignPlaylists(
newDetails,
track.details,
this.levelLastLoaded?.details,
);
if (sliding === 0 && mainSlidingStartFragment) {
// realign with main when there is no overlap with last refresh
sliding = mainSlidingStartFragment.start;
addSliding(newDetails, sliding);
}
}
}
track.details = newDetails;
this.levelLastLoaded = track;
if (trackId !== currentTrackId) {
return;
}
if (!this.startFragRequested && (this.mainDetails || !newDetails.live)) {
this.setStartPosition(this.mainDetails || newDetails, sliding);
}
// trigger handler right now
this.tick();
// If playlist is misaligned because of bad PDT or drift, delete details to resync with main on reload
if (
newDetails.live &&
!this.fragCurrent &&
this.media &&
this.state === State.IDLE
) {
const foundFrag = findFragmentByPTS(
null,
newDetails.fragments,
this.media.currentTime,
0,
);
if (!foundFrag) {
this.warn('Subtitle playlist not aligned with playback');
track.details = undefined;
}
}
}
_handleFragmentLoadComplete(fragLoadedData: FragLoadedData) {
const { frag, payload } = fragLoadedData;
const decryptData = frag.decryptdata;
const hls = this.hls;
if (this.fragContextChanged(frag)) {
return;
}
// check to see if the payload needs to be decrypted
if (
payload &&
payload.byteLength > 0 &&
decryptData?.key &&
decryptData.iv &&
decryptData.method === 'AES-128'
) {
const startTime = performance.now();
// decrypt the subtitles
this.decrypter
.decrypt(
new Uint8Array(payload),
decryptData.key.buffer,
decryptData.iv.buffer,
)
.catch((err) => {
hls.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.FRAG_DECRYPT_ERROR,
fatal: false,
error: err,
reason: err.message,
frag,
});
throw err;
})
.then((decryptedData) => {
const endTime = performance.now();
hls.trigger(Events.FRAG_DECRYPTED, {
frag,
payload: decryptedData,
stats: {
tstart: startTime,
tdecrypt: endTime,
},
});
})
.catch((err) => {
this.warn(`${err.name}: ${err.message}`);
this.state = State.IDLE;
});
}
}
/**
 * Main state-machine tick: when IDLE, choose the next subtitle fragment to
 * load for the current track based on the per-track buffered ranges, the
 * main forward buffer, and the loaded playlist details.
 */
doTick() {
  if (!this.media) {
    this.state = State.IDLE;
    return;
  }
  if (this.state === State.IDLE) {
    const { currentTrackId, levels } = this;
    const track = levels?.[currentTrackId];
    // Nothing to do until the selected track's playlist has loaded.
    if (!track || !levels.length || !track.details) {
      return;
    }
    const { config } = this;
    const currentTime = this.getLoadPosition();
    // Subtitle buffer state is tracked per-track, not on the media element.
    const bufferedInfo = BufferHelper.bufferedInfo(
      this.tracksBuffered[this.currentTrackId] || [],
      currentTime,
      config.maxBufferHole,
    );
    const { end: targetBufferTime, len: bufferLen } = bufferedInfo;
    const mainBufferInfo = this.getFwdBufferInfo(
      this.media,
      PlaylistLevelType.MAIN,
    );
    const trackDetails = track.details as LevelDetails;
    // Allow subtitles to buffer one target duration beyond the main buffer.
    const maxBufLen =
      this.getMaxBufferLength(mainBufferInfo?.len) +
      trackDetails.levelTargetDuration;
    if (bufferLen > maxBufLen) {
      // Enough subtitle data buffered ahead already.
      return;
    }
    const fragments = trackDetails.fragments;
    const fragLen = fragments.length;
    const end = trackDetails.edge;
    let foundFrag: Fragment | null = null;
    const fragPrevious = this.fragPrevious;
    if (targetBufferTime < end) {
      // Drop the lookup tolerance near the playlist edge so the final
      // fragment remains selectable.
      const tolerance = config.maxFragLookUpTolerance;
      const lookupTolerance =
        targetBufferTime > end - tolerance ? 0 : tolerance;
      foundFrag = findFragmentByPTS(
        fragPrevious,
        fragments,
        Math.max(fragments[0].start, targetBufferTime),
        lookupTolerance,
      );
      if (
        !foundFrag &&
        fragPrevious &&
        fragPrevious.start < fragments[0].start
      ) {
        // Playlist slid past the previous fragment; restart at its start.
        foundFrag = fragments[0];
      }
    } else {
      // Target at/after the edge: fall back to the last fragment.
      foundFrag = fragments[fragLen - 1];
    }
    if (!foundFrag) {
      return;
    }
    foundFrag = this.mapToInitFragWhenRequired(foundFrag) as Fragment;
    if (foundFrag.sn !== 'initSegment') {
      // Load earlier fragment in same discontinuity to make up for misaligned playlists and cues that extend beyond end of segment
      const curSNIdx = foundFrag.sn - trackDetails.startSN;
      const prevFrag = fragments[curSNIdx - 1];
      if (
        prevFrag &&
        prevFrag.cc === foundFrag.cc &&
        this.fragmentTracker.getState(prevFrag) === FragmentState.NOT_LOADED
      ) {
        foundFrag = prevFrag;
      }
    }
    if (
      this.fragmentTracker.getState(foundFrag) === FragmentState.NOT_LOADED
    ) {
      // only load if fragment is not loaded
      this.loadFragment(foundFrag, track, targetBufferTime);
    }
  }
}
/**
 * Subtitle buffering target: the configured maximum, extended to cover
 * however much main media is already buffered ahead.
 */
protected getMaxBufferLength(mainBufferLength?: number): number {
  const configuredMax = super.getMaxBufferLength();
  return mainBufferLength
    ? Math.max(configuredMax, mainBufferLength)
    : configuredMax;
}
/**
 * Record the fragment as current and dispatch it to the appropriate load
 * path (init-segment loading vs. regular media fragment loading).
 */
protected loadFragment(
  frag: Fragment,
  level: Level,
  targetBufferTime: number,
) {
  this.fragCurrent = frag;
  const isInitSegment = frag.sn === 'initSegment';
  if (!isInitSegment) {
    this.startFragRequested = true;
    super.loadFragment(frag, level, targetBufferTime);
  } else {
    this._loadInitSegment(frag, level);
  }
}
/** TimeRanges-like view over the currently selected track's buffered ranges. */
get mediaBufferTimeRanges(): Bufferable {
  const buffered = this.tracksBuffered[this.currentTrackId] || [];
  return new BufferableInstance(buffered);
}
}
/**
 * Minimal TimeRanges adapter over an array of {start, end} ranges,
 * mirroring the DOM TimeRanges contract (including out-of-bounds throws
 * after ToUint32 coercion of the index).
 */
class BufferableInstance implements Bufferable {
  public readonly buffered: TimeRanges;

  constructor(timeranges: TimeRange[]) {
    const lookup = (
      name: 'start' | 'end',
      rawIndex: number,
      length: number,
    ): number => {
      // Match DOM semantics: coerce to uint32, then bounds-check.
      const index = rawIndex >>> 0;
      if (index > length - 1) {
        throw new DOMException(
          `Failed to execute '${name}' on 'TimeRanges': The index provided (${index}) is greater than the maximum bound (${length})`,
        );
      }
      return timeranges[index][name];
    };
    this.buffered = {
      get length() {
        return timeranges.length;
      },
      end: (index: number): number => lookup('end', index, timeranges.length),
      start: (index: number): number =>
        lookup('start', index, timeranges.length),
    };
  }
}

View File

@@ -0,0 +1,575 @@
import BasePlaylistController from './base-playlist-controller';
import { Events } from '../events';
import {
clearCurrentCues,
filterSubtitleTracks,
} from '../utils/texttrack-utils';
import { PlaylistContextType } from '../types/loader';
import {
mediaAttributesIdentical,
subtitleTrackMatchesTextTrack,
} from '../utils/media-option-attributes';
import { findMatchingOption, matchesOption } from '../utils/rendition-helper';
import type Hls from '../hls';
import type {
MediaPlaylist,
SubtitleSelectionOption,
} from '../types/media-playlist';
import type { HlsUrlParameters } from '../types/level';
import type {
ErrorData,
LevelLoadingData,
MediaAttachedData,
SubtitleTracksUpdatedData,
ManifestParsedData,
TrackLoadedData,
LevelSwitchingData,
} from '../types/events';
/**
 * Maintains the subtitle rendition lists (overall and within the active
 * level's subtitle group), performs track selection and playlist loading,
 * and keeps the media element's TextTrack modes in sync with the selection.
 */
class SubtitleTrackController extends BasePlaylistController {
  private media: HTMLMediaElement | null = null;
  // All subtitle renditions declared in the multivariant playlist.
  private tracks: MediaPlaylist[] = [];
  // Subtitle group-ids used by the current level; null until a level loads.
  private groupIds: (string | undefined)[] | null = null;
  // Subset of `tracks` in the active group(s); `id` re-indexed from 0.
  private tracksInGroup: MediaPlaylist[] = [];
  private trackId: number = -1;
  private currentTrack: MediaPlaylist | null = null;
  private selectDefaultTrack: boolean = true;
  // Selection made before media attach; applied in onMediaAttached.
  private queuedDefaultTrack: number = -1;
  private asyncPollTrackChange: () => void = () => this.pollTrackChange(0);
  // True when textTracks lacks the 'change' event (fall back to polling).
  private useTextTrackPolling: boolean = false;
  private subtitlePollingInterval: number = -1;
  private _subtitleDisplay: boolean = true;

  constructor(hls: Hls) {
    super(hls, '[subtitle-track-controller]');
    this.registerListeners();
  }

  public destroy() {
    this.unregisterListeners();
    this.tracks.length = 0;
    this.tracksInGroup.length = 0;
    this.currentTrack = null;
    // Release bound handlers so no timer/listener keeps this instance alive.
    this.onTextTracksChanged = this.asyncPollTrackChange = null as any;
    super.destroy();
  }

  // Whether the selected TextTrack is 'showing' (true) or 'hidden' (false).
  public get subtitleDisplay(): boolean {
    return this._subtitleDisplay;
  }

  public set subtitleDisplay(value: boolean) {
    this._subtitleDisplay = value;
    if (this.trackId > -1) {
      this.toggleTrackModes();
    }
  }

  private registerListeners() {
    const { hls } = this;
    hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
    hls.on(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
    hls.on(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
    hls.on(Events.ERROR, this.onError, this);
  }

  private unregisterListeners() {
    const { hls } = this;
    hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this);
    hls.off(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
    hls.off(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
    hls.off(Events.ERROR, this.onError, this);
  }

  // Listen for subtitle track change, then extract the current track ID.
  protected onMediaAttached(
    event: Events.MEDIA_ATTACHED,
    data: MediaAttachedData,
  ): void {
    this.media = data.media;
    if (!this.media) {
      return;
    }
    // Apply a selection queued while no media element was attached.
    if (this.queuedDefaultTrack > -1) {
      this.subtitleTrack = this.queuedDefaultTrack;
      this.queuedDefaultTrack = -1;
    }
    this.useTextTrackPolling = !(
      this.media.textTracks && 'onchange' in this.media.textTracks
    );
    if (this.useTextTrackPolling) {
      this.pollTrackChange(500);
    } else {
      this.media.textTracks.addEventListener(
        'change',
        this.asyncPollTrackChange,
      );
    }
  }

  // (Re)start the interval watching for TextTrack mode changes.
  private pollTrackChange(timeout: number) {
    self.clearInterval(this.subtitlePollingInterval);
    this.subtitlePollingInterval = self.setInterval(
      this.onTextTracksChanged,
      timeout,
    );
  }

  protected onMediaDetaching(): void {
    if (!this.media) {
      return;
    }
    self.clearInterval(this.subtitlePollingInterval);
    if (!this.useTextTrackPolling) {
      this.media.textTracks.removeEventListener(
        'change',
        this.asyncPollTrackChange,
      );
    }
    // Remember the selection so it can be restored on re-attach.
    if (this.trackId > -1) {
      this.queuedDefaultTrack = this.trackId;
    }
    const textTracks = filterSubtitleTracks(this.media.textTracks);
    // Clear loaded cues on media detachment from tracks
    textTracks.forEach((track) => {
      clearCurrentCues(track);
    });
    // Disable all subtitle tracks before detachment so when reattached only tracks in that content are enabled.
    this.subtitleTrack = -1;
    this.media = null;
  }

  protected onManifestLoading(): void {
    // Reset all selection state for the incoming source.
    this.tracks = [];
    this.groupIds = null;
    this.tracksInGroup = [];
    this.trackId = -1;
    this.currentTrack = null;
    this.selectDefaultTrack = true;
  }

  // Fired whenever a new manifest is loaded.
  protected onManifestParsed(
    event: Events.MANIFEST_PARSED,
    data: ManifestParsedData,
  ): void {
    this.tracks = data.subtitleTracks;
  }

  protected onSubtitleTrackLoaded(
    event: Events.SUBTITLE_TRACK_LOADED,
    data: TrackLoadedData,
  ): void {
    const { id, groupId, details } = data;
    const trackInActiveGroup = this.tracksInGroup[id];
    // Ignore loads for tracks no longer in the active group.
    if (!trackInActiveGroup || trackInActiveGroup.groupId !== groupId) {
      this.warn(
        `Subtitle track with id:${id} and group:${groupId} not found in active group ${trackInActiveGroup?.groupId}`,
      );
      return;
    }
    const curDetails = trackInActiveGroup.details;
    trackInActiveGroup.details = data.details;
    this.log(
      `Subtitle track ${id} "${trackInActiveGroup.name}" lang:${trackInActiveGroup.lang} group:${groupId} loaded [${details.startSN}-${details.endSN}]`,
    );
    // Only the selected track drives live reload scheduling/notification.
    if (id === this.trackId) {
      this.playlistLoaded(id, data, curDetails);
    }
  }

  protected onLevelLoading(
    event: Events.LEVEL_LOADING,
    data: LevelLoadingData,
  ): void {
    this.switchLevel(data.level);
  }

  protected onLevelSwitching(
    event: Events.LEVEL_SWITCHING,
    data: LevelSwitchingData,
  ): void {
    this.switchLevel(data.level);
  }

  /**
   * Recompute the active subtitle group/track set when the level changes.
   * When the group membership changed, re-indexes the in-group tracks,
   * applies any subtitlePreference, emits SUBTITLE_TRACKS_UPDATED and
   * selects an initial track; otherwise just retries playlist loading.
   */
  private switchLevel(levelIndex: number) {
    const levelInfo = this.hls.levels[levelIndex];
    if (!levelInfo) {
      return;
    }
    const subtitleGroups = levelInfo.subtitleGroups || null;
    const currentGroups = this.groupIds;
    let currentTrack = this.currentTrack;
    if (
      !subtitleGroups ||
      currentGroups?.length !== subtitleGroups?.length ||
      subtitleGroups?.some((groupId) => currentGroups?.indexOf(groupId) === -1)
    ) {
      this.groupIds = subtitleGroups;
      this.trackId = -1;
      this.currentTrack = null;
      const subtitleTracks = this.tracks.filter(
        (track): boolean =>
          !subtitleGroups || subtitleGroups.indexOf(track.groupId) !== -1,
      );
      if (subtitleTracks.length) {
        // Disable selectDefaultTrack if there are no default tracks
        if (
          this.selectDefaultTrack &&
          !subtitleTracks.some((track) => track.default)
        ) {
          this.selectDefaultTrack = false;
        }
        // track.id should match hls.audioTracks index
        subtitleTracks.forEach((track, i) => {
          track.id = i;
        });
      } else if (!currentTrack && !this.tracksInGroup.length) {
        // Do not dispatch SUBTITLE_TRACKS_UPDATED when there were and are no tracks
        return;
      }
      this.tracksInGroup = subtitleTracks;
      // Find preferred track
      const subtitlePreference = this.hls.config.subtitlePreference;
      if (!currentTrack && subtitlePreference) {
        this.selectDefaultTrack = false;
        const groupIndex = findMatchingOption(
          subtitlePreference,
          subtitleTracks,
        );
        if (groupIndex > -1) {
          currentTrack = subtitleTracks[groupIndex];
        } else {
          // Fall back to matching among all renditions (any group).
          const allIndex = findMatchingOption(subtitlePreference, this.tracks);
          currentTrack = this.tracks[allIndex];
        }
      }
      // Select initial track
      let trackId = this.findTrackId(currentTrack);
      if (trackId === -1 && currentTrack) {
        trackId = this.findTrackId(null);
      }
      // Dispatch events and load track if needed
      const subtitleTracksUpdated: SubtitleTracksUpdatedData = {
        subtitleTracks,
      };
      this.log(
        `Updating subtitle tracks, ${
          subtitleTracks.length
        } track(s) found in "${subtitleGroups?.join(',')}" group-id`,
      );
      this.hls.trigger(Events.SUBTITLE_TRACKS_UPDATED, subtitleTracksUpdated);
      if (trackId !== -1 && this.trackId === -1) {
        this.setSubtitleTrack(trackId);
      }
    } else if (this.shouldReloadPlaylist(currentTrack)) {
      // Retry playlist loading if no playlist is or has been loaded yet
      this.setSubtitleTrack(this.trackId);
    }
  }

  /**
   * Index in tracksInGroup matching `currentTrack` (or a DEFAULT track when
   * selectDefaultTrack is set). Falls back to progressively looser matches
   * on media attributes when no exact option matches. Returns -1 if none.
   */
  private findTrackId(currentTrack: MediaPlaylist | null): number {
    const tracks = this.tracksInGroup;
    const selectDefault = this.selectDefaultTrack;
    for (let i = 0; i < tracks.length; i++) {
      const track = tracks[i];
      if (
        (selectDefault && !track.default) ||
        (!selectDefault && !currentTrack)
      ) {
        continue;
      }
      if (!currentTrack || matchesOption(track, currentTrack)) {
        return i;
      }
    }
    if (currentTrack) {
      // Relaxed pass: language + assoc-language + characteristics.
      for (let i = 0; i < tracks.length; i++) {
        const track = tracks[i];
        if (
          mediaAttributesIdentical(currentTrack.attrs, track.attrs, [
            'LANGUAGE',
            'ASSOC-LANGUAGE',
            'CHARACTERISTICS',
          ])
        ) {
          return i;
        }
      }
      // Loosest pass: language only.
      for (let i = 0; i < tracks.length; i++) {
        const track = tracks[i];
        if (
          mediaAttributesIdentical(currentTrack.attrs, track.attrs, [
            'LANGUAGE',
          ])
        ) {
          return i;
        }
      }
    }
    return -1;
  }

  // Index in tracksInGroup matching a DOM TextTrack, or -1.
  private findTrackForTextTrack(textTrack: TextTrack | null): number {
    if (textTrack) {
      const tracks = this.tracksInGroup;
      for (let i = 0; i < tracks.length; i++) {
        const track = tracks[i];
        if (subtitleTrackMatchesTextTrack(track, textTrack)) {
          return i;
        }
      }
    }
    return -1;
  }

  protected onError(event: Events.ERROR, data: ErrorData): void {
    if (data.fatal || !data.context) {
      return;
    }
    // Only retry errors for the selected track within an active group.
    if (
      data.context.type === PlaylistContextType.SUBTITLE_TRACK &&
      data.context.id === this.trackId &&
      (!this.groupIds || this.groupIds.indexOf(data.context.groupId) !== -1)
    ) {
      this.checkRetry(data);
    }
  }

  // All subtitle renditions from the multivariant playlist, any group.
  get allSubtitleTracks(): MediaPlaylist[] {
    return this.tracks;
  }

  /** get alternate subtitle tracks list from playlist **/
  get subtitleTracks(): MediaPlaylist[] {
    return this.tracksInGroup;
  }

  /** get/set index of the selected subtitle track (based on index in subtitle track lists) **/
  get subtitleTrack(): number {
    return this.trackId;
  }

  set subtitleTrack(newId: number) {
    this.selectDefaultTrack = false;
    this.setSubtitleTrack(newId);
  }

  /**
   * Store the subtitle preference and switch to a matching rendition.
   * Returns the matched rendition, or null when nothing matches (or when
   * the option falls outside the active group after initial selection).
   */
  public setSubtitleOption(
    subtitleOption: MediaPlaylist | SubtitleSelectionOption | undefined,
  ): MediaPlaylist | null {
    this.hls.config.subtitlePreference = subtitleOption;
    if (subtitleOption) {
      const allSubtitleTracks = this.allSubtitleTracks;
      this.selectDefaultTrack = false;
      if (allSubtitleTracks.length) {
        // First see if current option matches (no switch op)
        const currentTrack = this.currentTrack;
        if (currentTrack && matchesOption(subtitleOption, currentTrack)) {
          return currentTrack;
        }
        // Find option in current group
        const groupIndex = findMatchingOption(
          subtitleOption,
          this.tracksInGroup,
        );
        if (groupIndex > -1) {
          const track = this.tracksInGroup[groupIndex];
          this.setSubtitleTrack(groupIndex);
          return track;
        } else if (currentTrack) {
          // If this is not the initial selection return null
          // option should have matched one in active group
          return null;
        } else {
          // Find the option in all tracks for initial selection
          const allIndex = findMatchingOption(
            subtitleOption,
            allSubtitleTracks,
          );
          if (allIndex > -1) {
            return allSubtitleTracks[allIndex];
          }
        }
      }
    }
    return null;
  }

  /**
   * Trigger SUBTITLE_TRACK_LOADING for the current track's playlist,
   * optionally appending HLS delivery directives to the URL.
   */
  protected loadPlaylist(hlsUrlParameters?: HlsUrlParameters): void {
    super.loadPlaylist();
    const currentTrack = this.currentTrack;
    if (this.shouldLoadPlaylist(currentTrack) && currentTrack) {
      const id = currentTrack.id;
      const groupId = currentTrack.groupId as string;
      let url = currentTrack.url;
      if (hlsUrlParameters) {
        try {
          url = hlsUrlParameters.addDirectives(url);
        } catch (error) {
          // Fall back to the undecorated URL on malformed directives.
          this.warn(
            `Could not construct new URL with HLS Delivery Directives: ${error}`,
          );
        }
      }
      this.log(`Loading subtitle playlist for id ${id}`);
      this.hls.trigger(Events.SUBTITLE_TRACK_LOADING, {
        url,
        id,
        groupId,
        deliveryDirectives: hlsUrlParameters || null,
      });
    }
  }

  /**
   * Disables the old subtitleTrack and sets current mode on the next subtitleTrack.
   * This operates on the DOM textTracks.
   * A value of -1 will disable all subtitle tracks.
   */
  private toggleTrackModes(): void {
    const { media } = this;
    if (!media) {
      return;
    }
    const textTracks = filterSubtitleTracks(media.textTracks);
    const currentTrack = this.currentTrack;
    let nextTrack;
    if (currentTrack) {
      nextTrack = textTracks.filter((textTrack) =>
        subtitleTrackMatchesTextTrack(currentTrack, textTrack),
      )[0];
      if (!nextTrack) {
        this.warn(
          `Unable to find subtitle TextTrack with name "${currentTrack.name}" and language "${currentTrack.lang}"`,
        );
      }
    }
    // Disable every other subtitle TextTrack before enabling the target.
    [].slice.call(textTracks).forEach((track) => {
      if (track.mode !== 'disabled' && track !== nextTrack) {
        track.mode = 'disabled';
      }
    });
    if (nextTrack) {
      const mode = this.subtitleDisplay ? 'showing' : 'hidden';
      if (nextTrack.mode !== mode) {
        nextTrack.mode = mode;
      }
    }
  }

  /**
   * This method is responsible for validating the subtitle index and periodically reloading if live.
   * Dispatches the SUBTITLE_TRACK_SWITCH event, which instructs the subtitle-stream-controller to load the selected track.
   */
  private setSubtitleTrack(newId: number): void {
    const tracks = this.tracksInGroup;
    // setting this.subtitleTrack will trigger internal logic
    // if media has not been attached yet, it will fail
    // we keep a reference to the default track id
    // and we'll set subtitleTrack when onMediaAttached is triggered
    if (!this.media) {
      this.queuedDefaultTrack = newId;
      return;
    }
    // exit if track id as already set or invalid
    if (newId < -1 || newId >= tracks.length || !Number.isFinite(newId)) {
      this.warn(`Invalid subtitle track id: ${newId}`);
      return;
    }
    // stopping live reloading timer if any
    this.clearTimer();
    this.selectDefaultTrack = false;
    const lastTrack = this.currentTrack;
    const track: MediaPlaylist | null = tracks[newId] || null;
    this.trackId = newId;
    this.currentTrack = track;
    this.toggleTrackModes();
    if (!track) {
      // switch to -1
      this.hls.trigger(Events.SUBTITLE_TRACK_SWITCH, { id: newId });
      return;
    }
    const trackLoaded = !!track.details && !track.details.live;
    // No-op when re-selecting the already-loaded, non-live current track.
    if (newId === this.trackId && track === lastTrack && trackLoaded) {
      return;
    }
    this.log(
      `Switching to subtitle-track ${newId}` +
        (track
          ? ` "${track.name}" lang:${track.lang} group:${track.groupId}`
          : ''),
    );
    const { id, groupId = '', name, type, url } = track;
    this.hls.trigger(Events.SUBTITLE_TRACK_SWITCH, {
      id,
      groupId,
      name,
      type,
      url,
    });
    const hlsUrlParameters = this.switchParams(
      track.url,
      lastTrack?.details,
      track.details,
    );
    this.loadPlaylist(hlsUrlParameters);
  }

  // Arrow-bound: invoked by the textTracks 'change' listener or the polling
  // interval to sync internal selection with the TextTrack the UA enabled.
  private onTextTracksChanged = () => {
    if (!this.useTextTrackPolling) {
      self.clearInterval(this.subtitlePollingInterval);
    }
    // Media is undefined when switching streams via loadSource()
    if (!this.media || !this.hls.config.renderTextTracksNatively) {
      return;
    }
    let textTrack: TextTrack | null = null;
    const tracks = filterSubtitleTracks(this.media.textTracks);
    for (let i = 0; i < tracks.length; i++) {
      if (tracks[i].mode === 'hidden') {
        // Do not break in case there is a following track with showing.
        textTrack = tracks[i];
      } else if (tracks[i].mode === 'showing') {
        textTrack = tracks[i];
        break;
      }
    }
    // Find internal track index for TextTrack
    const trackId = this.findTrackForTextTrack(textTrack);
    if (this.subtitleTrack !== trackId) {
      this.setSubtitleTrack(trackId);
    }
  };
}
export default SubtitleTrackController;

View File

@@ -0,0 +1,794 @@
import { Events } from '../events';
import Cea608Parser, { CaptionScreen } from '../utils/cea-608-parser';
import OutputFilter from '../utils/output-filter';
import { parseWebVTT } from '../utils/webvtt-parser';
import {
sendAddTrackEvent,
clearCurrentCues,
addCueToTrack,
removeCuesInRange,
filterSubtitleTracks,
} from '../utils/texttrack-utils';
import {
subtitleOptionsIdentical,
subtitleTrackMatchesTextTrack,
} from '../utils/media-option-attributes';
import { parseIMSC1, IMSC1_CODEC } from '../utils/imsc1-ttml-parser';
import { appendUint8Array } from '../utils/mp4-tools';
import { PlaylistLevelType } from '../types/loader';
import { Fragment } from '../loader/fragment';
import type {
FragParsingUserdataData,
FragLoadedData,
FragDecryptedData,
MediaAttachingData,
ManifestLoadedData,
InitPTSFoundData,
SubtitleTracksUpdatedData,
BufferFlushingData,
FragLoadingData,
} from '../types/events';
import { logger } from '../utils/logger';
import type Hls from '../hls';
import type { ComponentAPI } from '../types/component-api';
import type { HlsConfig } from '../config';
import type { CuesInterface } from '../utils/cues';
import type { MediaPlaylist } from '../types/media-playlist';
import type { VTTCCs } from '../types/vtt';
import type { RationalTimestamp } from '../utils/timescale-conversion';
// Properties for one CEA-608/708 caption slot: label/language come from
// config defaults and may be overridden by manifest CLOSED-CAPTIONS media.
type TrackProperties = {
  label: string;
  languageCode: string;
  media?: MediaPlaylist;
};
// Track descriptor emitted via NON_NATIVE_TEXT_TRACKS_FOUND when the host
// application renders captions/subtitles itself (renderTextTracksNatively
// disabled). Carries either the captions or subtitle playlist reference.
type NonNativeCaptionsTrack = {
  _id?: string;
  label: string;
  kind: string;
  default: boolean;
  closedCaptions?: MediaPlaylist;
  subtitleTrack?: MediaPlaylist;
};
export class TimelineController implements ComponentAPI {
private hls: Hls;
private media: HTMLMediaElement | null = null;
private config: HlsConfig;
private enabled: boolean = true;
private Cues: CuesInterface;
private textTracks: Array<TextTrack> = [];
private tracks: Array<MediaPlaylist> = [];
private initPTS: RationalTimestamp[] = [];
private unparsedVttFrags: Array<FragLoadedData | FragDecryptedData> = [];
private captionsTracks: Record<string, TextTrack> = {};
private nonNativeCaptionsTracks: Record<string, NonNativeCaptionsTrack> = {};
private cea608Parser1?: Cea608Parser;
private cea608Parser2?: Cea608Parser;
private lastCc: number = -1; // Last video (CEA-608) fragment CC
private lastSn: number = -1; // Last video (CEA-608) fragment MSN
private lastPartIndex: number = -1; // Last video (CEA-608) fragment Part Index
private prevCC: number = -1; // Last subtitle fragment CC
private vttCCs: VTTCCs = newVTTCCs();
private captionsProperties: {
textTrack1: TrackProperties;
textTrack2: TrackProperties;
textTrack3: TrackProperties;
textTrack4: TrackProperties;
};
/**
 * Wire caption/subtitle handling into the hls.js event bus and seed the
 * four CEA-608/708 caption slot properties from configuration.
 */
constructor(hls: Hls) {
  this.hls = hls;
  this.config = hls.config;
  this.Cues = hls.config.cueHandler;
  // Labels/languages for CC1-CC4; may be overridden by manifest
  // CLOSED-CAPTIONS renditions in onManifestLoaded.
  this.captionsProperties = {
    textTrack1: {
      label: this.config.captionsTextTrack1Label,
      languageCode: this.config.captionsTextTrack1LanguageCode,
    },
    textTrack2: {
      label: this.config.captionsTextTrack2Label,
      languageCode: this.config.captionsTextTrack2LanguageCode,
    },
    textTrack3: {
      label: this.config.captionsTextTrack3Label,
      languageCode: this.config.captionsTextTrack3LanguageCode,
    },
    textTrack4: {
      label: this.config.captionsTextTrack4Label,
      languageCode: this.config.captionsTextTrack4LanguageCode,
    },
  };
  hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
  hls.on(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
  hls.on(Events.FRAG_LOADING, this.onFragLoading, this);
  hls.on(Events.FRAG_LOADED, this.onFragLoaded, this);
  hls.on(Events.FRAG_PARSING_USERDATA, this.onFragParsingUserdata, this);
  hls.on(Events.FRAG_DECRYPTED, this.onFragDecrypted, this);
  hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
  hls.on(Events.SUBTITLE_TRACKS_CLEARED, this.onSubtitleTracksCleared, this);
  hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
}
/** Unsubscribe from all events (mirrors the constructor) and drop references. */
public destroy(): void {
  const { hls } = this;
  hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
  hls.off(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
  hls.off(Events.FRAG_LOADING, this.onFragLoading, this);
  hls.off(Events.FRAG_LOADED, this.onFragLoaded, this);
  hls.off(Events.FRAG_PARSING_USERDATA, this.onFragParsingUserdata, this);
  hls.off(Events.FRAG_DECRYPTED, this.onFragDecrypted, this);
  hls.off(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
  hls.off(Events.SUBTITLE_TRACKS_CLEARED, this.onSubtitleTracksCleared, this);
  hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
  // @ts-ignore
  this.hls = this.config = null;
  this.cea608Parser1 = this.cea608Parser2 = undefined;
}
/**
 * Lazily construct the two CEA-608 parsers (fields 1 and 3), each feeding
 * a pair of OutputFilter channels, when 608/708 captions are enabled.
 */
private initCea608Parsers() {
  if (!this.config.enableCEA708Captions) {
    return;
  }
  if (this.cea608Parser1 && this.cea608Parser2) {
    // Already constructed.
    return;
  }
  const channel1 = new OutputFilter(this, 'textTrack1');
  const channel2 = new OutputFilter(this, 'textTrack2');
  const channel3 = new OutputFilter(this, 'textTrack3');
  const channel4 = new OutputFilter(this, 'textTrack4');
  this.cea608Parser1 = new Cea608Parser(1, channel1, channel2);
  this.cea608Parser2 = new Cea608Parser(3, channel3, channel4);
}
/**
 * Emit a parsed CEA-608 caption screen as cues on the given captions track,
 * skipping cues that overlap more than 50% with already-parsed time ranges.
 *
 * @param cueRanges - mutable list of [start, end] ranges already covered;
 * overlapping entries are widened in place to include the new cue.
 */
public addCues(
  trackName: string,
  startTime: number,
  endTime: number,
  screen: CaptionScreen,
  cueRanges: Array<[number, number]>,
) {
  // skip cues which overlap more than 50% with previously parsed time ranges
  let merged = false;
  for (let i = cueRanges.length; i--; ) {
    const cueRange = cueRanges[i];
    const overlap = intersection(
      cueRange[0],
      cueRange[1],
      startTime,
      endTime,
    );
    if (overlap >= 0) {
      // Widen the existing range to cover the new cue.
      cueRange[0] = Math.min(cueRange[0], startTime);
      cueRange[1] = Math.max(cueRange[1], endTime);
      merged = true;
      if (overlap / (endTime - startTime) > 0.5) {
        // Majority-overlapping cue: treat as duplicate and drop it.
        return;
      }
    }
  }
  if (!merged) {
    cueRanges.push([startTime, endTime]);
  }
  if (this.config.renderTextTracksNatively) {
    const track = this.captionsTracks[trackName];
    this.Cues.newCue(track, startTime, endTime, screen);
  } else {
    // Non-native rendering: hand the generated cues to the app.
    const cues = this.Cues.newCue(null, startTime, endTime, screen);
    this.hls.trigger(Events.CUES_PARSED, {
      type: 'captions',
      cues,
      track: trackName,
    });
  }
}
// Triggered when an initial PTS is found; used for synchronisation of WebVTT.
private onInitPtsFound(
  event: Events.INIT_PTS_FOUND,
  { frag, id, initPTS, timescale }: InitPTSFoundData,
) {
  if (id === 'main') {
    this.initPTS[frag.cc] = { baseTime: initPTS, timescale };
  }
  // Initial PTS can arrive after the first VTT fragments have loaded;
  // flush any fragments that were queued while waiting for it.
  const pending = this.unparsedVttFrags;
  if (pending.length) {
    this.unparsedVttFrags = [];
    for (const vttFrag of pending) {
      this.onFragLoaded(Events.FRAG_LOADED, vttFrag as FragLoadedData);
    }
  }
}
/**
 * Look for a TextTrack already on the media element that can be reused for
 * the given label/language (e.g. carried over across loadSource() calls).
 */
private getExistingTrack(label: string, language: string): TextTrack | null {
  const media = this.media;
  if (!media) {
    return null;
  }
  const textTracks = media.textTracks;
  for (let i = 0; i < textTracks.length; i++) {
    const candidate = textTracks[i];
    if (
      canReuseVttTextTrack(candidate, {
        name: label,
        lang: language,
        attrs: {} as any,
      })
    ) {
      return candidate;
    }
  }
  return null;
}
/** Create (or reuse) the output track backing a CEA-608/708 channel. */
public createCaptionsTrack(trackName: string) {
  const nativeRendering = this.config.renderTextTracksNatively;
  if (nativeRendering) {
    this.createNativeTrack(trackName);
  } else {
    this.createNonNativeTrack(trackName);
  }
}
/**
 * Ensure a native captions TextTrack exists for the given channel name,
 * preferring to reuse a matching TextTrack already on the media element.
 */
private createNativeTrack(trackName: string) {
  const { captionsProperties, captionsTracks, media } = this;
  if (captionsTracks[trackName]) {
    // Already created for this channel.
    return;
  }
  const { label, languageCode } = captionsProperties[trackName];
  const existingTrack = this.getExistingTrack(label, languageCode);
  if (existingTrack) {
    captionsTracks[trackName] = existingTrack;
    clearCurrentCues(existingTrack);
    sendAddTrackEvent(existingTrack, media as HTMLMediaElement);
    return;
  }
  const textTrack = this.createTextTrack('captions', label, languageCode);
  if (textTrack) {
    // Set a special property on the track so we know it's managed by Hls.js
    textTrack[trackName] = true;
    captionsTracks[trackName] = textTrack;
  }
}
/**
 * Announce a non-native captions track (rendered by the host app) for the
 * given channel via NON_NATIVE_TEXT_TRACKS_FOUND.
 */
private createNonNativeTrack(trackName: string) {
  if (this.nonNativeCaptionsTracks[trackName]) {
    return;
  }
  // Create a list of a single track for the provider to consume
  const trackProperties: TrackProperties = this.captionsProperties[trackName];
  if (!trackProperties) {
    return;
  }
  const media = trackProperties.media;
  const track = {
    _id: trackName,
    label: trackProperties.label as string,
    kind: 'captions',
    default: media ? !!media.default : false,
    closedCaptions: media,
  };
  this.nonNativeCaptionsTracks[trackName] = track;
  this.hls.trigger(Events.NON_NATIVE_TEXT_TRACKS_FOUND, { tracks: [track] });
}
/**
 * Add a TextTrack to the attached media element; returns undefined when no
 * media element is attached (addTextTrack requires one).
 */
private createTextTrack(
  kind: TextTrackKind,
  label: string,
  lang?: string,
): TextTrack | undefined {
  return this.media ? this.media.addTextTrack(kind, label, lang) : undefined;
}
/** Keep a reference to the media element and clear any leftover cues. */
private onMediaAttaching(
  event: Events.MEDIA_ATTACHING,
  data: MediaAttachingData,
) {
  const { media } = data;
  this.media = media;
  this._cleanTracks();
}
/** Drop all captions tracks (clearing their cues) on media detach. */
private onMediaDetaching() {
  const captionsTracks = this.captionsTracks;
  for (const trackName of Object.keys(captionsTracks)) {
    clearCurrentCues(captionsTracks[trackName]);
    delete captionsTracks[trackName];
  }
  this.nonNativeCaptionsTracks = {};
}
/** Reset all caption/subtitle parsing state for an incoming source. */
private onManifestLoading() {
  // Reset video (CEA-608) fragment discontinuity tracking.
  this.lastCc = -1;
  this.lastSn = -1;
  this.lastPartIndex = -1;
  // Reset subtitle-manifest discontinuity tracking.
  this.prevCC = -1;
  this.vttCCs = newVTTCCs();
  // Drop track state accumulated for the previous source.
  this._cleanTracks();
  this.tracks = [];
  this.captionsTracks = {};
  this.nonNativeCaptionsTracks = {};
  this.textTracks = [];
  this.unparsedVttFrags = [];
  this.initPTS = [];
  const { cea608Parser1, cea608Parser2 } = this;
  if (cea608Parser1 && cea608Parser2) {
    cea608Parser1.reset();
    cea608Parser2.reset();
  }
}
/** Remove all cues from every TextTrack on the attached media element. */
private _cleanTracks() {
  const media = this.media;
  if (!media) {
    return;
  }
  const textTracks = media.textTracks;
  if (!textTracks) {
    return;
  }
  for (let i = 0; i < textTracks.length; i++) {
    clearCurrentCues(textTracks[i]);
  }
}
/**
 * Rebuild the TextTrack list (or the non-native track list) for an updated
 * subtitle rendition set, reusing compatible TextTracks already present on
 * the media element where possible.
 */
private onSubtitleTracksUpdated(
  event: Events.SUBTITLE_TRACKS_UPDATED,
  data: SubtitleTracksUpdatedData,
) {
  const tracks: Array<MediaPlaylist> = data.subtitleTracks || [];
  const hasIMSC1 = tracks.some((track) => track.textCodec === IMSC1_CODEC);
  if (this.config.enableWebVTT || (hasIMSC1 && this.config.enableIMSC1)) {
    const listIsIdentical = subtitleOptionsIdentical(this.tracks, tracks);
    if (listIsIdentical) {
      // Same rendition list: keep the existing TextTracks untouched.
      this.tracks = tracks;
      return;
    }
    this.textTracks = [];
    this.tracks = tracks;
    if (this.config.renderTextTracksNatively) {
      const media = this.media;
      const inUseTracks: (TextTrack | null)[] | null = media
        ? filterSubtitleTracks(media.textTracks)
        : null;
      this.tracks.forEach((track, index) => {
        // Reuse tracks with the same label and lang, but do not reuse 608/708 tracks
        let textTrack: TextTrack | undefined;
        if (inUseTracks) {
          let inUseTrack: TextTrack | null = null;
          for (let i = 0; i < inUseTracks.length; i++) {
            if (
              inUseTracks[i] &&
              canReuseVttTextTrack(inUseTracks[i], track)
            ) {
              inUseTrack = inUseTracks[i];
              // Null the slot so each TextTrack is reused at most once.
              inUseTracks[i] = null;
              break;
            }
          }
          if (inUseTrack) {
            textTrack = inUseTrack;
          }
        }
        if (textTrack) {
          clearCurrentCues(textTrack);
        } else {
          const textTrackKind = captionsOrSubtitlesFromCharacteristics(track);
          textTrack = this.createTextTrack(
            textTrackKind,
            track.name,
            track.lang,
          );
          if (textTrack) {
            textTrack.mode = 'disabled';
          }
        }
        if (textTrack) {
          this.textTracks.push(textTrack);
        }
      });
      // Warn when video element has captions or subtitle TextTracks carried over from another source
      if (inUseTracks?.length) {
        const unusedTextTracks = inUseTracks
          .filter((t) => t !== null)
          .map((t) => (t as TextTrack).label);
        if (unusedTextTracks.length) {
          logger.warn(
            `Media element contains unused subtitle tracks: ${unusedTextTracks.join(
              ', ',
            )}. Replace media element for each source to clear TextTracks and captions menu.`,
          );
        }
      }
    } else if (this.tracks.length) {
      // Create a list of tracks for the provider to consume
      const tracksList = this.tracks.map((track) => {
        return {
          label: track.name,
          kind: track.type.toLowerCase(),
          default: track.default,
          subtitleTrack: track,
        };
      });
      this.hls.trigger(Events.NON_NATIVE_TEXT_TRACKS_FOUND, {
        tracks: tracksList,
      });
    }
  }
}
/**
 * Map CLOSED-CAPTIONS renditions (INSTREAM-ID CC1-4 / SERVICE1-4) from the
 * manifest onto the four captionsProperties slots, overriding the
 * configured default labels/languages.
 */
private onManifestLoaded(
  event: Events.MANIFEST_LOADED,
  data: ManifestLoadedData,
) {
  if (this.config.enableCEA708Captions && data.captions) {
    data.captions.forEach((captionsTrack) => {
      const instreamIdMatch = /(?:CC|SERVICE)([1-4])/.exec(
        captionsTrack.instreamId as string,
      );
      if (!instreamIdMatch) {
        return;
      }
      // Channel number maps straight onto a textTrackN properties slot.
      const trackName = `textTrack${instreamIdMatch[1]}`;
      const trackProperties: TrackProperties =
        this.captionsProperties[trackName];
      if (!trackProperties) {
        return;
      }
      trackProperties.label = captionsTrack.name;
      if (captionsTrack.lang) {
        // optional attribute
        trackProperties.languageCode = captionsTrack.lang;
      }
      trackProperties.media = captionsTrack;
    });
  }
}
/** CLOSED-CAPTIONS attribute of the level this fragment belongs to, if any. */
private closedCaptionsForLevel(frag: Fragment): string | undefined {
  return this.hls.levels[frag.level]?.attrs['CLOSED-CAPTIONS'];
}
/**
 * Track (dis)continuity across successive MAIN fragments/parts and reset
 * the CEA-608 parsers when playback is not contiguous, so stale caption
 * state cannot produce cues with bad start/end times.
 */
private onFragLoading(event: Events.FRAG_LOADING, data: FragLoadingData) {
  // if this frag isn't contiguous, clear the parser so cues with bad start/end times aren't added to the textTrack
  if (this.enabled && data.frag.type === PlaylistLevelType.MAIN) {
    const { cea608Parser1, cea608Parser2, lastSn } = this;
    const { cc, sn } = data.frag;
    const partIndex = data.part?.index ?? -1;
    if (cea608Parser1 && cea608Parser2) {
      // Contiguous means either the next whole segment, or the next part of
      // the same segment. The previous condition
      // `sn !== lastSn + 1 || (sn === lastSn && partIndex !== lastPartIndex + 1)`
      // made the same-segment clause unreachable (the first disjunct is
      // already true whenever sn === lastSn), so the parsers were reset on
      // every contiguous part advance within a segment.
      const isNextSegment = sn === lastSn + 1;
      const isNextPartOfSameSegment =
        sn === lastSn && partIndex === this.lastPartIndex + 1;
      if (
        (!isNextSegment && !isNextPartOfSameSegment) ||
        cc !== this.lastCc
      ) {
        cea608Parser1.reset();
        cea608Parser2.reset();
      }
    }
    this.lastCc = cc as number;
    this.lastSn = sn as number;
    this.lastPartIndex = partIndex;
  }
}
/**
 * Parse loaded (or just-decrypted) SUBTITLE fragments as IMSC1 or WebVTT.
 * Encrypted payloads are deferred until FRAG_DECRYPTED re-enters this
 * handler (decrypted data carries a `stats` property).
 */
private onFragLoaded(
  event: Events.FRAG_LOADED,
  data: FragDecryptedData | FragLoadedData,
) {
  const { frag, payload } = data;
  if (frag.type === PlaylistLevelType.SUBTITLE) {
    // If fragment is subtitle type, parse as WebVTT.
    if (payload.byteLength) {
      const decryptData = frag.decryptdata;
      // fragment after decryption has a stats object
      const decrypted = 'stats' in data;
      // If the subtitles are not encrypted, parse VTTs now. Otherwise, we need to wait.
      if (decryptData == null || !decryptData.encrypted || decrypted) {
        const trackPlaylistMedia = this.tracks[frag.level];
        const vttCCs = this.vttCCs;
        // Register this discontinuity the first time one of its fragments loads.
        if (!vttCCs[frag.cc]) {
          vttCCs[frag.cc] = {
            start: frag.start,
            prevCC: this.prevCC,
            new: true,
          };
          this.prevCC = frag.cc;
        }
        if (
          trackPlaylistMedia &&
          trackPlaylistMedia.textCodec === IMSC1_CODEC
        ) {
          this._parseIMSC1(frag, payload);
        } else {
          this._parseVTTs(data);
        }
      }
    } else {
      // In case there is no payload, finish unsuccessfully.
      this.hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
        success: false,
        frag,
        error: new Error('Empty subtitle payload'),
      });
    }
  }
}
/**
 * Parse an IMSC1 (TTML) subtitle fragment and report the outcome via
 * SUBTITLE_FRAG_PROCESSED.
 */
private _parseIMSC1(frag: Fragment, payload: ArrayBuffer) {
  const hls = this.hls;
  const onParsed = (cues) => {
    this._appendCues(cues, frag.level);
    hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
      success: true,
      frag: frag,
    });
  };
  const onParseError = (error) => {
    logger.log(`Failed to parse IMSC1: ${error}`);
    hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
      success: false,
      frag: frag,
      error,
    });
  };
  parseIMSC1(payload, this.initPTS[frag.cc], onParsed, onParseError);
}
  /**
   * Parse a WebVTT subtitle payload. Fragments arriving before any initPTS is
   * known are queued in `unparsedVttFrags` and re-processed later.
   */
  private _parseVTTs(data: FragDecryptedData | FragLoadedData) {
    const { frag, payload } = data;
    // We need an initial synchronisation PTS. Store fragments as long as none has arrived
    const { initPTS, unparsedVttFrags } = this;
    const maxAvCC = initPTS.length - 1;
    if (!initPTS[frag.cc] && maxAvCC === -1) {
      unparsedVttFrags.push(data);
      return;
    }
    const hls = this.hls;
    // Parse the WebVTT file contents.
    // Prepend the init segment (if any) so its headers are part of the parse.
    const payloadWebVTT = frag.initSegment?.data
      ? appendUint8Array(frag.initSegment.data, new Uint8Array(payload))
      : payload;
    parseWebVTT(
      payloadWebVTT,
      this.initPTS[frag.cc],
      this.vttCCs,
      frag.cc,
      frag.start,
      (cues) => {
        this._appendCues(cues, frag.level);
        hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
          success: true,
          frag: frag,
        });
      },
      (error) => {
        const missingInitPTS =
          error.message === 'Missing initPTS for VTT MPEGTS';
        if (missingInitPTS) {
          // Queue for a retry once initPTS for this CC arrives.
          unparsedVttFrags.push(data);
        } else {
          // Other parse failure: probe for IMSC1 before giving up.
          this._fallbackToIMSC1(frag, payload);
        }
        // Something went wrong while parsing. Trigger event with success false.
        logger.log(`Failed to parse VTT cue: ${error}`);
        if (missingInitPTS && maxAvCC > frag.cc) {
          // initPTS exists for a later CC, so this one may still arrive —
          // defer the processed event instead of reporting failure now.
          return;
        }
        hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
          success: false,
          frag: frag,
          error,
        });
      },
    );
  }
private _fallbackToIMSC1(frag: Fragment, payload: ArrayBuffer) {
// If textCodec is unknown, try parsing as IMSC1. Set textCodec based on the result
const trackPlaylistMedia = this.tracks[frag.level];
if (!trackPlaylistMedia.textCodec) {
parseIMSC1(
payload,
this.initPTS[frag.cc],
() => {
trackPlaylistMedia.textCodec = IMSC1_CODEC;
this._parseIMSC1(frag, payload);
},
() => {
trackPlaylistMedia.textCodec = 'wvtt';
},
);
}
}
private _appendCues(cues: VTTCue[], fragLevel: number) {
const hls = this.hls;
if (this.config.renderTextTracksNatively) {
const textTrack = this.textTracks[fragLevel];
// WebVTTParser.parse is an async method and if the currently selected text track mode is set to "disabled"
// before parsing is done then don't try to access currentTrack.cues.getCueById as cues will be null
// and trying to access getCueById method of cues will throw an exception
// Because we check if the mode is disabled, we can force check `cues` below. They can't be null.
if (!textTrack || textTrack.mode === 'disabled') {
return;
}
cues.forEach((cue) => addCueToTrack(textTrack, cue));
} else {
const currentTrack = this.tracks[fragLevel];
if (!currentTrack) {
return;
}
const track = currentTrack.default ? 'default' : 'subtitles' + fragLevel;
hls.trigger(Events.CUES_PARSED, { type: 'subtitles', cues, track });
}
}
private onFragDecrypted(
event: Events.FRAG_DECRYPTED,
data: FragDecryptedData,
) {
const { frag } = data;
if (frag.type === PlaylistLevelType.SUBTITLE) {
this.onFragLoaded(Events.FRAG_LOADED, data as unknown as FragLoadedData);
}
}
private onSubtitleTracksCleared() {
this.tracks = [];
this.captionsTracks = {};
}
private onFragParsingUserdata(
event: Events.FRAG_PARSING_USERDATA,
data: FragParsingUserdataData,
) {
this.initCea608Parsers();
const { cea608Parser1, cea608Parser2 } = this;
if (!this.enabled || !cea608Parser1 || !cea608Parser2) {
return;
}
const { frag, samples } = data;
if (
frag.type === PlaylistLevelType.MAIN &&
this.closedCaptionsForLevel(frag) === 'NONE'
) {
return;
}
// If the event contains captions (found in the bytes property), push all bytes into the parser immediately
// It will create the proper timestamps based on the PTS value
for (let i = 0; i < samples.length; i++) {
const ccBytes = samples[i].bytes;
if (ccBytes) {
const ccdatas = this.extractCea608Data(ccBytes);
cea608Parser1.addData(samples[i].pts, ccdatas[0]);
cea608Parser2.addData(samples[i].pts, ccdatas[1]);
}
}
}
onBufferFlushing(
event: Events.BUFFER_FLUSHING,
{ startOffset, endOffset, endOffsetSubtitles, type }: BufferFlushingData,
) {
const { media } = this;
if (!media || media.currentTime < endOffset) {
return;
}
// Clear 608 caption cues from the captions TextTracks when the video back buffer is flushed
// Forward cues are never removed because we can loose streamed 608 content from recent fragments
if (!type || type === 'video') {
const { captionsTracks } = this;
Object.keys(captionsTracks).forEach((trackName) =>
removeCuesInRange(captionsTracks[trackName], startOffset, endOffset),
);
}
if (this.config.renderTextTracksNatively) {
// Clear VTT/IMSC1 subtitle cues from the subtitle TextTracks when the back buffer is flushed
if (startOffset === 0 && endOffsetSubtitles !== undefined) {
const { textTracks } = this;
Object.keys(textTracks).forEach((trackName) =>
removeCuesInRange(
textTracks[trackName],
startOffset,
endOffsetSubtitles,
),
);
}
}
}
private extractCea608Data(byteArray: Uint8Array): number[][] {
const actualCCBytes: number[][] = [[], []];
const count = byteArray[0] & 0x1f;
let position = 2;
for (let j = 0; j < count; j++) {
const tmpByte = byteArray[position++];
const ccbyte1 = 0x7f & byteArray[position++];
const ccbyte2 = 0x7f & byteArray[position++];
if (ccbyte1 === 0 && ccbyte2 === 0) {
continue;
}
const ccValid = (0x04 & tmpByte) !== 0; // Support all four channels
if (ccValid) {
const ccType = 0x03 & tmpByte;
if (
0x00 /* CEA608 field1*/ === ccType ||
0x01 /* CEA608 field2*/ === ccType
) {
// Exclude CEA708 CC data.
actualCCBytes[ccType].push(ccbyte1);
actualCCBytes[ccType].push(ccbyte2);
}
}
}
return actualCCBytes;
}
}
/**
 * Pick the TextTrack kind for a manifest track: a track whose CHARACTERISTICS
 * both transcribe spoken dialog and describe music/sound is 'captions';
 * everything else is 'subtitles'.
 */
function captionsOrSubtitlesFromCharacteristics(
  track: Pick<MediaPlaylist, 'name' | 'lang' | 'attrs' | 'characteristics'>,
): TextTrackKind {
  const characteristics = track.characteristics;
  const isCaptions =
    !!characteristics &&
    /transcribes-spoken-dialog/gi.test(characteristics) &&
    /describes-music-and-sound/gi.test(characteristics);
  return isCaptions ? 'captions' : 'subtitles';
}
/**
 * True when an existing media-element TextTrack can be reused for a manifest
 * subtitle track: it must exist, have the matching kind, and match the
 * manifest track's identifying attributes.
 */
function canReuseVttTextTrack(
  inUseTrack: TextTrack | null,
  manifestTrack: Pick<
    MediaPlaylist,
    'name' | 'lang' | 'attrs' | 'characteristics'
  >,
): boolean {
  if (!inUseTrack) {
    return false;
  }
  if (
    inUseTrack.kind !== captionsOrSubtitlesFromCharacteristics(manifestTrack)
  ) {
    return false;
  }
  return subtitleTrackMatchesTextTrack(manifestTrack, inUseTrack);
}
/**
 * Length of the overlap between ranges [x1, x2] and [y1, y2].
 * Negative when the ranges do not intersect.
 */
function intersection(x1: number, x2: number, y1: number, y2: number): number {
  const overlapEnd = x2 < y2 ? x2 : y2;
  const overlapStart = x1 > y1 ? x1 : y1;
  return overlapEnd - overlapStart;
}
/**
 * Fresh WebVTT discontinuity-tracking state with a root entry for CC 0.
 */
function newVTTCCs(): VTTCCs {
  const rootCC = {
    start: 0,
    prevCC: -1,
    new: true,
  };
  return {
    ccOffset: 0,
    presentationOffset: 0,
    0: rootCC,
  };
}

13
server/node_modules/hls.js/src/crypt/aes-crypto.ts generated vendored Normal file
View File

@@ -0,0 +1,13 @@
/**
 * Thin wrapper around WebCrypto AES-CBC decryption, bound to a fixed IV.
 */
export default class AESCrypto {
  constructor(
    private subtle: SubtleCrypto,
    private aesIV: Uint8Array,
  ) {}

  /** Decrypt `data` with `key` using AES-CBC and the stored IV. */
  decrypt(data: ArrayBuffer, key: CryptoKey) {
    const algorithm = { name: 'AES-CBC', iv: this.aesIV };
    return this.subtle.decrypt(algorithm, key, data);
  }
}

337
server/node_modules/hls.js/src/crypt/aes-decryptor.ts generated vendored Normal file
View File

@@ -0,0 +1,337 @@
import { sliceUint8 } from '../utils/typed-array';
// PKCS7
/**
 * Strip PKCS#7 padding from a decrypted buffer. The final byte encodes the
 * padding length; an empty input or a trailing 0 byte returns the input
 * unchanged.
 */
export function removePadding(array: Uint8Array): Uint8Array {
  const outputBytes = array.byteLength;
  if (!outputBytes) {
    return array;
  }
  // Read the last byte relative to this view, not the underlying buffer:
  // `array` may be a subarray whose byteOffset is non-zero.
  const paddingBytes = new DataView(
    array.buffer,
    array.byteOffset,
    array.byteLength,
  ).getUint8(outputBytes - 1);
  if (paddingBytes) {
    return sliceUint8(array, 0, outputBytes - paddingBytes);
  }
  return array;
}
/**
 * Software AES-CBC decryptor, used when WebCrypto is unavailable.
 *
 * The S-box, inverse S-box and the combined (Inv)SubBytes/(Inv)MixColumns
 * lookup tables are computed once in the constructor. `expandKey()` builds
 * and caches the forward and inverse key schedules; `decrypt()` then runs
 * table-driven inverse rounds over 16-byte blocks with CBC chaining.
 */
export default class AESDecryptor {
  // AES round constants (Rcon) used by the key schedule.
  private rcon: Array<number> = [
    0x0, 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
  ];
  // Four byte-rotated views of the combined SubBytes+MixColumns table.
  private subMix: Array<Uint32Array> = [
    new Uint32Array(256),
    new Uint32Array(256),
    new Uint32Array(256),
    new Uint32Array(256),
  ];
  // Four byte-rotated views of the combined InvSubBytes+InvMixColumns table.
  private invSubMix: Array<Uint32Array> = [
    new Uint32Array(256),
    new Uint32Array(256),
    new Uint32Array(256),
    new Uint32Array(256),
  ];
  private sBox: Uint32Array = new Uint32Array(256);
  private invSBox: Uint32Array = new Uint32Array(256);
  // Key words from the last expandKey() call, used to skip redundant
  // key-schedule computation when the same key is supplied again.
  private key: Uint32Array = new Uint32Array(0);
  private ksRows: number = 0;
  private keySize: number = 0;
  private keySchedule!: Uint32Array;
  private invKeySchedule!: Uint32Array;
  constructor() {
    this.initTable();
  }
  // Using view.getUint32() also swaps the byte order.
  // NOTE(review): exactly four 32-bit words are read, so only 128-bit keys
  // are fully consumed here — confirm callers never pass AES-192/256 keys.
  uint8ArrayToUint32Array_(arrayBuffer: ArrayBuffer): Uint32Array {
    const view = new DataView(arrayBuffer);
    const newArray = new Uint32Array(4);
    for (let i = 0; i < 4; i++) {
      newArray[i] = view.getUint32(i * 4);
    }
    return newArray;
  }
  /** Build the S-box, inverse S-box and combined mix-column lookup tables. */
  initTable() {
    const sBox = this.sBox;
    const invSBox = this.invSBox;
    const subMix = this.subMix;
    const subMix0 = subMix[0];
    const subMix1 = subMix[1];
    const subMix2 = subMix[2];
    const subMix3 = subMix[3];
    const invSubMix = this.invSubMix;
    const invSubMix0 = invSubMix[0];
    const invSubMix1 = invSubMix[1];
    const invSubMix2 = invSubMix[2];
    const invSubMix3 = invSubMix[3];
    // Doubling table over GF(2^8) with the AES reduction polynomial 0x11b.
    const d = new Uint32Array(256);
    let x = 0;
    let xi = 0;
    let i = 0;
    for (i = 0; i < 256; i++) {
      if (i < 128) {
        d[i] = i << 1;
      } else {
        d[i] = (i << 1) ^ 0x11b;
      }
    }
    for (i = 0; i < 256; i++) {
      let sx = xi ^ (xi << 1) ^ (xi << 2) ^ (xi << 3) ^ (xi << 4);
      sx = (sx >>> 8) ^ (sx & 0xff) ^ 0x63;
      sBox[x] = sx;
      invSBox[sx] = x;
      // Compute multiplication
      const x2 = d[x];
      const x4 = d[x2];
      const x8 = d[x4];
      // Compute sub/invSub bytes, mix columns tables
      let t = (d[sx] * 0x101) ^ (sx * 0x1010100);
      subMix0[x] = (t << 24) | (t >>> 8);
      subMix1[x] = (t << 16) | (t >>> 16);
      subMix2[x] = (t << 8) | (t >>> 24);
      subMix3[x] = t;
      // Compute inv sub bytes, inv mix columns tables
      t = (x8 * 0x1010101) ^ (x4 * 0x10001) ^ (x2 * 0x101) ^ (x * 0x1010100);
      invSubMix0[sx] = (t << 24) | (t >>> 8);
      invSubMix1[sx] = (t << 16) | (t >>> 16);
      invSubMix2[sx] = (t << 8) | (t >>> 24);
      invSubMix3[sx] = t;
      // Compute next counter
      if (!x) {
        x = xi = 1;
      } else {
        x = x2 ^ d[d[d[x8 ^ x2]]];
        xi ^= d[d[xi]];
      }
    }
  }
  /**
   * Derive forward and inverse key schedules for `keyBuffer`.
   * A no-op when the key matches the previously expanded one.
   * @throws when the key is not 4/6/8 words (128/192/256 bits) long.
   */
  expandKey(keyBuffer: ArrayBuffer) {
    // convert keyBuffer to Uint32Array
    const key = this.uint8ArrayToUint32Array_(keyBuffer);
    let sameKey = true;
    let offset = 0;
    while (offset < key.length && sameKey) {
      sameKey = key[offset] === this.key[offset];
      offset++;
    }
    if (sameKey) {
      return;
    }
    this.key = key;
    const keySize = (this.keySize = key.length);
    if (keySize !== 4 && keySize !== 6 && keySize !== 8) {
      throw new Error('Invalid aes key size=' + keySize);
    }
    const ksRows = (this.ksRows = (keySize + 6 + 1) * 4);
    let ksRow;
    let invKsRow;
    const keySchedule = (this.keySchedule = new Uint32Array(ksRows));
    const invKeySchedule = (this.invKeySchedule = new Uint32Array(ksRows));
    const sbox = this.sBox;
    const rcon = this.rcon;
    const invSubMix = this.invSubMix;
    const invSubMix0 = invSubMix[0];
    const invSubMix1 = invSubMix[1];
    const invSubMix2 = invSubMix[2];
    const invSubMix3 = invSubMix[3];
    let prev;
    let t;
    for (ksRow = 0; ksRow < ksRows; ksRow++) {
      if (ksRow < keySize) {
        prev = keySchedule[ksRow] = key[ksRow];
        continue;
      }
      t = prev;
      if (ksRow % keySize === 0) {
        // Rot word
        t = (t << 8) | (t >>> 24);
        // Sub word
        t =
          (sbox[t >>> 24] << 24) |
          (sbox[(t >>> 16) & 0xff] << 16) |
          (sbox[(t >>> 8) & 0xff] << 8) |
          sbox[t & 0xff];
        // Mix Rcon
        t ^= rcon[(ksRow / keySize) | 0] << 24;
      } else if (keySize > 6 && ksRow % keySize === 4) {
        // Sub word
        t =
          (sbox[t >>> 24] << 24) |
          (sbox[(t >>> 16) & 0xff] << 16) |
          (sbox[(t >>> 8) & 0xff] << 8) |
          sbox[t & 0xff];
      }
      keySchedule[ksRow] = prev = (keySchedule[ksRow - keySize] ^ t) >>> 0;
    }
    // Build the inverse (equivalent) key schedule for decryption: round keys
    // in reverse order, with InvMixColumns applied to the inner rounds.
    for (invKsRow = 0; invKsRow < ksRows; invKsRow++) {
      ksRow = ksRows - invKsRow;
      if (invKsRow & 3) {
        t = keySchedule[ksRow];
      } else {
        t = keySchedule[ksRow - 4];
      }
      if (invKsRow < 4 || ksRow <= 4) {
        invKeySchedule[invKsRow] = t;
      } else {
        invKeySchedule[invKsRow] =
          invSubMix0[sbox[t >>> 24]] ^
          invSubMix1[sbox[(t >>> 16) & 0xff]] ^
          invSubMix2[sbox[(t >>> 8) & 0xff]] ^
          invSubMix3[sbox[t & 0xff]];
      }
      invKeySchedule[invKsRow] = invKeySchedule[invKsRow] >>> 0;
    }
  }
  // Adding this as a method greatly improves performance.
  networkToHostOrderSwap(word: number): number {
    return (
      (word << 24) |
      ((word & 0xff00) << 8) |
      ((word & 0xff0000) >> 8) |
      (word >>> 24)
    );
  }
  /**
   * CBC-decrypt whole 16-byte blocks of `inputArrayBuffer`, starting at
   * 32-bit word index `offset`, chained from `aesIV`.
   * Returns a new ArrayBuffer; PKCS#7 padding is NOT removed here.
   */
  decrypt(inputArrayBuffer: ArrayBuffer, offset: number, aesIV: ArrayBuffer) {
    const nRounds = this.keySize + 6;
    const invKeySchedule = this.invKeySchedule;
    const invSBOX = this.invSBox;
    const invSubMix = this.invSubMix;
    const invSubMix0 = invSubMix[0];
    const invSubMix1 = invSubMix[1];
    const invSubMix2 = invSubMix[2];
    const invSubMix3 = invSubMix[3];
    const initVector = this.uint8ArrayToUint32Array_(aesIV);
    let initVector0 = initVector[0];
    let initVector1 = initVector[1];
    let initVector2 = initVector[2];
    let initVector3 = initVector[3];
    const inputInt32 = new Int32Array(inputArrayBuffer);
    const outputInt32 = new Int32Array(inputInt32.length);
    let t0, t1, t2, t3;
    let s0, s1, s2, s3;
    let inputWords0, inputWords1, inputWords2, inputWords3;
    let ksRow, i;
    const swapWord = this.networkToHostOrderSwap;
    while (offset < inputInt32.length) {
      inputWords0 = swapWord(inputInt32[offset]);
      inputWords1 = swapWord(inputInt32[offset + 1]);
      inputWords2 = swapWord(inputInt32[offset + 2]);
      inputWords3 = swapWord(inputInt32[offset + 3]);
      // Note the permuted word order (s1<->word3, s3<->word1); the inverse
      // permutation is applied when writing t0..t3 back out below.
      s0 = inputWords0 ^ invKeySchedule[0];
      s1 = inputWords3 ^ invKeySchedule[1];
      s2 = inputWords2 ^ invKeySchedule[2];
      s3 = inputWords1 ^ invKeySchedule[3];
      ksRow = 4;
      // Iterate through the rounds of decryption
      for (i = 1; i < nRounds; i++) {
        t0 =
          invSubMix0[s0 >>> 24] ^
          invSubMix1[(s1 >> 16) & 0xff] ^
          invSubMix2[(s2 >> 8) & 0xff] ^
          invSubMix3[s3 & 0xff] ^
          invKeySchedule[ksRow];
        t1 =
          invSubMix0[s1 >>> 24] ^
          invSubMix1[(s2 >> 16) & 0xff] ^
          invSubMix2[(s3 >> 8) & 0xff] ^
          invSubMix3[s0 & 0xff] ^
          invKeySchedule[ksRow + 1];
        t2 =
          invSubMix0[s2 >>> 24] ^
          invSubMix1[(s3 >> 16) & 0xff] ^
          invSubMix2[(s0 >> 8) & 0xff] ^
          invSubMix3[s1 & 0xff] ^
          invKeySchedule[ksRow + 2];
        t3 =
          invSubMix0[s3 >>> 24] ^
          invSubMix1[(s0 >> 16) & 0xff] ^
          invSubMix2[(s1 >> 8) & 0xff] ^
          invSubMix3[s2 & 0xff] ^
          invKeySchedule[ksRow + 3];
        // Update state
        s0 = t0;
        s1 = t1;
        s2 = t2;
        s3 = t3;
        ksRow = ksRow + 4;
      }
      // Shift rows, sub bytes, add round key
      t0 =
        (invSBOX[s0 >>> 24] << 24) ^
        (invSBOX[(s1 >> 16) & 0xff] << 16) ^
        (invSBOX[(s2 >> 8) & 0xff] << 8) ^
        invSBOX[s3 & 0xff] ^
        invKeySchedule[ksRow];
      t1 =
        (invSBOX[s1 >>> 24] << 24) ^
        (invSBOX[(s2 >> 16) & 0xff] << 16) ^
        (invSBOX[(s3 >> 8) & 0xff] << 8) ^
        invSBOX[s0 & 0xff] ^
        invKeySchedule[ksRow + 1];
      t2 =
        (invSBOX[s2 >>> 24] << 24) ^
        (invSBOX[(s3 >> 16) & 0xff] << 16) ^
        (invSBOX[(s0 >> 8) & 0xff] << 8) ^
        invSBOX[s1 & 0xff] ^
        invKeySchedule[ksRow + 2];
      t3 =
        (invSBOX[s3 >>> 24] << 24) ^
        (invSBOX[(s0 >> 16) & 0xff] << 16) ^
        (invSBOX[(s1 >> 8) & 0xff] << 8) ^
        invSBOX[s2 & 0xff] ^
        invKeySchedule[ksRow + 3];
      // Write
      outputInt32[offset] = swapWord(t0 ^ initVector0);
      outputInt32[offset + 1] = swapWord(t3 ^ initVector1);
      outputInt32[offset + 2] = swapWord(t2 ^ initVector2);
      outputInt32[offset + 3] = swapWord(t1 ^ initVector3);
      // reset initVector to last 4 unsigned int
      initVector0 = inputWords0;
      initVector1 = inputWords1;
      initVector2 = inputWords2;
      initVector3 = inputWords3;
      offset = offset + 4;
    }
    return outputInt32.buffer;
  }
}

209
server/node_modules/hls.js/src/crypt/decrypter.ts generated vendored Normal file
View File

@@ -0,0 +1,209 @@
import AESCrypto from './aes-crypto';
import FastAESKey from './fast-aes-key';
import AESDecryptor, { removePadding } from './aes-decryptor';
import { logger } from '../utils/logger';
import { appendUint8Array } from '../utils/mp4-tools';
import { sliceUint8 } from '../utils/typed-array';
import type { HlsConfig } from '../config';
const CHUNK_SIZE = 16; // 16 bytes, 128 bits
/**
 * AES-CBC fragment decrypter.
 *
 * Uses WebCrypto (SubtleCrypto) when available, falling back to the bundled
 * software AES implementation otherwise. The software path is progressive:
 * input may arrive in chunks, partial (non-16-byte-aligned) data is buffered
 * in `remainderData`, and the final PKCS#7-padded block is only emitted by
 * `flush()`.
 */
export default class Decrypter {
  private logEnabled: boolean = true;
  private removePKCS7Padding: boolean;
  private subtle: SubtleCrypto | null = null;
  private softwareDecrypter: AESDecryptor | null = null;
  private key: ArrayBuffer | null = null;
  private fastAesKey: FastAESKey | null = null;
  // Bytes held back because they did not fill a whole 16-byte AES block.
  private remainderData: Uint8Array | null = null;
  // CBC chaining state: IV for the next software-decrypted chunk.
  private currentIV: ArrayBuffer | null = null;
  // Last decrypted chunk, withheld so padding can be stripped on flush().
  private currentResult: ArrayBuffer | null = null;
  private useSoftware: boolean;
  constructor(config: HlsConfig, { removePKCS7Padding = true } = {}) {
    this.useSoftware = config.enableSoftwareAES;
    this.removePKCS7Padding = removePKCS7Padding;
    // built in decryptor expects PKCS7 padding
    if (removePKCS7Padding) {
      try {
        const browserCrypto = self.crypto;
        if (browserCrypto) {
          this.subtle =
            browserCrypto.subtle ||
            ((browserCrypto as any).webkitSubtle as SubtleCrypto);
        }
      } catch (e) {
        /* no-op */
      }
    }
    // NOTE(review): this unconditionally overwrites the enableSoftwareAES
    // value assigned above — software AES ends up being used exactly when no
    // SubtleCrypto was found (always the case when removePKCS7Padding is
    // false, since `subtle` is only probed inside that branch).
    this.useSoftware = !this.subtle;
  }
  /** Release all cached key/IV/buffer state. */
  destroy() {
    this.subtle = null;
    this.softwareDecrypter = null;
    this.key = null;
    this.fastAesKey = null;
    this.remainderData = null;
    this.currentIV = null;
    this.currentResult = null;
  }
  /** True when decryption runs synchronously (software path). */
  public isSync() {
    return this.useSoftware;
  }
  /**
   * Emit the final buffered chunk of a progressive (software) decryption,
   * stripping PKCS#7 padding when configured. Returns null (after resetting)
   * if nothing was decrypted or unconsumed partial-block bytes remain.
   */
  public flush(): Uint8Array | null {
    const { currentResult, remainderData } = this;
    if (!currentResult || remainderData) {
      this.reset();
      return null;
    }
    const data = new Uint8Array(currentResult);
    this.reset();
    if (this.removePKCS7Padding) {
      return removePadding(data);
    }
    return data;
  }
  /** Clear progressive-decryption state (chaining IV, buffered data). */
  public reset() {
    this.currentResult = null;
    this.currentIV = null;
    this.remainderData = null;
    if (this.softwareDecrypter) {
      this.softwareDecrypter = null;
    }
  }
  /**
   * One-shot decryption of a complete payload. The software path runs
   * synchronously inside a Promise; the WebCrypto path is natively async.
   */
  public decrypt(
    data: Uint8Array | ArrayBuffer,
    key: ArrayBuffer,
    iv: ArrayBuffer,
  ): Promise<ArrayBuffer> {
    if (this.useSoftware) {
      return new Promise((resolve, reject) => {
        this.softwareDecrypt(new Uint8Array(data), key, iv);
        const decryptResult = this.flush();
        if (decryptResult) {
          resolve(decryptResult.buffer);
        } else {
          reject(new Error('[softwareDecrypt] Failed to decrypt data'));
        }
      });
    }
    return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
  }
  // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
  // data is handled in the flush() call
  public softwareDecrypt(
    data: Uint8Array,
    key: ArrayBuffer,
    iv: ArrayBuffer,
  ): ArrayBuffer | null {
    const { currentIV, currentResult, remainderData } = this;
    this.logOnce('JS AES decrypt');
    // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
    // This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
    // the end on flush(), but by that time we have already received all bytes for the segment.
    // Progressive decryption does not work with WebCrypto
    if (remainderData) {
      data = appendUint8Array(remainderData, data);
      this.remainderData = null;
    }
    // Byte length must be a multiple of 16 (AES-128 = 128 bit blocks = 16 bytes)
    const currentChunk = this.getValidChunk(data);
    if (!currentChunk.length) {
      return null;
    }
    // Chain from the previous chunk's final ciphertext block when present.
    if (currentIV) {
      iv = currentIV;
    }
    let softwareDecrypter = this.softwareDecrypter;
    if (!softwareDecrypter) {
      softwareDecrypter = this.softwareDecrypter = new AESDecryptor();
    }
    softwareDecrypter.expandKey(key);
    const result = currentResult;
    this.currentResult = softwareDecrypter.decrypt(currentChunk.buffer, 0, iv);
    // The next chunk's IV is this chunk's last ciphertext block (CBC).
    this.currentIV = sliceUint8(currentChunk, -16).buffer;
    if (!result) {
      return null;
    }
    return result;
  }
  /**
   * Decrypt via WebCrypto, reusing the imported key when unchanged.
   * Any WebCrypto failure permanently switches to the software path.
   */
  public webCryptoDecrypt(
    data: Uint8Array,
    key: ArrayBuffer,
    iv: ArrayBuffer,
  ): Promise<ArrayBuffer> {
    if (this.key !== key || !this.fastAesKey) {
      if (!this.subtle) {
        return Promise.resolve(this.onWebCryptoError(data, key, iv));
      }
      this.key = key;
      this.fastAesKey = new FastAESKey(this.subtle, key);
    }
    return this.fastAesKey
      .expandKey()
      .then((aesKey) => {
        // decrypt using web crypto
        if (!this.subtle) {
          return Promise.reject(new Error('web crypto not initialized'));
        }
        this.logOnce('WebCrypto AES decrypt');
        const crypto = new AESCrypto(this.subtle, new Uint8Array(iv));
        return crypto.decrypt(data.buffer, aesKey);
      })
      .catch((err) => {
        logger.warn(
          `[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`,
        );
        return this.onWebCryptoError(data, key, iv);
      });
  }
  /** Fall back to software decryption; throws if that fails as well. */
  private onWebCryptoError(
    data: Uint8Array,
    key: ArrayBuffer,
    iv: ArrayBuffer,
  ): ArrayBuffer | never {
    this.useSoftware = true;
    this.logEnabled = true;
    this.softwareDecrypt(data, key, iv);
    const decryptResult = this.flush();
    if (decryptResult) {
      return decryptResult.buffer;
    }
    throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
  }
  /**
   * Split `data` at the last whole-block (16-byte) boundary; any remainder
   * is buffered for the next softwareDecrypt() call.
   */
  private getValidChunk(data: Uint8Array): Uint8Array {
    let currentChunk = data;
    const splitPoint = data.length - (data.length % CHUNK_SIZE);
    if (splitPoint !== data.length) {
      currentChunk = sliceUint8(data, 0, splitPoint);
      this.remainderData = sliceUint8(data, splitPoint);
    }
    return currentChunk;
  }
  /** Log `msg` once per enable cycle (re-armed by onWebCryptoError). */
  private logOnce(msg: string) {
    if (!this.logEnabled) {
      return;
    }
    logger.log(`[decrypter]: ${msg}`);
    this.logEnabled = false;
  }
}

16
server/node_modules/hls.js/src/crypt/fast-aes-key.ts generated vendored Normal file
View File

@@ -0,0 +1,16 @@
/**
 * Holds raw AES key bytes and imports them as a WebCrypto AES-CBC key.
 */
export default class FastAESKey {
  constructor(
    private subtle: SubtleCrypto,
    private key: ArrayBuffer,
  ) {}

  /** Import the raw key material for AES-CBC encrypt/decrypt use. */
  expandKey() {
    const usages: KeyUsage[] = ['encrypt', 'decrypt'];
    return this.subtle.importKey(
      'raw',
      this.key,
      { name: 'AES-CBC' },
      false,
      usages,
    );
  }
}

16
server/node_modules/hls.js/src/define-plugin.d.ts generated vendored Normal file
View File

@@ -0,0 +1,16 @@
declare const __VERSION__: string;
// Dynamic Modules
declare const __USE_ALT_AUDIO__: boolean;
declare const __USE_EME_DRM__: boolean;
declare const __USE_SUBTITLES__: boolean;
declare const __USE_CMCD__: boolean;
declare const __USE_CONTENT_STEERING__: boolean;
declare const __USE_VARIABLE_SUBSTITUTION__: boolean;
declare const __USE_M2TS_ADVANCED_CODECS__: boolean;
declare const __USE_MEDIA_CAPABILITIES__: boolean;
// __IN_WORKER__ is provided from a closure call around the final UMD bundle.
declare const __IN_WORKER__: boolean;
// __HLS_WORKER_BUNDLE__ is the name of the closure around the final UMD bundle.
declare const __HLS_WORKER_BUNDLE__: Function;

View File

@@ -0,0 +1,95 @@
/**
* AAC demuxer
*/
import BaseAudioDemuxer from './base-audio-demuxer';
import * as ADTS from './adts';
import * as MpegAudio from './mpegaudio';
import { logger } from '../../utils/logger';
import * as ID3 from '../id3';
import type { HlsEventEmitter } from '../../events';
import type { HlsConfig } from '../../config';
/**
 * AAC demuxer
 *
 * Probes for ADTS-framed AAC (after skipping any leading ID3 data) and
 * produces an audio track whose frames are split out via the ADTS helpers.
 */
class AACDemuxer extends BaseAudioDemuxer {
  private readonly observer: HlsEventEmitter;
  private readonly config: HlsConfig;
  constructor(observer: HlsEventEmitter, config: HlsConfig) {
    super();
    this.observer = observer;
    this.config = config;
  }
  /**
   * Reset track state for a new init segment: creates a fresh ADTS audio
   * track template on the 90 kHz MPEG-TS timescale.
   */
  resetInitSegment(
    initSegment: Uint8Array | undefined,
    audioCodec: string | undefined,
    videoCodec: string | undefined,
    trackDuration: number,
  ) {
    super.resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration);
    this._audioTrack = {
      container: 'audio/adts',
      type: 'audio',
      id: 2,
      pid: -1,
      sequenceNumber: 0,
      segmentCodec: 'aac',
      samples: [],
      manifestCodec: audioCodec,
      duration: trackDuration,
      inputTimeScale: 90000,
      dropped: 0,
    };
  }
  // Source for probe info - https://wiki.multimedia.cx/index.php?title=ADTS
  static probe(data: Uint8Array | undefined): boolean {
    if (!data) {
      return false;
    }
    // Check for the ADTS sync word
    // Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
    // Layer bits (position 14 and 15) in header should be always 0 for ADTS
    // More info https://wiki.multimedia.cx/index.php?title=ADTS
    const id3Data = ID3.getID3Data(data, 0);
    let offset = id3Data?.length || 0;
    // MPEG audio (MP3) also syncs on 0xFF; rule it out before scanning.
    if (MpegAudio.probe(data, offset)) {
      return false;
    }
    for (let length = data.length; offset < length; offset++) {
      if (ADTS.probe(data, offset)) {
        logger.log('ADTS sync word found !');
        return true;
      }
    }
    return false;
  }
  canParse(data: Uint8Array, offset: number): boolean {
    return ADTS.canParse(data, offset);
  }
  // `track` is the audio track created in resetInitSegment; its type
  // (DemuxedAudioTrack) is not imported in this module, so it stays untyped.
  appendFrame(track, data: Uint8Array, offset: number) {
    ADTS.initTrackConfig(
      track,
      this.observer,
      data,
      offset,
      track.manifestCodec,
    );
    const frame = ADTS.appendFrame(
      track,
      data,
      offset,
      this.basePTS as number,
      this.frameIndex,
    );
    // Only complete frames (no missing bytes) are returned to the caller.
    if (frame && frame.missing === 0) {
      return frame;
    }
  }
}
export default AACDemuxer;

View File

@@ -0,0 +1,169 @@
import BaseAudioDemuxer from './base-audio-demuxer';
import { getID3Data, getTimeStamp } from '../id3';
import { getAudioBSID } from './dolby';
import type { HlsEventEmitter } from '../../events';
import type { AudioFrame, DemuxedAudioTrack } from '../../types/demuxer';
/**
 * AC-3 elementary stream demuxer: probes for an AC-3 sync word following ID3
 * data and splits the stream into timestamped syncframes.
 */
export class AC3Demuxer extends BaseAudioDemuxer {
  private readonly observer: HlsEventEmitter;
  constructor(observer) {
    super();
    this.observer = observer;
  }
  /** Create a fresh AC-3 track template (90 kHz MPEG-TS timescale). */
  resetInitSegment(
    initSegment: Uint8Array | undefined,
    audioCodec: string | undefined,
    videoCodec: string | undefined,
    trackDuration: number,
  ) {
    super.resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration);
    this._audioTrack = {
      container: 'audio/ac-3',
      type: 'audio',
      id: 2,
      pid: -1,
      sequenceNumber: 0,
      segmentCodec: 'ac3',
      samples: [],
      manifestCodec: audioCodec,
      duration: trackDuration,
      inputTimeScale: 90000,
      dropped: 0,
    };
  }
  /** Require a 64-byte lookahead before attempting to parse a frame. */
  canParse(data: Uint8Array, offset: number): boolean {
    return offset + 64 < data.length;
  }
  /**
   * Append one AC-3 syncframe starting at `offset`; returns the frame info
   * or undefined when no complete frame is available.
   */
  appendFrame(
    track: DemuxedAudioTrack,
    data: Uint8Array,
    offset: number,
  ): AudioFrame | void {
    const length = appendFrame(
      track,
      data,
      offset,
      this.basePTS as number,
      this.frameIndex,
    );
    if (length === -1) {
      return;
    }
    const sample = track.samples[track.samples.length - 1];
    return { sample, length, missing: 0 };
  }
  static probe(data: Uint8Array | undefined): boolean {
    if (!data) {
      return false;
    }
    const id3Data = getID3Data(data, 0);
    if (!id3Data) {
      return false;
    }
    // AC-3 sync bytes (0x0b77) right after the ID3 data, a timestamp in the
    // ID3 header, and a BSID below 16 (16+ means E-AC-3) identify AC-3.
    const offset = id3Data.length;
    return (
      data[offset] === 0x0b &&
      data[offset + 1] === 0x77 &&
      getTimeStamp(id3Data) !== undefined &&
      getAudioBSID(data, offset) < 16
    );
  }
}
/**
 * Parse one AC-3 syncframe at `start` and append it to `track`.
 * Fills in the track's dac3 config bytes, channel count and sample rate, and
 * pushes a sample timestamped at `pts + frameIndex * frameDuration`.
 * @returns the frame length in bytes, or -1 when no valid/complete frame
 *          starts at `start`.
 */
export function appendFrame(
  track: DemuxedAudioTrack,
  data: Uint8Array,
  start: number,
  pts: number,
  frameIndex: number,
): number {
  if (start + 8 > data.length) {
    return -1; // not enough bytes left
  }
  if (data[start] !== 0x0b || data[start + 1] !== 0x77) {
    return -1; // invalid magic
  }
  // get sample rate
  const samplingRateCode = data[start + 4] >> 6;
  if (samplingRateCode >= 3) {
    return -1; // invalid sampling rate
  }
  const samplingRateMap = [48000, 44100, 32000];
  const sampleRate = samplingRateMap[samplingRateCode];
  // get frame size
  const frameSizeCode = data[start + 4] & 0x3f;
  // frmsizecod table in 16-bit words, flattened as [48k, 44.1k, 32k]
  // triplets per code (hence the `* 3 + samplingRateCode` lookup and the
  // `* 2` bytes conversion below).
  const frameSizeMap = [
    64, 69, 96, 64, 70, 96, 80, 87, 120, 80, 88, 120, 96, 104, 144, 96, 105,
    144, 112, 121, 168, 112, 122, 168, 128, 139, 192, 128, 140, 192, 160, 174,
    240, 160, 175, 240, 192, 208, 288, 192, 209, 288, 224, 243, 336, 224, 244,
    336, 256, 278, 384, 256, 279, 384, 320, 348, 480, 320, 349, 480, 384, 417,
    576, 384, 418, 576, 448, 487, 672, 448, 488, 672, 512, 557, 768, 512, 558,
    768, 640, 696, 960, 640, 697, 960, 768, 835, 1152, 768, 836, 1152, 896, 975,
    1344, 896, 976, 1344, 1024, 1114, 1536, 1024, 1115, 1536, 1152, 1253, 1728,
    1152, 1254, 1728, 1280, 1393, 1920, 1280, 1394, 1920,
  ];
  const frameLength = frameSizeMap[frameSizeCode * 3 + samplingRateCode] * 2;
  if (start + frameLength > data.length) {
    return -1;
  }
  // get channel count
  const channelMode = data[start + 6] >> 5;
  // Count the 2-bit mix-level/surround fields that precede the lfeon bit,
  // which vary with the channel mode (acmod).
  let skipCount = 0;
  if (channelMode === 2) {
    skipCount += 2;
  } else {
    if (channelMode & 1 && channelMode !== 1) {
      skipCount += 2;
    }
    if (channelMode & 4) {
      skipCount += 2;
    }
  }
  const lfeon =
    (((data[start + 6] << 8) | data[start + 7]) >> (12 - skipCount)) & 1;
  const channelsMap = [2, 1, 2, 3, 3, 4, 4, 5];
  const channelCount = channelsMap[channelMode] + lfeon;
  // build dac3 box
  const bsid = data[start + 5] >> 3;
  const bsmod = data[start + 5] & 7;
  const config = new Uint8Array([
    (samplingRateCode << 6) | (bsid << 1) | (bsmod >> 2),
    ((bsmod & 3) << 6) |
      (channelMode << 3) |
      (lfeon << 2) |
      (frameSizeCode >> 4),
    (frameSizeCode << 4) & 0xe0,
  ]);
  // 1536 PCM samples per AC-3 syncframe, expressed on the 90 kHz TS clock.
  const frameDuration = (1536 / sampleRate) * 90000;
  const stamp = pts + frameIndex * frameDuration;
  const unit = data.subarray(start, start + frameLength);
  track.config = config;
  track.channelCount = channelCount;
  track.samplerate = sampleRate;
  track.samples.push({ unit, pts: stamp });
  return frameLength;
}

315
server/node_modules/hls.js/src/demux/audio/adts.ts generated vendored Normal file
View File

@@ -0,0 +1,315 @@
/**
* ADTS parser helper
* @link https://wiki.multimedia.cx/index.php?title=ADTS
*/
import { logger } from '../../utils/logger';
import { ErrorTypes, ErrorDetails } from '../../errors';
import type { HlsEventEmitter } from '../../events';
import { Events } from '../../events';
import type {
DemuxedAudioTrack,
AudioFrame,
AudioSample,
} from '../../types/demuxer';
/** AudioSpecificConfig info derived from an ADTS header by getAudioConfig. */
type AudioConfig = {
  config: number[];
  samplerate: number;
  channelCount: number;
  codec: string;
  manifestCodec: string;
};
/** Byte lengths of an ADTS frame: the header portion and the frame length. */
type FrameHeader = {
  headerLength: number;
  frameLength: number;
};
/**
 * Derive an AudioSpecificConfig (plus codec string, sample rate and channel
 * count) from the ADTS header at `offset`.
 *
 * Applies per-browser workarounds based on the user agent: Firefox is forced
 * to HE-AAC (SBR) for sample rates below 24 kHz, Android always uses AAC-LC,
 * and other browsers default to HE-AAC with AAC-LC exceptions for mono
 * low-rate streams. Emits a fatal MEDIA_ERROR through `observer` and returns
 * undefined when the sampling index is out of range.
 */
export function getAudioConfig(
  observer: HlsEventEmitter,
  data: Uint8Array,
  offset: number,
  audioCodec: string,
): AudioConfig | void {
  let adtsObjectType: number;
  let adtsExtensionSamplingIndex: number;
  let adtsChannelConfig: number;
  let config: number[];
  const userAgent = navigator.userAgent.toLowerCase();
  const manifestCodec = audioCodec;
  const adtsSamplingRates = [
    96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025,
    8000, 7350,
  ];
  // byte 2
  adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
  const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
  if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
    const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
    observer.emit(Events.ERROR, Events.ERROR, {
      type: ErrorTypes.MEDIA_ERROR,
      details: ErrorDetails.FRAG_PARSING_ERROR,
      fatal: true,
      error,
      reason: error.message,
    });
    return;
  }
  adtsChannelConfig = (data[offset + 2] & 0x01) << 2;
  // byte 3
  adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
  logger.log(
    `manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`,
  );
  // firefox: freq less than 24kHz = AAC SBR (HE-AAC)
  if (/firefox/i.test(userAgent)) {
    if (adtsSamplingIndex >= 6) {
      adtsObjectType = 5;
      config = new Array(4);
      // HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
      // there is a factor 2 between frame sample rate and output sample rate
      // multiply frequency by 2 (see table below, equivalent to substract 3)
      adtsExtensionSamplingIndex = adtsSamplingIndex - 3;
    } else {
      adtsObjectType = 2;
      config = new Array(2);
      adtsExtensionSamplingIndex = adtsSamplingIndex;
    }
    // Android : always use AAC
  } else if (userAgent.indexOf('android') !== -1) {
    adtsObjectType = 2;
    config = new Array(2);
    adtsExtensionSamplingIndex = adtsSamplingIndex;
  } else {
    /* for other browsers (Chrome/Vivaldi/Opera ...)
      always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
    */
    adtsObjectType = 5;
    config = new Array(4);
    // if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz)
    if (
      (audioCodec &&
        (audioCodec.indexOf('mp4a.40.29') !== -1 ||
          audioCodec.indexOf('mp4a.40.5') !== -1)) ||
      (!audioCodec && adtsSamplingIndex >= 6)
    ) {
      // HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
      // there is a factor 2 between frame sample rate and output sample rate
      // multiply frequency by 2 (see table below, equivalent to substract 3)
      adtsExtensionSamplingIndex = adtsSamplingIndex - 3;
    } else {
      // if (manifest codec is AAC) AND (frequency less than 24kHz AND nb channel is 1) OR (manifest codec not specified and mono audio)
      // Chrome fails to play back with low frequency AAC LC mono when initialized with HE-AAC. This is not a problem with stereo.
      if (
        (audioCodec &&
          audioCodec.indexOf('mp4a.40.2') !== -1 &&
          ((adtsSamplingIndex >= 6 && adtsChannelConfig === 1) ||
            /vivaldi/i.test(userAgent))) ||
        (!audioCodec && adtsChannelConfig === 1)
      ) {
        adtsObjectType = 2;
        config = new Array(2);
      }
      adtsExtensionSamplingIndex = adtsSamplingIndex;
    }
  }
  /* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
    ISO 14496-3 (AAC).pdf - Table 1.13 — Syntax of AudioSpecificConfig()
  Audio Profile / Audio Object Type
  0: Null
  1: AAC Main
  2: AAC LC (Low Complexity)
  3: AAC SSR (Scalable Sample Rate)
  4: AAC LTP (Long Term Prediction)
  5: SBR (Spectral Band Replication)
  6: AAC Scalable
  sampling freq
  0: 96000 Hz
  1: 88200 Hz
  2: 64000 Hz
  3: 48000 Hz
  4: 44100 Hz
  5: 32000 Hz
  6: 24000 Hz
  7: 22050 Hz
  8: 16000 Hz
  9: 12000 Hz
  10: 11025 Hz
  11: 8000 Hz
  12: 7350 Hz
  13: Reserved
  14: Reserved
  15: frequency is written explictly
  Channel Configurations
  These are the channel configurations:
  0: Defined in AOT Specifc Config
  1: 1 channel: front-center
  2: 2 channels: front-left, front-right
  */
  // audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
  config[0] = adtsObjectType << 3;
  // samplingFrequencyIndex
  config[0] |= (adtsSamplingIndex & 0x0e) >> 1;
  // NOTE(review): config entries start as holes (new Array); `|=` on an
  // unset slot coerces undefined to 0, which is what this code relies on.
  config[1] |= (adtsSamplingIndex & 0x01) << 7;
  // channelConfiguration
  config[1] |= adtsChannelConfig << 3;
  if (adtsObjectType === 5) {
    // adtsExtensionSamplingIndex
    config[1] |= (adtsExtensionSamplingIndex & 0x0e) >> 1;
    config[2] = (adtsExtensionSamplingIndex & 0x01) << 7;
    // adtsObjectType (force to 2, chrome is checking that object type is less than 5 ???
    // https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
    config[2] |= 2 << 2;
    config[3] = 0;
  }
  return {
    config,
    samplerate: adtsSamplingRates[adtsSamplingIndex],
    channelCount: adtsChannelConfig,
    codec: 'mp4a.40.' + adtsObjectType,
    manifestCodec,
  };
}
/**
 * Returns true when the two bytes at `offset` match the ADTS syncword
 * pattern: 0xFFF followed by cleared layer bits (mask 0xf6 keeps the
 * upper sync bits and the layer bits, which must all read as 0xf0).
 */
export function isHeaderPattern(data: Uint8Array, offset: number): boolean {
  const syncByte = data[offset];
  const flagsByte = data[offset + 1];
  return syncByte === 0xff && (flagsByte & 0xf6) === 0xf0;
}
/**
 * ADTS header size in bytes: the protection_absent flag (bit 0 of byte 1)
 * being set means no CRC follows (7-byte header); cleared means two CRC
 * bytes are appended (9-byte header).
 */
export function getHeaderLength(data: Uint8Array, offset: number): number {
  const protectionAbsent = (data[offset + 1] & 0x01) !== 0;
  return protectionAbsent ? 7 : 9;
}
/**
 * Reads the 13-bit ADTS frame_length field, which spans three bytes:
 * the low 2 bits of byte 3, all of byte 4, and the top 3 bits of byte 5.
 */
export function getFullFrameLength(data: Uint8Array, offset: number): number {
  const high = (data[offset + 3] & 0x03) << 11;
  const mid = data[offset + 4] << 3;
  const low = (data[offset + 5] & 0xe0) >>> 5;
  return high | mid | low;
}
/**
 * True when the buffer still contains byte offset+5, the last byte of the
 * 13-bit frame_length field — i.e. the frame length is readable at all.
 */
export function canGetFrameLength(data: Uint8Array, offset: number): boolean {
  const lastLengthByte = offset + 5;
  return lastLengthByte < data.length;
}
/**
 * ADTS header check: | 1111 1111 | 1111 X00X | where X can be 0 or 1.
 * Layer bits (positions 14 and 15) must always be 0 for ADTS.
 * More info https://wiki.multimedia.cx/index.php?title=ADTS
 */
export function isHeader(data: Uint8Array, offset: number): boolean {
  // Need at least the two syncword bytes readable at offset.
  return offset + 1 < data.length ? isHeaderPattern(data, offset) : false;
}
/**
 * A frame is parsable when the header pattern matches, its length field is
 * readable, and the declared full frame fits within the remaining buffer.
 */
export function canParse(data: Uint8Array, offset: number): boolean {
  if (!canGetFrameLength(data, offset)) {
    return false;
  }
  if (!isHeaderPattern(data, offset)) {
    return false;
  }
  return getFullFrameLength(data, offset) <= data.length - offset;
}
/**
 * Stronger variant of isHeader: additionally requires that another ADTS
 * header starts immediately after this frame, or that the frame exactly
 * ends the buffer.
 */
export function probe(data: Uint8Array, offset: number): boolean {
  if (!isHeader(data, offset)) {
    return false;
  }
  // The header itself must be fully contained in the buffer.
  const headerLength = getHeaderLength(data, offset);
  if (offset + headerLength >= data.length) {
    return false;
  }
  // A real frame carries payload beyond its header.
  const frameLength = getFullFrameLength(data, offset);
  if (frameLength <= headerLength) {
    return false;
  }
  const nextFrame = offset + frameLength;
  return nextFrame === data.length || isHeader(data, nextFrame);
}
/**
 * Populate the audio track's codec configuration from the first ADTS
 * header, exactly once: `track.samplerate` doubles as the
 * "already initialized" marker.
 */
export function initTrackConfig(
  track: DemuxedAudioTrack,
  observer: HlsEventEmitter,
  data: Uint8Array,
  offset: number,
  audioCodec: string,
) {
  if (track.samplerate) {
    return;
  }
  const config = getAudioConfig(observer, data, offset, audioCodec);
  if (!config) {
    return;
  }
  track.config = config.config;
  track.samplerate = config.samplerate;
  track.channelCount = config.channelCount;
  track.codec = config.codec;
  track.manifestCodec = config.manifestCodec;
  logger.log(
    `parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`,
  );
}
/**
 * Duration of one AAC frame (1024 PCM samples) expressed on the MPEG-TS
 * 90 kHz clock.
 */
export function getFrameDuration(samplerate: number): number {
  const SAMPLES_PER_FRAME = 1024;
  const TS_CLOCK = 90000;
  return (SAMPLES_PER_FRAME * TS_CLOCK) / samplerate;
}
/**
 * Parse the ADTS header at `offset`. Returns the header length (7 or 9
 * bytes depending on the protection/CRC bit) and the payload length, or
 * undefined when the header is truncated or declares an empty frame.
 */
export function parseFrameHeader(
  data: Uint8Array,
  offset: number,
): FrameHeader | void {
  const headerLength = getHeaderLength(data, offset);
  if (offset + headerLength > data.length) {
    return;
  }
  // Payload size = full ADTS frame length minus the header itself.
  const frameLength = getFullFrameLength(data, offset) - headerLength;
  if (frameLength <= 0) {
    return;
  }
  return { headerLength, frameLength };
}
/**
 * Parse one ADTS frame at `offset` and append its AAC payload to the track.
 *
 * Returns the (possibly partial) sample, the number of bytes consumed, and
 * `missing`:
 *   - 0  : complete frame; the sample was pushed to `track.samples`
 *   - >0 : frame truncated; `missing` bytes are still needed, the partial
 *          sample is returned but NOT pushed
 *   - -1 : not even a full header was available; the remaining bytes are
 *          returned as a partial sample
 */
export function appendFrame(
  track: DemuxedAudioTrack,
  data: Uint8Array,
  offset: number,
  pts: number,
  frameIndex: number,
): AudioFrame {
  // PTS of this frame = base PTS plus frameIndex whole frame durations.
  // track.samplerate must already be set (see initTrackConfig).
  const frameDuration = getFrameDuration(track.samplerate as number);
  const stamp = pts + frameIndex * frameDuration;
  const header = parseFrameHeader(data, offset);
  let unit: Uint8Array;
  if (header) {
    const { frameLength, headerLength } = header;
    const length = headerLength + frameLength;
    // Bytes of this frame that extend past the end of `data`, if any.
    const missing = Math.max(0, offset + length - data.length);
    // logger.log(`AAC frame ${frameIndex}, pts:${stamp} length@offset/total: ${frameLength}@${offset+headerLength}/${data.byteLength} missing: ${missing}`);
    if (missing) {
      // Truncated frame: copy what we have into a right-sized buffer so
      // the caller can complete it once more data arrives.
      unit = new Uint8Array(length - headerLength);
      unit.set(data.subarray(offset + headerLength, data.length), 0);
    } else {
      // Complete frame: reference the payload in place (no copy).
      unit = data.subarray(offset + headerLength, offset + length);
    }
    const sample: AudioSample = {
      unit,
      pts: stamp,
    };
    if (!missing) {
      track.samples.push(sample as AudioSample);
    }
    return { sample, length, missing };
  }
  // Overflow: incomplete header. Hand back the remaining bytes and signal
  // the condition with missing === -1.
  const length = data.length - offset;
  unit = new Uint8Array(length);
  unit.set(data.subarray(offset, data.length), 0);
  const sample: AudioSample = {
    unit,
    pts: stamp,
  };
  return { sample, length, missing: -1 };
}

View File

@@ -0,0 +1,197 @@
import * as ID3 from '../id3';
import {
DemuxerResult,
Demuxer,
DemuxedAudioTrack,
AudioFrame,
DemuxedMetadataTrack,
DemuxedVideoTrackBase,
DemuxedUserdataTrack,
KeyData,
MetadataSchema,
} from '../../types/demuxer';
import { dummyTrack } from '../dummy-demuxed-track';
import { appendUint8Array } from '../../utils/mp4-tools';
import { sliceUint8 } from '../../utils/typed-array';
import { RationalTimestamp } from '../../utils/timescale-conversion';
/**
 * Shared base class for raw elementary audio demuxers (ADTS/AAC, MP3, …).
 *
 * Subclasses implement `canParse`/`appendFrame` for their frame format and
 * create `_audioTrack` in `resetInitSegment`. This class drives the byte
 * scan, extracts interleaved ID3 metadata, maintains PTS bookkeeping and
 * caches trailing partial data between `demux()` calls.
 */
class BaseAudioDemuxer implements Demuxer {
  protected _audioTrack!: DemuxedAudioTrack;
  protected _id3Track!: DemuxedMetadataTrack;
  // Number of frames appended since the last discontinuity; each frame's
  // PTS is derived as basePTS + frameIndex * frameDuration by subclasses.
  protected frameIndex: number = 0;
  // Bytes left over from the previous demux() call (partial frame or tag).
  protected cachedData: Uint8Array | null = null;
  // 90 kHz timestamp of frame 0 of the current contiguous run.
  protected basePTS: number | null = null;
  protected initPTS: RationalTimestamp | null = null;
  // 90 kHz timestamp of the most recently parsed frame (also used for ID3).
  protected lastPTS: number | null = null;

  resetInitSegment(
    initSegment: Uint8Array | undefined,
    audioCodec: string | undefined,
    videoCodec: string | undefined,
    trackDuration: number,
  ) {
    // The audio track itself is subclass responsibility; only the ID3
    // metadata track is (re)created here.
    this._id3Track = {
      type: 'id3',
      id: 3,
      pid: -1,
      inputTimeScale: 90000,
      sequenceNumber: 0,
      samples: [],
      dropped: 0,
    };
  }

  // Fixed typo in the parameter name (was `deaultTimestamp`); positional
  // call sites are unaffected.
  resetTimeStamp(defaultTimestamp: RationalTimestamp | null) {
    this.initPTS = defaultTimestamp;
    this.resetContiguity();
  }

  resetContiguity(): void {
    this.basePTS = null;
    this.lastPTS = null;
    this.frameIndex = 0;
  }

  // Overridden by subclasses: whether a frame header can be parsed at offset.
  canParse(data: Uint8Array, offset: number): boolean {
    return false;
  }

  // Overridden by subclasses: parse/append one frame at offset.
  appendFrame(
    track: DemuxedAudioTrack,
    data: Uint8Array,
    offset: number,
  ): AudioFrame | void {}

  // feed incoming data to the front of the parsing pipeline
  demux(data: Uint8Array, timeOffset: number): DemuxerResult {
    // Prepend any partial bytes kept from the previous call.
    if (this.cachedData) {
      data = appendUint8Array(this.cachedData, data);
      this.cachedData = null;
    }
    let id3Data: Uint8Array | undefined = ID3.getID3Data(data, 0);
    let offset = id3Data ? id3Data.length : 0;
    let lastDataIndex;
    const track = this._audioTrack;
    const id3Track = this._id3Track;
    const timestamp = id3Data ? ID3.getTimeStamp(id3Data) : undefined;
    const length = data.length;
    // (Re)anchor the base PTS at the start of a run, or when a leading ID3
    // timestamp arrives before any frame has been appended.
    if (
      this.basePTS === null ||
      (this.frameIndex === 0 && Number.isFinite(timestamp))
    ) {
      this.basePTS = initPTSFn(timestamp, timeOffset, this.initPTS);
      this.lastPTS = this.basePTS;
    }
    if (this.lastPTS === null) {
      this.lastPTS = this.basePTS;
    }
    // more expressive than alternative: id3Data?.length
    if (id3Data && id3Data.length > 0) {
      id3Track.samples.push({
        pts: this.lastPTS,
        dts: this.lastPTS,
        data: id3Data,
        type: MetadataSchema.audioId3,
        duration: Number.POSITIVE_INFINITY,
      });
    }
    // Scan byte-by-byte: audio frames and interleaved ID3 tags advance the
    // cursor; unrecognized bytes are skipped one at a time.
    while (offset < length) {
      if (this.canParse(data, offset)) {
        const frame = this.appendFrame(track, data, offset);
        if (frame) {
          this.frameIndex++;
          this.lastPTS = frame.sample.pts;
          offset += frame.length;
          lastDataIndex = offset;
        } else {
          // Partial frame at the end of the buffer: stop scanning.
          offset = length;
        }
      } else if (ID3.canParse(data, offset)) {
        // after a ID3.canParse, a call to ID3.getID3Data *should* always returns some data
        id3Data = ID3.getID3Data(data, offset)!;
        id3Track.samples.push({
          pts: this.lastPTS,
          dts: this.lastPTS,
          data: id3Data,
          type: MetadataSchema.audioId3,
          duration: Number.POSITIVE_INFINITY,
        });
        offset += id3Data.length;
        lastDataIndex = offset;
      } else {
        offset++;
      }
      // At the end of the buffer, stash everything after the last fully
      // consumed frame/tag so the next demux() call can complete it.
      if (offset === length && lastDataIndex !== length) {
        const partialData = sliceUint8(data, lastDataIndex);
        if (this.cachedData) {
          this.cachedData = appendUint8Array(this.cachedData, partialData);
        } else {
          this.cachedData = partialData;
        }
      }
    }
    return {
      audioTrack: track,
      videoTrack: dummyTrack() as DemuxedVideoTrackBase,
      id3Track,
      textTrack: dummyTrack() as DemuxedUserdataTrack,
    };
  }

  demuxSampleAes(
    data: Uint8Array,
    keyData: KeyData,
    timeOffset: number,
  ): Promise<DemuxerResult> {
    return Promise.reject(
      new Error(
        `[${this}] This demuxer does not support Sample-AES decryption`,
      ),
    );
  }

  flush(timeOffset: number): DemuxerResult {
    // Parse cache in case of remaining frames.
    const cachedData = this.cachedData;
    if (cachedData) {
      this.cachedData = null;
      this.demux(cachedData, 0);
    }
    return {
      audioTrack: this._audioTrack,
      videoTrack: dummyTrack() as DemuxedVideoTrackBase,
      id3Track: this._id3Track,
      textTrack: dummyTrack() as DemuxedUserdataTrack,
    };
  }

  destroy() {}
}
/**
 * Initialize PTS on the 90 kHz clock.
 * <p>
 * use timestamp unless it is undefined, NaN or Infinity
 * </p>
 */
export const initPTSFn = (
  timestamp: number | undefined,
  timeOffset: number,
  initPTS: RationalTimestamp | null,
): number => {
  // A finite ID3 timestamp wins outright; scale by 90 to reach 90 kHz.
  if (Number.isFinite(timestamp as number)) {
    return timestamp! * 90;
  }
  // Otherwise anchor on the segment time offset plus the known initPTS,
  // both expressed on the 90 kHz clock.
  const offset90k = timeOffset * 90000;
  const init90k = initPTS ? (initPTS.baseTime * 90000) / initPTS.timescale : 0;
  return offset90k + init90k;
};
export default BaseAudioDemuxer;

21
server/node_modules/hls.js/src/demux/audio/dolby.ts generated vendored Normal file
View File

@@ -0,0 +1,21 @@
/**
 * Reads the 5-bit bsid field used to distinguish ac-3 / ec-3 streams.
 * The field starts 5 bytes past `offset`; bits are read MSB-first.
 * Typed-array scratch cells keep the arithmetic in unsigned 32-bit space.
 */
export const getAudioBSID = (data: Uint8Array, offset: number): number => {
  let bsid = 0;
  let remainingBits = 5;
  offset += remainingBits;
  const scratch = new Uint32Array(1); // unsigned 32 bit temporary
  const bitMask = new Uint32Array(1); // unsigned 32 bit mask value
  const currentByte = new Uint8Array(1); // unsigned 8 bit temporary
  while (remainingBits > 0) {
    currentByte[0] = data[offset];
    // consume up to 8 of the remaining bits from the current byte
    const take = Math.min(remainingBits, 8);
    const shift = 8 - take;
    bitMask[0] = (0xff000000 >>> (24 + shift)) << shift;
    scratch[0] = (currentByte[0] & bitMask[0]) >> shift;
    bsid = !bsid ? scratch[0] : (bsid << take) | scratch[0];
    offset += 1;
    remainingBits -= take;
  }
  return bsid;
};

View File

@@ -0,0 +1,84 @@
/**
* MP3 demuxer
*/
import BaseAudioDemuxer from './base-audio-demuxer';
import { getID3Data, getTimeStamp } from '../id3';
import { getAudioBSID } from './dolby';
import { logger } from '../../utils/logger';
import * as MpegAudio from './mpegaudio';
/**
 * MP3 demuxer: probes for an MPEG audio sync word and delegates per-frame
 * parsing to the shared MpegAudio helpers via BaseAudioDemuxer.
 */
class MP3Demuxer extends BaseAudioDemuxer {
  resetInitSegment(
    initSegment: Uint8Array | undefined,
    audioCodec: string | undefined,
    videoCodec: string | undefined,
    trackDuration: number,
  ) {
    super.resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration);
    // Fresh elementary MP3 track; timestamps are on the 90 kHz TS clock.
    this._audioTrack = {
      container: 'audio/mpeg',
      type: 'audio',
      id: 2,
      pid: -1,
      sequenceNumber: 0,
      segmentCodec: 'mp3',
      samples: [],
      manifestCodec: audioCodec,
      duration: trackDuration,
      inputTimeScale: 90000,
      dropped: 0,
    };
  }

  /**
   * Returns true when `data` looks like an MPEG audio stream (optionally
   * preceded by ID3 tags) and is not actually an AC-3/EC-3 stream.
   */
  static probe(data: Uint8Array | undefined): boolean {
    if (!data) {
      return false;
    }
    // check if data contains ID3 timestamp and MPEG sync word
    // Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
    // Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
    // More info http://www.mp3-tech.org/programmer/frame_header.html
    const id3Data = getID3Data(data, 0);
    let offset = id3Data?.length || 0;
    // Check for ac-3|ec-3 sync bytes and return false if present
    if (
      id3Data &&
      data[offset] === 0x0b &&
      data[offset + 1] === 0x77 &&
      getTimeStamp(id3Data) !== undefined &&
      // check the bsid to confirm ac-3 or ec-3 (not mp3)
      getAudioBSID(data, offset) <= 16
    ) {
      return false;
    }
    // Scan forward for the first verifiable MPEG frame header.
    for (let length = data.length; offset < length; offset++) {
      if (MpegAudio.probe(data, offset)) {
        logger.log('MPEG Audio sync word found !');
        return true;
      }
    }
    return false;
  }

  // NOTE(review): parameters below are untyped (implicit any) — the type
  // annotations were likely stripped when this vendored copy was generated;
  // the base class declares (data: Uint8Array, offset: number).
  canParse(data, offset) {
    return MpegAudio.canParse(data, offset);
  }

  // Appends one MPEG frame; no-op until basePTS has been anchored by
  // BaseAudioDemuxer.demux().
  appendFrame(track, data, offset) {
    if (this.basePTS === null) {
      return;
    }
    return MpegAudio.appendFrame(
      track,
      data,
      offset,
      this.basePTS,
      this.frameIndex,
    );
  }
}
export default MP3Demuxer;

177
server/node_modules/hls.js/src/demux/audio/mpegaudio.ts generated vendored Normal file
View File

@@ -0,0 +1,177 @@
/**
* MPEG parser helper
*/
import { DemuxedAudioTrack } from '../../types/demuxer';
// Lazily-detected Chrome major version (null until first use inside
// parseHeader); drives a Chromium Layer II playback workaround there.
let chromeVersion: number | null = null;
// Bitrate lookup in kbit/s, 14 entries per version/layer column
// (parseHeader multiplies by 1000 and indexes via columnInBitrates * 14).
const BitratesMap = [
  32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, 32, 48, 56,
  64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 32, 40, 48, 56, 64, 80,
  96, 112, 128, 160, 192, 224, 256, 320, 32, 48, 56, 64, 80, 96, 112, 128, 144,
  160, 176, 192, 224, 256, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144,
  160,
];
// Sample rates in Hz, 3 entries per MPEG version group
// (indexed via columnInSampleRates * 3 in parseHeader).
const SamplingRateMap = [
  44100, 48000, 32000, 22050, 24000, 16000, 11025, 12000, 8000,
];
// Samples-per-frame coefficient, indexed [mpegVersion][mpegLayer].
const SamplesCoefficients = [
  // MPEG 2.5
  [
    0, // Reserved
    72, // Layer3
    144, // Layer2
    12, // Layer1
  ],
  // Reserved
  [
    0, // Reserved
    0, // Layer3
    0, // Layer2
    0, // Layer1
  ],
  // MPEG 2
  [
    0, // Reserved
    72, // Layer3
    144, // Layer2
    12, // Layer1
  ],
  // MPEG 1
  [
    0, // Reserved
    144, // Layer3
    144, // Layer2
    12, // Layer1
  ],
];
// Slot size in bytes, indexed by mpegLayer (Layer I uses 4-byte slots).
const BytesInSlot = [
  0, // Reserved
  1, // Layer3
  1, // Layer2
  4, // Layer1
];
/**
 * Parse one MPEG audio frame at `offset`, push it onto the track and
 * return { sample, length, missing: 0 }; returns undefined when the frame
 * (or even a safe header margin) does not fit in the buffer.
 * Reference: http://www.datavoyage.com/mpgscript/mpeghdr.htm
 */
export function appendFrame(
  track: DemuxedAudioTrack,
  data: Uint8Array,
  offset: number,
  pts: number,
  frameIndex: number,
) {
  // Require a comfortable margin past the header before attempting a parse.
  if (offset + 24 > data.length) {
    return;
  }
  const header = parseHeader(data, offset);
  if (!header || offset + header.frameLength > data.length) {
    return;
  }
  // Frame duration on the 90 kHz TS clock.
  const frameDuration = (header.samplesPerFrame * 90000) / header.sampleRate;
  const stamp = pts + frameIndex * frameDuration;
  const sample = {
    unit: data.subarray(offset, offset + header.frameLength),
    pts: stamp,
    dts: stamp,
  };
  track.config = [];
  track.channelCount = header.channelCount;
  track.samplerate = header.sampleRate;
  track.samples.push(sample);
  return { sample, length: header.frameLength, missing: 0 };
}
/**
 * Decode the 4-byte MPEG audio frame header at `offset`.
 * Returns sampleRate, channelCount, frameLength (bytes) and samplesPerFrame,
 * or undefined for reserved/invalid version, bitrate or sample-rate codes.
 * May patch `data` in place (Chromium workaround below).
 */
export function parseHeader(data: Uint8Array, offset: number) {
  // Byte 1: bits 3-4 = MPEG version code, bits 1-2 = layer code.
  const mpegVersion = (data[offset + 1] >> 3) & 3;
  const mpegLayer = (data[offset + 1] >> 1) & 3;
  // Byte 2: bits 4-7 = bitrate index, bits 2-3 = sample-rate index.
  const bitRateIndex = (data[offset + 2] >> 4) & 15;
  const sampleRateIndex = (data[offset + 2] >> 2) & 3;
  // version 1, bitrate 0 ("free") / 15 and sample-rate 3 are reserved codes.
  if (
    mpegVersion !== 1 &&
    bitRateIndex !== 0 &&
    bitRateIndex !== 15 &&
    sampleRateIndex !== 3
  ) {
    const paddingBit = (data[offset + 2] >> 1) & 1;
    // Byte 3: top 2 bits = channel mode.
    const channelMode = data[offset + 3] >> 6;
    // Select the BitratesMap row for this (version, layer) combination.
    const columnInBitrates =
      mpegVersion === 3 ? 3 - mpegLayer : mpegLayer === 3 ? 3 : 4;
    const bitRate =
      BitratesMap[columnInBitrates * 14 + bitRateIndex - 1] * 1000;
    const columnInSampleRates =
      mpegVersion === 3 ? 0 : mpegVersion === 2 ? 1 : 2;
    const sampleRate =
      SamplingRateMap[columnInSampleRates * 3 + sampleRateIndex];
    const channelCount = channelMode === 3 ? 1 : 2; // If bits of channel mode are `11` then it is a single channel (Mono)
    const sampleCoefficient = SamplesCoefficients[mpegVersion][mpegLayer];
    const bytesInSlot = BytesInSlot[mpegLayer];
    const samplesPerFrame = sampleCoefficient * 8 * bytesInSlot;
    const frameLength =
      Math.floor((sampleCoefficient * bitRate) / sampleRate + paddingBit) *
      bytesInSlot;
    // Detect the Chrome major version once per session.
    if (chromeVersion === null) {
      const userAgent = navigator.userAgent || '';
      const result = userAgent.match(/Chrome\/(\d+)/i);
      chromeVersion = result ? parseInt(result[1]) : 0;
    }
    const needChromeFix = !!chromeVersion && chromeVersion <= 87;
    if (
      needChromeFix &&
      mpegLayer === 2 &&
      bitRate >= 224000 &&
      channelMode === 0
    ) {
      // Work around bug in Chromium by setting channelMode to dual-channel (01) instead of stereo (00)
      // (mutates the input buffer in place).
      data[offset + 3] = data[offset + 3] | 0x80;
    }
    return { sampleRate, channelCount, frameLength, samplesPerFrame };
  }
}
/**
 * MPEG audio syncword check: 11 set bits, then nonzero layer bits
 * (0b00 in bits 1-2 of byte 1 would mean "reserved layer").
 */
export function isHeaderPattern(data: Uint8Array, offset: number): boolean {
  const syncByte = data[offset];
  const flagsByte = data[offset + 1];
  return (
    syncByte === 0xff &&
    (flagsByte & 0xe0) === 0xe0 &&
    (flagsByte & 0x06) !== 0x00
  );
}
/**
 * MPEG header | 1111 1111 | 111X XYZX | where X can be 0 or 1 and Y or Z
 * should be 1; layer bits (positions 14/15) must be nonzero.
 * More info http://www.mp3-tech.org/programmer/frame_header.html
 */
export function isHeader(data: Uint8Array, offset: number): boolean {
  // Need at least the two syncword bytes readable at offset.
  return offset + 1 < data.length ? isHeaderPattern(data, offset) : false;
}
/**
 * True when a header pattern matches and the full 4-byte MPEG header is
 * contained within the remaining buffer.
 */
export function canParse(data: Uint8Array, offset: number): boolean {
  const HEADER_SIZE = 4;
  return isHeaderPattern(data, offset) && data.length - offset >= HEADER_SIZE;
}
/**
 * Like isHeader, but additionally requires that another MPEG header starts
 * right after this frame, or that the frame exactly ends the buffer.
 */
export function probe(data: Uint8Array, offset: number): boolean {
  if (offset + 1 >= data.length || !isHeaderPattern(data, offset)) {
    return false;
  }
  // Fall back to the 4-byte header size when the frame length cannot be
  // derived from the header fields.
  const headerLength = 4;
  const header = parseHeader(data, offset);
  const frameLength = header?.frameLength ? header.frameLength : headerLength;
  const nextFrame = offset + frameLength;
  return nextFrame === data.length || isHeader(data, nextFrame);
}

42
server/node_modules/hls.js/src/demux/chunk-cache.ts generated vendored Normal file
View File

@@ -0,0 +1,42 @@
/**
 * Accumulates Uint8Array chunks and concatenates them on demand.
 */
export default class ChunkCache {
  // Chunks in arrival order; dataLength tracks their combined byte count.
  private chunks: Array<Uint8Array> = [];
  public dataLength: number = 0;

  /** Append one chunk to the cache. */
  push(chunk: Uint8Array) {
    this.chunks.push(chunk);
    this.dataLength += chunk.length;
  }

  /** Return everything cached as one buffer, then empty the cache. */
  flush(): Uint8Array {
    const { chunks, dataLength } = this;
    if (chunks.length === 0) {
      return new Uint8Array(0);
    }
    let merged: Uint8Array;
    if (chunks.length === 1) {
      // Single chunk: hand it back without copying.
      merged = chunks[0];
    } else {
      merged = concatUint8Arrays(chunks, dataLength);
    }
    this.reset();
    return merged;
  }

  /** Drop all cached data. */
  reset() {
    this.chunks.length = 0;
    this.dataLength = 0;
  }
}
/**
 * Copies each chunk back-to-back into one preallocated buffer of
 * `dataLength` bytes (caller guarantees the total matches).
 */
function concatUint8Arrays(
  chunks: Array<Uint8Array>,
  dataLength: number,
): Uint8Array {
  const merged = new Uint8Array(dataLength);
  let cursor = 0;
  for (const chunk of chunks) {
    merged.set(chunk, cursor);
    cursor += chunk.length;
  }
  return merged;
}

View File

@@ -0,0 +1,13 @@
import type { DemuxedTrack } from '../types/demuxer';
/**
 * Builds a placeholder demuxed track. Negative id/pid/sequenceNumber mark
 * it as synthetic (not backed by real media).
 */
export function dummyTrack(
  type: string = '',
  inputTimeScale: number = 90000,
): DemuxedTrack {
  return {
    type,
    id: -1,
    pid: -1,
    inputTimeScale,
    sequenceNumber: -1,
    samples: [],
    dropped: 0,
  };
}

411
server/node_modules/hls.js/src/demux/id3.ts generated vendored Normal file
View File

@@ -0,0 +1,411 @@
// Undecoded ID3 frame: 4-character frame id, declared payload size, raw bytes.
type RawFrame = { type: string; size: number; data: Uint8Array };
// breaking up those two types in order to clarify what is happening in the decoding path.
// `key` mirrors the frame id; `info` carries the description/owner when present.
type DecodedFrame<T> = { key: string; data: T; info?: any };
export type Frame = DecodedFrame<ArrayBuffer | string>;
/**
 * Returns true if an ID3 header can be found at offset in data
 * @param data - The data to search
 * @param offset - The offset at which to start searching
 */
export const isHeader = (data: Uint8Array, offset: number): boolean => {
  /*
   * http://id3.org/id3v2.3.0
   * An ID3v2 tag can be detected with the following pattern:
   *  $49 44 33 yy yy xx zz zz zz zz
   * Where each yy is less than $FF, xx is the 'flags' byte and each zz is
   * less than $80 (sync-safe size bytes).
   */
  if (offset + 10 > data.length) {
    return false;
  }
  // 'ID3' identifier
  if (
    data[offset] !== 0x49 ||
    data[offset + 1] !== 0x44 ||
    data[offset + 2] !== 0x33
  ) {
    return false;
  }
  // version bytes must be within range
  if (data[offset + 3] >= 0xff || data[offset + 4] >= 0xff) {
    return false;
  }
  // all four size bytes must be sync-safe (< 0x80)
  for (let i = 6; i < 10; i++) {
    if (data[offset + i] >= 0x80) {
      return false;
    }
  }
  return true;
};
/**
 * Returns true if an ID3 footer can be found at offset in data
 * @param data - The data to search
 * @param offset - The offset at which to start searching
 */
export const isFooter = (data: Uint8Array, offset: number): boolean => {
  /*
   * The footer is a copy of the header, but with the '3DI' identifier.
   */
  if (offset + 10 > data.length) {
    return false;
  }
  // '3DI' identifier
  if (
    data[offset] !== 0x33 ||
    data[offset + 1] !== 0x44 ||
    data[offset + 2] !== 0x49
  ) {
    return false;
  }
  // version bytes must be within range
  if (data[offset + 3] >= 0xff || data[offset + 4] >= 0xff) {
    return false;
  }
  // all four size bytes must be sync-safe (< 0x80)
  for (let i = 6; i < 10; i++) {
    if (data[offset + i] >= 0x80) {
      return false;
    }
  }
  return true;
};
/**
 * Returns any adjacent ID3 tags found in data starting at offset, as one block of data
 * @param data - The data to search in
 * @param offset - The offset at which to start searching
 * @returns the block of data containing any ID3 tags found
 * or *undefined* if no header is found at the starting offset
 */
export const getID3Data = (
  data: Uint8Array,
  offset: number,
): Uint8Array | undefined => {
  const front = offset;
  let length = 0;
  while (isHeader(data, offset)) {
    // ID3 header is 10 bytes
    length += 10;
    const size = readSize(data, offset + 6);
    length += size;
    if (isFooter(data, offset + 10)) {
      // ID3 footer is 10 bytes
      length += 10;
    }
    // `length` is cumulative from `front`, so the next tag starts at
    // front + length. (Fix: the previous `offset += length` double-counted
    // earlier tags, overshooting past — and thereby dropping — the second
    // and subsequent adjacent tags.)
    offset = front + length;
  }
  if (length > 0) {
    return data.subarray(front, front + length);
  }
  return undefined;
};
// Reads an ID3 "sync-safe" size: four bytes with 7 significant bits each,
// big-endian.
const readSize = (data: Uint8Array, offset: number): number => {
  return (
    ((data[offset] & 0x7f) << 21) |
    ((data[offset + 1] & 0x7f) << 14) |
    ((data[offset + 2] & 0x7f) << 7) |
    (data[offset + 3] & 0x7f)
  );
};
// An ID3 tag is parsable when its 10-byte header plus the declared payload
// size fit within the remaining buffer.
export const canParse = (data: Uint8Array, offset: number): boolean => {
  if (!isHeader(data, offset)) {
    return false;
  }
  const tagSize = readSize(data, offset + 6) + 10;
  return tagSize <= data.length - offset;
};
/**
 * Searches for the Elementary Stream timestamp found in the ID3 data chunk
 * @param data - Block of data containing one or more ID3 tags
 */
export const getTimeStamp = (data: Uint8Array): number | undefined => {
  // Scan decoded frames for the Apple transport-stream-timestamp PRIV frame.
  for (const frame of getID3Frames(data)) {
    if (isTimeStampFrame(frame)) {
      return readTimeStamp(frame as DecodedFrame<ArrayBuffer>);
    }
  }
  return undefined;
};
/**
 * Returns true if the ID3 frame is an Elementary Stream timestamp frame
 */
export const isTimeStampFrame = (frame: Frame): boolean => {
  const appleTimestampOwner = 'com.apple.streaming.transportStreamTimestamp';
  return frame && frame.key === 'PRIV' && frame.info === appleTimestampOwner;
};
// Splits one raw ID3 frame into id / size / payload.
// Frame layout: 4-char Frame ID, 4-byte sync-safe size, 2 flag bytes, data.
const getFrameData = (data: Uint8Array): RawFrame => {
  const type: string = String.fromCharCode(data[0], data[1], data[2], data[3]);
  const size: number = readSize(data, 4);
  // Payload starts right after the 10-byte frame header (id+size+flags).
  const payloadStart = 10;
  return { type, size, data: data.subarray(payloadStart, payloadStart + size) };
};
/**
 * Returns an array of ID3 frames found in all the ID3 tags in the id3Data
 * @param id3Data - The ID3 data containing one or more ID3 tags
 */
export const getID3Frames = (id3Data: Uint8Array): Frame[] => {
  let offset = 0;
  const frames: Frame[] = [];
  while (isHeader(id3Data, offset)) {
    const size = readSize(id3Data, offset + 6);
    // skip past ID3 header
    offset += 10;
    // `end` marks the first byte past this tag's payload.
    const end = offset + size;
    // loop through frames in the ID3 tag; at least a frame id + size
    // (8 bytes) must remain before `end` for another frame to exist.
    while (offset + 8 < end) {
      const frameData: RawFrame = getFrameData(id3Data.subarray(offset));
      const frame: Frame | undefined = decodeFrame(frameData);
      if (frame) {
        frames.push(frame);
      }
      // skip frame header (10 bytes) and frame data
      offset += frameData.size + 10;
    }
    // An optional 10-byte footer may follow the payload.
    if (isFooter(id3Data, offset)) {
      offset += 10;
    }
  }
  return frames;
};
// Dispatch on frame id: PRIV frames carry binary payloads, W??? frames
// carry URLs, and everything else is treated as a text frame.
export const decodeFrame = (frame: RawFrame): Frame | undefined => {
  if (frame.type === 'PRIV') {
    return decodePrivFrame(frame);
  }
  if (frame.type.startsWith('W')) {
    return decodeURLFrame(frame);
  }
  return decodeTextFrame(frame);
};
// Decodes a PRIV frame. Payload format: <owner string>\0<binary data>.
// Returns undefined for payloads too small to hold owner + separator.
const decodePrivFrame = (
  frame: RawFrame,
): DecodedFrame<ArrayBuffer> | undefined => {
  /*
  Format: <text string>\0<binary data>
  */
  if (frame.size < 2) {
    return undefined;
  }
  // Owner is the NUL-terminated prefix; the rest is copied out so the
  // returned ArrayBuffer contains exactly the private data.
  const owner = utf8ArrayToStr(frame.data, true);
  const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
  return { key: frame.type, info: owner, data: privateData.buffer };
};
// Decodes a text frame (T*). TXXX frames carry a description + value pair;
// all other text frames carry a single value after the encoding byte.
const decodeTextFrame = (frame: RawFrame): DecodedFrame<string> | undefined => {
  if (frame.size < 2) {
    return undefined;
  }
  if (frame.type === 'TXXX') {
    /*
    Format:
    [0]   = {Text Encoding}
    [1-?] = {Description}\0{Value}
    */
    let cursor = 1;
    const description = utf8ArrayToStr(frame.data.subarray(cursor), true);
    cursor += description.length + 1;
    const value = utf8ArrayToStr(frame.data.subarray(cursor));
    return { key: frame.type, info: description, data: value };
  }
  /*
  Format:
  [0]   = {Text Encoding}
  [1-?] = {Value}
  */
  return { key: frame.type, data: utf8ArrayToStr(frame.data.subarray(1)) };
};
// Decodes a URL frame (W*). WXXX frames carry a description + URL pair;
// all other W-frames are a bare URL with no encoding byte.
const decodeURLFrame = (frame: RawFrame): DecodedFrame<string> | undefined => {
  if (frame.type === 'WXXX') {
    /*
    Format:
    [0]   = {Text Encoding}
    [1-?] = {Description}\0{URL}
    */
    if (frame.size < 2) {
      return undefined;
    }
    let cursor = 1;
    const description: string = utf8ArrayToStr(
      frame.data.subarray(cursor),
      true,
    );
    cursor += description.length + 1;
    const value: string = utf8ArrayToStr(frame.data.subarray(cursor));
    return { key: frame.type, info: description, data: value };
  }
  /*
  Format:
  [0-?] = {URL}
  */
  return { key: frame.type, data: utf8ArrayToStr(frame.data) };
};
const readTimeStamp = (
timeStampFrame: DecodedFrame<ArrayBuffer>,
): number | undefined => {
if (timeStampFrame.data.byteLength === 8) {
const data = new Uint8Array(timeStampFrame.data);
// timestamp is 33 bit expressed as a big-endian eight-octet number,
// with the upper 31 bits set to zero.
const pts33Bit = data[3] & 0x1;
let timestamp =
(data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
timestamp /= 45;
if (pts33Bit) {
timestamp += 47721858.84;
} // 2^32 / 90
return Math.round(timestamp);
}
return undefined;
};
// http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
/* utf.js - UTF-8 <=> UTF-16 convertion
 *
 * Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
 * Version: 1.0
 * LastModified: Dec 25 1999
 * This library is free. You can redistribute it and/or modify it.
 */
/**
 * Decodes a UTF-8 byte array to a string. Uses the platform TextDecoder
 * when available; otherwise falls back to a manual decoder handling 1-, 2-
 * and 3-byte sequences.
 * When `exitOnNull` is true, decoding stops at the first NUL byte and the
 * prefix is returned; otherwise NUL characters are stripped.
 */
export const utf8ArrayToStr = (
  array: Uint8Array,
  exitOnNull: boolean = false,
): string => {
  const decoder = getTextDecoder();
  if (decoder) {
    const decoded = decoder.decode(array);
    if (exitOnNull) {
      // grab up to the first null
      const idx = decoded.indexOf('\0');
      return idx !== -1 ? decoded.substring(0, idx) : decoded;
    }
    // remove any null characters
    return decoded.replace(/\0/g, '');
  }
  // Manual fallback path.
  const len = array.length;
  let c;
  let char2;
  let char3;
  let out = '';
  let i = 0;
  while (i < len) {
    c = array[i++];
    if (c === 0x00 && exitOnNull) {
      return out;
    } else if (c === 0x00 || c === 0x03) {
      // If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
      continue;
    }
    // Dispatch on the high nibble of the lead byte.
    switch (c >> 4) {
      case 0:
      case 1:
      case 2:
      case 3:
      case 4:
      case 5:
      case 6:
      case 7:
        // 0xxxxxxx — single-byte ASCII
        out += String.fromCharCode(c);
        break;
      case 12:
      case 13:
        // 110x xxxx 10xx xxxx — two-byte sequence
        char2 = array[i++];
        out += String.fromCharCode(((c & 0x1f) << 6) | (char2 & 0x3f));
        break;
      case 14:
        // 1110 xxxx 10xx xxxx 10xx xxxx — three-byte sequence
        char2 = array[i++];
        char3 = array[i++];
        out += String.fromCharCode(
          ((c & 0x0f) << 12) | ((char2 & 0x3f) << 6) | ((char3 & 0x3f) << 0),
        );
        break;
      default:
      // Other lead bytes (e.g. 4-byte sequences) are skipped.
    }
  }
  return out;
};
export const testables = {
decodeTextFrame: decodeTextFrame,
};
// Shared UTF-8 TextDecoder instance, created at most once.
let decoder: TextDecoder;

/**
 * Returns the shared UTF-8 TextDecoder, or undefined when the platform
 * decoder must not be used (forcing the manual fallback path).
 */
function getTextDecoder() {
  // On Play Station 4, TextDecoder is defined but partially implemented.
  // Manual decoding option is preferable
  if (navigator.userAgent.includes('PlayStation 4')) {
    return;
  }
  if (!decoder && typeof self.TextDecoder !== 'undefined') {
    decoder = new self.TextDecoder('utf-8');
  }
  return decoder;
}

41
server/node_modules/hls.js/src/demux/inject-worker.ts generated vendored Normal file
View File

@@ -0,0 +1,41 @@
// ensure the worker ends up in the bundle
// If the worker should not be included this gets aliased to empty.js
import './transmuxer-worker';
/**
 * True when the UMD build exposes the inlined worker bundle. The typeof
 * guard avoids a ReferenceError when the global is absent.
 */
export function hasUMDWorker(): boolean {
  const bundleType = typeof __HLS_WORKER_BUNDLE__;
  return bundleType === 'function';
}
export type WorkerContext = {
  // The spawned worker instance.
  worker: Worker;
  // Set when the worker was booted from an injected Blob (see injectWorker).
  objectURL?: string;
  // Set when the worker was loaded from an external script (see loadWorker).
  scriptURL?: string;
};
/**
 * Boots the inlined UMD worker bundle from a Blob URL, wrapping it in a
 * minimal module/AMD shim so no separate worker file needs to be served.
 */
export function injectWorker(): WorkerContext {
  const bootstrap = `var exports={};var module={exports:exports};function define(f){f()};define.amd=true;(${__HLS_WORKER_BUNDLE__.toString()})(true);`;
  const blob = new self.Blob([bootstrap], { type: 'text/javascript' });
  const objectURL = self.URL.createObjectURL(blob);
  const worker = new self.Worker(objectURL);
  return { worker, objectURL };
}
/**
 * Spawns the worker from an external script, resolving a possibly relative
 * `path` against the current page location first.
 */
export function loadWorker(path: string): WorkerContext {
  const scriptURL = new self.URL(path, self.location.href).href;
  const worker = new self.Worker(scriptURL);
  return { worker, scriptURL };
}

200
server/node_modules/hls.js/src/demux/mp4demuxer.ts generated vendored Normal file
View File

@@ -0,0 +1,200 @@
/**
* MP4 demuxer
*/
import {
Demuxer,
DemuxerResult,
PassthroughTrack,
DemuxedAudioTrack,
DemuxedUserdataTrack,
DemuxedMetadataTrack,
KeyData,
MetadataSchema,
} from '../types/demuxer';
import {
findBox,
segmentValidRange,
appendUint8Array,
parseEmsg,
parseSamples,
parseInitSegment,
RemuxerTrackIdConfig,
hasMoofData,
} from '../utils/mp4-tools';
import { dummyTrack } from './dummy-demuxed-track';
import type { HlsEventEmitter } from '../events';
import type { HlsConfig } from '../config';
// Matches emsg scheme ids containing "/emsg/ID3" or "/emsg-ID3" (case-insensitive).
const emsgSchemePattern = /\/emsg[-/]ID3/i;
class MP4Demuxer implements Demuxer {
private remainderData: Uint8Array | null = null;
private timeOffset: number = 0;
private config: HlsConfig;
private videoTrack?: PassthroughTrack;
private audioTrack?: DemuxedAudioTrack;
private id3Track?: DemuxedMetadataTrack;
private txtTrack?: DemuxedUserdataTrack;
constructor(observer: HlsEventEmitter, config: HlsConfig) {
this.config = config;
}
public resetTimeStamp() {}
public resetInitSegment(
initSegment: Uint8Array | undefined,
audioCodec: string | undefined,
videoCodec: string | undefined,
trackDuration: number,
) {
const videoTrack = (this.videoTrack = dummyTrack(
'video',
1,
) as PassthroughTrack);
const audioTrack = (this.audioTrack = dummyTrack(
'audio',
1,
) as DemuxedAudioTrack);
const captionTrack = (this.txtTrack = dummyTrack(
'text',
1,
) as DemuxedUserdataTrack);
this.id3Track = dummyTrack('id3', 1) as DemuxedMetadataTrack;
this.timeOffset = 0;
if (!initSegment?.byteLength) {
return;
}
const initData = parseInitSegment(initSegment);
if (initData.video) {
const { id, timescale, codec } = initData.video;
videoTrack.id = id;
videoTrack.timescale = captionTrack.timescale = timescale;
videoTrack.codec = codec;
}
if (initData.audio) {
const { id, timescale, codec } = initData.audio;
audioTrack.id = id;
audioTrack.timescale = timescale;
audioTrack.codec = codec;
}
captionTrack.id = RemuxerTrackIdConfig.text;
videoTrack.sampleDuration = 0;
videoTrack.duration = audioTrack.duration = trackDuration;
}
public resetContiguity(): void {
this.remainderData = null;
}
static probe(data: Uint8Array) {
return hasMoofData(data);
}
public demux(data: Uint8Array, timeOffset: number): DemuxerResult {
this.timeOffset = timeOffset;
// Load all data into the avc track. The CMAF remuxer will look for the data in the samples object; the rest of the fields do not matter
let videoSamples = data;
const videoTrack = this.videoTrack as PassthroughTrack;
const textTrack = this.txtTrack as DemuxedUserdataTrack;
if (this.config.progressive) {
// Split the bytestream into two ranges: one encompassing all data up until the start of the last moof, and everything else.
// This is done to guarantee that we're sending valid data to MSE - when demuxing progressively, we have no guarantee
// that the fetch loader gives us flush moof+mdat pairs. If we push jagged data to MSE, it will throw an exception.
if (this.remainderData) {
videoSamples = appendUint8Array(this.remainderData, data);
}
const segmentedData = segmentValidRange(videoSamples);
this.remainderData = segmentedData.remainder;
videoTrack.samples = segmentedData.valid || new Uint8Array();
} else {
videoTrack.samples = videoSamples;
}
const id3Track = this.extractID3Track(videoTrack, timeOffset);
textTrack.samples = parseSamples(timeOffset, videoTrack);
return {
videoTrack,
audioTrack: this.audioTrack as DemuxedAudioTrack,
id3Track,
textTrack: this.txtTrack as DemuxedUserdataTrack,
};
}
public flush() {
const timeOffset = this.timeOffset;
const videoTrack = this.videoTrack as PassthroughTrack;
const textTrack = this.txtTrack as DemuxedUserdataTrack;
videoTrack.samples = this.remainderData || new Uint8Array();
this.remainderData = null;
const id3Track = this.extractID3Track(videoTrack, this.timeOffset);
textTrack.samples = parseSamples(timeOffset, videoTrack);
return {
videoTrack,
audioTrack: dummyTrack() as DemuxedAudioTrack,
id3Track,
textTrack: dummyTrack() as DemuxedUserdataTrack,
};
}
private extractID3Track(
videoTrack: PassthroughTrack,
timeOffset: number,
): DemuxedMetadataTrack {
const id3Track = this.id3Track as DemuxedMetadataTrack;
if (videoTrack.samples.length) {
const emsgs = findBox(videoTrack.samples, ['emsg']);
if (emsgs) {
emsgs.forEach((data: Uint8Array) => {
const emsgInfo = parseEmsg(data);
if (emsgSchemePattern.test(emsgInfo.schemeIdUri)) {
const pts = Number.isFinite(emsgInfo.presentationTime)
? emsgInfo.presentationTime! / emsgInfo.timeScale
: timeOffset +
emsgInfo.presentationTimeDelta! / emsgInfo.timeScale;
let duration =
emsgInfo.eventDuration === 0xffffffff
? Number.POSITIVE_INFINITY
: emsgInfo.eventDuration / emsgInfo.timeScale;
// Safari takes anything <= 0.001 seconds and maps it to Infinity
if (duration <= 0.001) {
duration = Number.POSITIVE_INFINITY;
}
const payload = emsgInfo.payload;
id3Track.samples.push({
data: payload,
len: payload.byteLength,
dts: pts,
pts: pts,
type: MetadataSchema.emsg,
duration: duration,
});
}
});
}
}
return id3Track;
}
demuxSampleAes(
data: Uint8Array,
keyData: KeyData,
timeOffset: number,
): Promise<DemuxerResult> {
return Promise.reject(
new Error('The MP4 demuxer does not support SAMPLE-AES decryption'),
);
}
  // No resources are held across pushes, so teardown is a no-op.
  destroy() {}
}
export default MP4Demuxer;

197
server/node_modules/hls.js/src/demux/sample-aes.ts generated vendored Normal file
View File

@@ -0,0 +1,197 @@
/**
* SAMPLE-AES decrypter
*/
import { HlsConfig } from '../config';
import Decrypter from '../crypt/decrypter';
import { HlsEventEmitter } from '../events';
import type {
AudioSample,
VideoSample,
VideoSampleUnit,
DemuxedVideoTrackBase,
KeyData,
} from '../types/demuxer';
import { discardEPB } from '../utils/mp4-tools';
/**
 * SAMPLE-AES decrypter for AAC audio samples and AVC NAL units.
 *
 * The underlying Decrypter may be synchronous (software AES-CBC) or
 * asynchronous (WebCrypto). The traversal loops below are written so both
 * modes share the same code: in sync mode the loop advances itself, in async
 * mode it exits after scheduling one sample and is re-entered from the
 * decryption completion handler with the next index.
 */
class SampleAesDecrypter {
  // Key/IV/method descriptor for the encrypted samples.
  private keyData: KeyData;
  private decrypter: Decrypter;
  constructor(observer: HlsEventEmitter, config: HlsConfig, keyData: KeyData) {
    this.keyData = keyData;
    // SAMPLE-AES encrypts isolated 16-byte blocks, so there is no trailing
    // PKCS7 padding to strip after decryption.
    this.decrypter = new Decrypter(config, {
      removePKCS7Padding: false,
    });
  }
  // Decrypts a buffer with the configured key/IV; resolves with plaintext.
  decryptBuffer(encryptedData: Uint8Array | ArrayBuffer): Promise<ArrayBuffer> {
    return this.decrypter.decrypt(
      encryptedData,
      this.keyData.key.buffer,
      this.keyData.iv.buffer,
    );
  }
  // AAC - encrypt all full 16 bytes blocks starting from offset 16
  private decryptAacSample(
    samples: AudioSample[],
    sampleIndex: number,
    callback: () => void,
  ) {
    const curUnit = samples[sampleIndex].unit;
    if (curUnit.length <= 16) {
      // No encrypted portion in this sample (first 16 bytes is not
      // encrypted, see https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/HLS_Sample_Encryption/Encryption/Encryption.html),
      return;
    }
    // Encrypted region: full 16-byte blocks from offset 16 to the last
    // complete block; any trailing partial block stays clear.
    const encryptedData = curUnit.subarray(
      16,
      curUnit.length - (curUnit.length % 16),
    );
    // Copy into a standalone ArrayBuffer so the decrypter gets exactly the
    // encrypted range, not the whole backing buffer.
    const encryptedBuffer = encryptedData.buffer.slice(
      encryptedData.byteOffset,
      encryptedData.byteOffset + encryptedData.length,
    );
    this.decryptBuffer(encryptedBuffer).then((decryptedBuffer: ArrayBuffer) => {
      // Write plaintext back in place over the encrypted region.
      const decryptedData = new Uint8Array(decryptedBuffer);
      curUnit.set(decryptedData, 16);
      if (!this.decrypter.isSync()) {
        // Async mode: resume the sample walk from here.
        this.decryptAacSamples(samples, sampleIndex + 1, callback);
      }
    });
  }
  // Walks AAC samples from sampleIndex and decrypts each; invokes callback
  // once the end of the array is reached (from whichever code path gets there).
  decryptAacSamples(
    samples: AudioSample[],
    sampleIndex: number,
    callback: () => void,
  ) {
    for (; ; sampleIndex++) {
      if (sampleIndex >= samples.length) {
        callback();
        return;
      }
      // Samples shorter than 32 bytes cannot contain a full encrypted block.
      if (samples[sampleIndex].unit.length < 32) {
        continue;
      }
      this.decryptAacSample(samples, sampleIndex, callback);
      if (!this.decrypter.isSync()) {
        // Async mode: iteration continues from decryptAacSample's handler.
        return;
      }
    }
  }
  // AVC - encrypt one 16 bytes block out of ten, starting from offset 32
  // Gathers the encrypted (1-in-10) blocks of a NAL unit into one contiguous
  // buffer so they can be decrypted in a single pass.
  getAvcEncryptedData(decodedData: Uint8Array) {
    const encryptedDataLen =
      Math.floor((decodedData.length - 48) / 160) * 16 + 16;
    const encryptedData = new Int8Array(encryptedDataLen);
    let outputPos = 0;
    for (
      let inputPos = 32;
      inputPos < decodedData.length - 16;
      inputPos += 160, outputPos += 16
    ) {
      encryptedData.set(
        decodedData.subarray(inputPos, inputPos + 16),
        outputPos,
      );
    }
    return encryptedData;
  }
  // Scatters decrypted blocks back to their original 1-in-10 positions
  // (inverse of getAvcEncryptedData); returns the patched unit.
  getAvcDecryptedUnit(
    decodedData: Uint8Array,
    decryptedData: ArrayLike<number> | ArrayBuffer | SharedArrayBuffer,
  ) {
    const uint8DecryptedData = new Uint8Array(decryptedData);
    let inputPos = 0;
    for (
      let outputPos = 32;
      outputPos < decodedData.length - 16;
      outputPos += 160, inputPos += 16
    ) {
      decodedData.set(
        uint8DecryptedData.subarray(inputPos, inputPos + 16),
        outputPos,
      );
    }
    return decodedData;
  }
  decryptAvcSample(
    samples: VideoSample[],
    sampleIndex: number,
    unitIndex: number,
    callback: () => void,
    curUnit: VideoSampleUnit,
  ) {
    // Emulation-prevention bytes must be removed before block positions line up.
    const decodedData = discardEPB(curUnit.data);
    const encryptedData = this.getAvcEncryptedData(decodedData);
    this.decryptBuffer(encryptedData.buffer).then(
      (decryptedBuffer: ArrayBuffer) => {
        curUnit.data = this.getAvcDecryptedUnit(decodedData, decryptedBuffer);
        if (!this.decrypter.isSync()) {
          // Async mode: resume the unit walk from here.
          this.decryptAvcSamples(samples, sampleIndex, unitIndex + 1, callback);
        }
      },
    );
  }
  // Walks video samples/units from (sampleIndex, unitIndex), decrypting slice
  // NAL units (types 1 and 5); invokes callback when the walk completes.
  decryptAvcSamples(
    samples: DemuxedVideoTrackBase['samples'],
    sampleIndex: number,
    unitIndex: number,
    callback: () => void,
  ) {
    if (samples instanceof Uint8Array) {
      throw new Error('Cannot decrypt samples of type Uint8Array');
    }
    for (; ; sampleIndex++, unitIndex = 0) {
      if (sampleIndex >= samples.length) {
        callback();
        return;
      }
      const curUnits = samples[sampleIndex].units;
      for (; ; unitIndex++) {
        if (unitIndex >= curUnits.length) {
          break;
        }
        const curUnit = curUnits[unitIndex];
        // Units of <= 48 bytes hold no encrypted block; only slice NAL units
        // (non-IDR type 1, IDR type 5) are encrypted by SAMPLE-AES.
        if (
          curUnit.data.length <= 48 ||
          (curUnit.type !== 1 && curUnit.type !== 5)
        ) {
          continue;
        }
        this.decryptAvcSample(
          samples,
          sampleIndex,
          unitIndex,
          callback,
          curUnit,
        );
        if (!this.decrypter.isSync()) {
          // Async mode: iteration continues from decryptAvcSample's handler.
          return;
        }
      }
    }
  }
}
export default SampleAesDecrypter;

View File

@@ -0,0 +1,424 @@
import {
WorkerContext,
hasUMDWorker,
injectWorker,
loadWorker,
} from './inject-worker';
import { Events } from '../events';
import Transmuxer, {
TransmuxConfig,
TransmuxState,
isPromise,
} from '../demux/transmuxer';
import { logger } from '../utils/logger';
import { ErrorTypes, ErrorDetails } from '../errors';
import { getMediaSource } from '../utils/mediasource-helper';
import { EventEmitter } from 'eventemitter3';
import { Fragment, Part } from '../loader/fragment';
import type { ChunkMetadata, TransmuxerResult } from '../types/transmuxer';
import type Hls from '../hls';
import type { HlsEventEmitter, HlsListeners } from '../events';
import type { PlaylistLevelType } from '../types/loader';
import type { TypeSupported } from './tsdemuxer';
import type { RationalTimestamp } from '../utils/timescale-conversion';
/**
 * Bridge between the player (main thread) and the transmuxer, which may run
 * either inline or inside a Web Worker. Creates the worker when enabled and
 * possible, falls back to an inline Transmuxer otherwise, and forwards
 * push/flush/configure commands plus their results in both directions.
 */
export default class TransmuxerInterface {
  public error: Error | null = null;
  private hls: Hls;
  private id: PlaylistLevelType;
  private observer: HlsEventEmitter;
  private frag: Fragment | null = null;
  private part: Part | null = null;
  private useWorker: boolean;
  private workerContext: WorkerContext | null = null;
  private onwmsg?: (
    event: MessageEvent<{ event: string; data?: any } | null>,
  ) => void;
  private transmuxer: Transmuxer | null = null;
  private onTransmuxComplete: (transmuxResult: TransmuxerResult) => void;
  private onFlush: (chunkMeta: ChunkMetadata) => void;

  constructor(
    hls: Hls,
    id: PlaylistLevelType,
    onTransmuxComplete: (transmuxResult: TransmuxerResult) => void,
    onFlush: (chunkMeta: ChunkMetadata) => void,
  ) {
    const config = hls.config;
    this.hls = hls;
    this.id = id;
    this.useWorker = !!config.enableWorker;
    this.onTransmuxComplete = onTransmuxComplete;
    this.onFlush = onFlush;
    // Re-emit transmuxer events on the player bus, tagged with the fragment
    // and stream id they belong to; remember the last error.
    const forwardMessage = (ev, data) => {
      data = data || {};
      data.frag = this.frag;
      data.id = this.id;
      if (ev === Events.ERROR) {
        this.error = data.error;
      }
      this.hls.trigger(ev, data);
    };
    // forward events to main thread
    this.observer = new EventEmitter() as HlsEventEmitter;
    this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
    this.observer.on(Events.ERROR, forwardMessage);
    // Feature-detect which m2ts audio codecs the MediaSource accepts so the
    // transmuxer can pick an output format the browser can buffer.
    const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
      isTypeSupported: () => false,
    };
    const m2tsTypeSupported: TypeSupported = {
      mpeg: MediaSource.isTypeSupported('audio/mpeg'),
      mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
      ac3: __USE_M2TS_ADVANCED_CODECS__
        ? MediaSource.isTypeSupported('audio/mp4; codecs="ac-3"')
        : false,
    };
    if (this.useWorker && typeof Worker !== 'undefined') {
      const canCreateWorker = config.workerPath || hasUMDWorker();
      if (canCreateWorker) {
        try {
          if (config.workerPath) {
            logger.log(`loading Web Worker ${config.workerPath} for "${id}"`);
            this.workerContext = loadWorker(config.workerPath);
          } else {
            logger.log(`injecting Web Worker for "${id}"`);
            this.workerContext = injectWorker();
          }
          this.onwmsg = (event) => this.onWorkerMessage(event);
          const { worker } = this.workerContext;
          worker.addEventListener('message', this.onwmsg);
          // A runtime worker error disables workers for subsequent loads; the
          // non-fatal error below lets error-controller retry inline.
          worker.onerror = (event) => {
            const error = new Error(
              `${event.message}  (${event.filename}:${event.lineno})`,
            );
            config.enableWorker = false;
            logger.warn(`Error in "${id}" Web Worker, fallback to inline`);
            this.hls.trigger(Events.ERROR, {
              type: ErrorTypes.OTHER_ERROR,
              details: ErrorDetails.INTERNAL_EXCEPTION,
              fatal: false,
              event: 'demuxerWorker',
              error,
            });
          };
          // Config must be serialized: structured clone cannot carry functions.
          worker.postMessage({
            cmd: 'init',
            typeSupported: m2tsTypeSupported,
            vendor: '',
            id: id,
            config: JSON.stringify(config),
          });
        } catch (err) {
          logger.warn(
            `Error setting up "${id}" Web Worker, fallback to inline`,
            err,
          );
          // Worker setup failed: tear it down and run inline instead.
          this.resetWorker();
          this.error = null;
          this.transmuxer = new Transmuxer(
            this.observer,
            m2tsTypeSupported,
            config,
            '',
            id,
          );
        }
        return;
      }
    }
    // Workers disabled or unavailable: run the transmuxer inline.
    this.transmuxer = new Transmuxer(
      this.observer,
      m2tsTypeSupported,
      config,
      '',
      id,
    );
  }

  /** Terminates the worker (if any) and releases its object URL. */
  resetWorker() {
    if (this.workerContext) {
      const { worker, objectURL } = this.workerContext;
      if (objectURL) {
        // revoke the Object URL that was used to create transmuxer worker, so as not to leak it
        self.URL.revokeObjectURL(objectURL);
      }
      worker.removeEventListener('message', this.onwmsg as any);
      worker.onerror = null;
      worker.terminate();
      this.workerContext = null;
    }
  }

  /** Tears down the worker or inline transmuxer and detaches all listeners. */
  destroy() {
    if (this.workerContext) {
      this.resetWorker();
      this.onwmsg = undefined;
    } else {
      const transmuxer = this.transmuxer;
      if (transmuxer) {
        transmuxer.destroy();
        this.transmuxer = null;
      }
    }
    const observer = this.observer;
    if (observer) {
      observer.removeAllListeners();
    }
    this.frag = null;
    // @ts-ignore
    this.observer = null;
    // @ts-ignore
    this.hls = null;
  }

  /**
   * Forwards one fragment (or part) payload to the transmuxer. Derives the
   * session state (discontinuity, contiguity, track/init-segment change) by
   * comparing against the previously pushed fragment, and starts a fresh
   * transmux session when continuity is broken.
   */
  push(
    data: ArrayBuffer,
    initSegmentData: Uint8Array | undefined,
    audioCodec: string | undefined,
    videoCodec: string | undefined,
    frag: Fragment,
    part: Part | null,
    duration: number,
    accurateTimeOffset: boolean,
    chunkMeta: ChunkMetadata,
    defaultInitPTS?: RationalTimestamp,
  ) {
    chunkMeta.transmuxing.start = self.performance.now();
    const { transmuxer } = this;
    const timeOffset = part ? part.start : frag.start;
    // TODO: push "clear-lead" decrypt data for unencrypted fragments in streams with encrypted ones
    const decryptdata = frag.decryptdata;
    const lastFrag = this.frag;
    const discontinuity = !(lastFrag && frag.cc === lastFrag.cc);
    const trackSwitch = !(lastFrag && chunkMeta.level === lastFrag.level);
    const snDiff = lastFrag ? chunkMeta.sn - (lastFrag.sn as number) : -1;
    const partDiff = this.part ? chunkMeta.part - this.part.index : -1;
    // Progressive: another chunk of the same fragment arriving after the
    // previously counted chunks.
    const progressive =
      snDiff === 0 &&
      chunkMeta.id > 1 &&
      chunkMeta.id === lastFrag?.stats.chunkCount;
    const contiguous =
      !trackSwitch &&
      (snDiff === 1 ||
        (snDiff === 0 && (partDiff === 1 || (progressive && partDiff <= 0))));
    const now = self.performance.now();
    if (trackSwitch || snDiff || frag.stats.parsing.start === 0) {
      frag.stats.parsing.start = now;
    }
    if (part && (partDiff || !contiguous)) {
      part.stats.parsing.start = now;
    }
    const initSegmentChange = !(
      lastFrag && frag.initSegment?.url === lastFrag.initSegment?.url
    );
    const state = new TransmuxState(
      discontinuity,
      contiguous,
      accurateTimeOffset,
      trackSwitch,
      timeOffset,
      initSegmentChange,
    );
    if (!contiguous || discontinuity || initSegmentChange) {
      logger.log(`[transmuxer-interface, ${frag.type}]: Starting new transmux session for sn: ${chunkMeta.sn} p: ${chunkMeta.part} level: ${chunkMeta.level} id: ${chunkMeta.id}
        discontinuity: ${discontinuity}
        trackSwitch: ${trackSwitch}
        contiguous: ${contiguous}
        accurateTimeOffset: ${accurateTimeOffset}
        timeOffset: ${timeOffset}
        initSegmentChange: ${initSegmentChange}`);
      const config = new TransmuxConfig(
        audioCodec,
        videoCodec,
        initSegmentData,
        duration,
        defaultInitPTS,
      );
      this.configureTransmuxer(config);
    }
    this.frag = frag;
    this.part = part;
    // Frags with sn of 'initSegment' are not transmuxed
    if (this.workerContext) {
      // post fragment payload as transferable objects for ArrayBuffer (no copy)
      this.workerContext.worker.postMessage(
        {
          cmd: 'demux',
          data,
          decryptdata,
          chunkMeta,
          state,
        },
        data instanceof ArrayBuffer ? [data] : [],
      );
    } else if (transmuxer) {
      const transmuxResult = transmuxer.push(
        data,
        decryptdata,
        chunkMeta,
        state,
      );
      if (isPromise(transmuxResult)) {
        // Async (e.g. WebCrypto decryption): record the mode so flush() knows
        // to await pending work.
        transmuxer.async = true;
        transmuxResult
          .then((data) => {
            this.handleTransmuxComplete(data);
          })
          .catch((error) => {
            this.transmuxerError(
              error,
              chunkMeta,
              'transmuxer-interface push error',
            );
          });
      } else {
        transmuxer.async = false;
        this.handleTransmuxComplete(transmuxResult as TransmuxerResult);
      }
    }
  }

  /**
   * Flushes buffered transmuxer state for the given chunk. Results (possibly
   * several) are delivered via onTransmuxComplete, then onFlush fires.
   */
  flush(chunkMeta: ChunkMetadata) {
    chunkMeta.transmuxing.start = self.performance.now();
    const { transmuxer } = this;
    if (this.workerContext) {
      // Fix: removed a stray no-op `1;` expression statement that was here.
      this.workerContext.worker.postMessage({
        cmd: 'flush',
        chunkMeta,
      });
    } else if (transmuxer) {
      let transmuxResult = transmuxer.flush(chunkMeta);
      const asyncFlush = isPromise(transmuxResult);
      if (asyncFlush || transmuxer.async) {
        // Normalize to a promise so sync results of an async session follow
        // the same completion path.
        if (!isPromise(transmuxResult)) {
          transmuxResult = Promise.resolve(transmuxResult);
        }
        transmuxResult
          .then((data) => {
            this.handleFlushResult(data, chunkMeta);
          })
          .catch((error) => {
            this.transmuxerError(
              error,
              chunkMeta,
              'transmuxer-interface flush error',
            );
          });
      } else {
        this.handleFlushResult(
          transmuxResult as Array<TransmuxerResult>,
          chunkMeta,
        );
      }
    }
  }

  /** Records the error and raises a non-fatal FRAG_PARSING_ERROR on the bus. */
  private transmuxerError(
    error: Error,
    chunkMeta: ChunkMetadata,
    reason: string,
  ) {
    if (!this.hls) {
      return;
    }
    this.error = error;
    this.hls.trigger(Events.ERROR, {
      type: ErrorTypes.MEDIA_ERROR,
      details: ErrorDetails.FRAG_PARSING_ERROR,
      chunkMeta,
      frag: this.frag || undefined,
      fatal: false,
      error,
      err: error,
      reason,
    });
  }

  /** Delivers each flush result, then signals flush completion. */
  private handleFlushResult(
    results: Array<TransmuxerResult>,
    chunkMeta: ChunkMetadata,
  ) {
    results.forEach((result) => {
      this.handleTransmuxComplete(result);
    });
    this.onFlush(chunkMeta);
  }

  /** Dispatches messages posted back by the transmuxer worker. */
  private onWorkerMessage(
    event: MessageEvent<{ event: string; data?: any } | null>,
  ) {
    const data = event.data;
    if (!data?.event) {
      logger.warn(
        `worker message received with no ${data ? 'event name' : 'data'}`,
      );
      return;
    }
    const hls = this.hls;
    if (!this.hls) {
      return;
    }
    switch (data.event) {
      case 'init': {
        const objectURL = this.workerContext?.objectURL;
        if (objectURL) {
          // revoke the Object URL that was used to create transmuxer worker, so as not to leak it
          self.URL.revokeObjectURL(objectURL);
        }
        break;
      }
      case 'transmuxComplete': {
        this.handleTransmuxComplete(data.data);
        break;
      }
      case 'flush': {
        this.onFlush(data.data);
        break;
      }
      // pass logs from the worker thread to the main logger
      case 'workerLog':
        if (logger[data.data.logType]) {
          logger[data.data.logType](data.data.message);
        }
        break;
      default: {
        // Any other event is re-triggered on the player bus, tagged with the
        // current fragment and stream id.
        data.data = data.data || {};
        data.data.frag = this.frag;
        data.data.id = this.id;
        hls.trigger(data.event as keyof HlsListeners, data.data);
        break;
      }
    }
  }

  /** Applies a new TransmuxConfig to the worker or inline transmuxer. */
  private configureTransmuxer(config: TransmuxConfig) {
    const { transmuxer } = this;
    if (this.workerContext) {
      this.workerContext.worker.postMessage({
        cmd: 'configure',
        config,
      });
    } else if (transmuxer) {
      transmuxer.configure(config);
    }
  }

  /** Stamps the end time and hands the result to the owning controller. */
  private handleTransmuxComplete(result: TransmuxerResult) {
    result.chunkMeta.transmuxing.end = self.performance.now();
    this.onTransmuxComplete(result);
  }
}

View File

@@ -0,0 +1,187 @@
import Transmuxer, { isPromise } from '../demux/transmuxer';
import { Events } from '../events';
import { ILogFunction, enableLogs, logger } from '../utils/logger';
import { EventEmitter } from 'eventemitter3';
import { ErrorDetails, ErrorTypes } from '../errors';
import type { RemuxedTrack, RemuxerResult } from '../types/remuxer';
import type { TransmuxerResult, ChunkMetadata } from '../types/transmuxer';
// When this module is bundled as the worker entry point (__IN_WORKER__ is a
// build-time define), start servicing transmux commands on the worker scope.
if (typeof __IN_WORKER__ !== 'undefined' && __IN_WORKER__) {
  startWorker(self);
}
/**
 * Runs the transmuxer inside a Web Worker: forwards observer events and log
 * output back to the main thread via postMessage, and services the
 * 'init' / 'configure' / 'demux' / 'flush' commands posted by
 * TransmuxerInterface. The Transmuxer instance is stored on the worker's
 * global scope (`self.transmuxer`).
 */
function startWorker(self) {
  const observer = new EventEmitter();
  const forwardMessage = (ev, data) => {
    self.postMessage({ event: ev, data: data });
  };
  // forward events to main thread
  observer.on(Events.FRAG_DECRYPTED, forwardMessage);
  observer.on(Events.ERROR, forwardMessage);
  // forward logger events to main thread
  const forwardWorkerLogs = () => {
    // Replace every logger function with one that posts a 'workerLog'
    // message so worker-side logs surface in the main thread's logger.
    for (const logFn in logger) {
      const func: ILogFunction = (message?) => {
        forwardMessage('workerLog', {
          logType: logFn,
          message,
        });
      };
      logger[logFn] = func;
    }
  };
  self.addEventListener('message', (ev) => {
    const data = ev.data;
    switch (data.cmd) {
      case 'init': {
        // Config arrives JSON-serialized (structured clone cannot carry the
        // original config object's functions).
        const config = JSON.parse(data.config);
        self.transmuxer = new Transmuxer(
          observer,
          data.typeSupported,
          config,
          '',
          data.id,
        );
        enableLogs(config.debug, data.id);
        forwardWorkerLogs();
        // Ack so the main thread can revoke the worker's object URL.
        forwardMessage('init', null);
        break;
      }
      case 'configure': {
        self.transmuxer.configure(data.config);
        break;
      }
      case 'demux': {
        // Mirrors the inline path in TransmuxerInterface.push(): sync results
        // post immediately, async results post on resolution.
        const transmuxResult: TransmuxerResult | Promise<TransmuxerResult> =
          self.transmuxer.push(
            data.data,
            data.decryptdata,
            data.chunkMeta,
            data.state,
          );
        if (isPromise(transmuxResult)) {
          self.transmuxer.async = true;
          transmuxResult
            .then((data) => {
              emitTransmuxComplete(self, data);
            })
            .catch((error) => {
              forwardMessage(Events.ERROR, {
                type: ErrorTypes.MEDIA_ERROR,
                details: ErrorDetails.FRAG_PARSING_ERROR,
                chunkMeta: data.chunkMeta,
                fatal: false,
                error,
                err: error,
                reason: `transmuxer-worker push error`,
              });
            });
        } else {
          self.transmuxer.async = false;
          emitTransmuxComplete(self, transmuxResult);
        }
        break;
      }
      case 'flush': {
        const id = data.chunkMeta;
        // Flushing can yield multiple results; an async session always takes
        // the promise path even if this flush resolved synchronously.
        let transmuxResult = self.transmuxer.flush(id);
        const asyncFlush = isPromise(transmuxResult);
        if (asyncFlush || self.transmuxer.async) {
          if (!isPromise(transmuxResult)) {
            transmuxResult = Promise.resolve(transmuxResult);
          }
          transmuxResult
            .then((results: Array<TransmuxerResult>) => {
              handleFlushResult(self, results as Array<TransmuxerResult>, id);
            })
            .catch((error) => {
              forwardMessage(Events.ERROR, {
                type: ErrorTypes.MEDIA_ERROR,
                details: ErrorDetails.FRAG_PARSING_ERROR,
                chunkMeta: data.chunkMeta,
                fatal: false,
                error,
                err: error,
                reason: `transmuxer-worker flush error`,
              });
            });
        } else {
          handleFlushResult(
            self,
            transmuxResult as Array<TransmuxerResult>,
            id,
          );
        }
        break;
      }
      default:
        break;
    }
  });
}
/**
 * Posts a 'transmuxComplete' message to the main thread for a non-empty
 * result, transferring the remuxed sample buffers instead of cloning them.
 * Returns false (and posts nothing) when the result carries no media.
 */
function emitTransmuxComplete(
  self: any,
  transmuxResult: TransmuxerResult,
): boolean {
  if (isEmptyResult(transmuxResult.remuxResult)) {
    return false;
  }
  const transferable: Array<ArrayBuffer> = [];
  const { audio, video } = transmuxResult.remuxResult;
  for (const track of [audio, video]) {
    if (track) {
      addToTransferable(transferable, track);
    }
  }
  self.postMessage(
    { event: 'transmuxComplete', data: transmuxResult },
    transferable,
  );
  return true;
}
// Collects a track's sample ArrayBuffers so postMessage can transfer ownership
// (zero-copy) rather than structured-clone them across the thread boundary.
// See https://developers.google.com/web/updates/2011/12/Transferable-Objects-Lightning-Fast)
function addToTransferable(
  transferable: Array<ArrayBuffer>,
  track: RemuxedTrack,
) {
  const { data1, data2 } = track;
  if (data1) {
    transferable.push(data1.buffer);
  }
  if (data2) {
    transferable.push(data2.buffer);
  }
}
/**
 * Emits every non-empty flush result to the main thread, guarantees at least
 * one 'transmuxComplete' message even when nothing was parsed (so
 * stream-controller still transitions to PARSING), then signals 'flush'.
 */
function handleFlushResult(
  self: any,
  results: Array<TransmuxerResult>,
  chunkMeta: ChunkMetadata,
) {
  let parsed = false;
  for (const result of results) {
    if (emitTransmuxComplete(self, result)) {
      parsed = true;
    }
  }
  if (!parsed) {
    self.postMessage({ event: 'transmuxComplete', data: results[0] });
  }
  self.postMessage({ event: 'flush', data: chunkMeta });
}
// A remux result is empty when it produced no track of any kind.
function isEmptyResult(remuxResult: RemuxerResult) {
  const { audio, video, text, id3, initSegment } = remuxResult;
  return !audio && !video && !text && !id3 && !initSegment;
}

535
server/node_modules/hls.js/src/demux/transmuxer.ts generated vendored Normal file
View File

@@ -0,0 +1,535 @@
import type { HlsEventEmitter } from '../events';
import { Events } from '../events';
import { ErrorTypes, ErrorDetails } from '../errors';
import Decrypter from '../crypt/decrypter';
import AACDemuxer from './audio/aacdemuxer';
import MP4Demuxer from '../demux/mp4demuxer';
import TSDemuxer, { TypeSupported } from '../demux/tsdemuxer';
import MP3Demuxer from './audio/mp3demuxer';
import { AC3Demuxer } from './audio/ac3-demuxer';
import MP4Remuxer from '../remux/mp4-remuxer';
import PassThroughRemuxer from '../remux/passthrough-remuxer';
import { logger } from '../utils/logger';
import type { Demuxer, DemuxerResult, KeyData } from '../types/demuxer';
import type { Remuxer } from '../types/remuxer';
import type { TransmuxerResult, ChunkMetadata } from '../types/transmuxer';
import type { HlsConfig } from '../config';
import type { DecryptData } from '../loader/level-key';
import type { PlaylistLevelType } from '../types/loader';
import type { RationalTimestamp } from '../utils/timescale-conversion';
import { optionalSelf } from '../utils/global';
// Monotonic clock used for transmux timing stats.
let now;
// performance.now() not available on WebWorker, at least on Safari Desktop
try {
  now = self.performance.now.bind(self.performance);
} catch (err) {
  // Fall back to wall-clock time (lower resolution, not monotonic).
  logger.debug('Unable to use Performance API on this environment');
  now = optionalSelf?.Date.now;
}
// Pairing of a container demuxer with the remuxer that produces MSE output.
type MuxConfig =
  | { demux: typeof MP4Demuxer; remux: typeof PassthroughRemuxer }
  | { demux: typeof TSDemuxer; remux: typeof MP4Remuxer }
  | { demux: typeof AC3Demuxer; remux: typeof MP4Remuxer }
  | { demux: typeof AACDemuxer; remux: typeof MP4Remuxer }
  | { demux: typeof MP3Demuxer; remux: typeof MP4Remuxer };
// Probe order matters: each entry's probe() is tried in sequence and the
// first match wins (fMP4 pass-through first, then MPEG-TS, then raw audio).
const muxConfig: MuxConfig[] = [
  { demux: MP4Demuxer, remux: PassthroughRemuxer },
  { demux: TSDemuxer, remux: MP4Remuxer },
  { demux: AACDemuxer, remux: MP4Remuxer },
  { demux: MP3Demuxer, remux: MP4Remuxer },
];
// With advanced m2ts codecs compiled in, probe AC-3 ahead of AAC/MP3.
if (__USE_M2TS_ADVANCED_CODECS__) {
  muxConfig.splice(2, 0, { demux: AC3Demuxer, remux: MP4Remuxer });
}
/**
 * Core transmux pipeline: probes the container format of incoming fragment
 * data, decrypts AES-128 payloads (synchronously via software AES or
 * asynchronously via WebCrypto), demuxes, and remuxes into MSE-appendable
 * output. Tracks per-session state (discontinuity, contiguity, track and
 * init-segment changes) between push() calls.
 */
export default class Transmuxer {
  // True when the last push()/flush() took an asynchronous path.
  public async: boolean = false;
  private observer: HlsEventEmitter;
  private typeSupported: TypeSupported;
  private config: HlsConfig;
  private vendor: string;
  private id: PlaylistLevelType;
  // Demuxer/remuxer pair selected by probing; undefined until first probe.
  private demuxer?: Demuxer;
  private remuxer?: Remuxer;
  // Lazily created on first encrypted payload.
  private decrypter?: Decrypter;
  private probe!: Function;
  // In-flight WebCrypto decryption; flush() must wait on it so decrypted data
  // is transmuxed before flushing.
  private decryptionPromise: Promise<TransmuxerResult> | null = null;
  private transmuxConfig!: TransmuxConfig;
  private currentTransmuxState!: TransmuxState;
  constructor(
    observer: HlsEventEmitter,
    typeSupported: TypeSupported,
    config: HlsConfig,
    vendor: string,
    id: PlaylistLevelType,
  ) {
    this.observer = observer;
    this.typeSupported = typeSupported;
    this.config = config;
    this.vendor = vendor;
    this.id = id;
  }
  // Applies new per-fragment settings and clears any cached decrypter state.
  configure(transmuxConfig: TransmuxConfig) {
    this.transmuxConfig = transmuxConfig;
    if (this.decrypter) {
      this.decrypter.reset();
    }
  }
  /**
   * Transmuxes one chunk of fragment data. Decrypts AES-128 payloads first
   * (returning a Promise when WebCrypto is used), re-probes the container on
   * discontinuity/track switch, and resets muxer state as dictated by the
   * TransmuxState flags.
   */
  push(
    data: ArrayBuffer,
    decryptdata: DecryptData | null,
    chunkMeta: ChunkMetadata,
    state?: TransmuxState,
  ): TransmuxerResult | Promise<TransmuxerResult> {
    const stats = chunkMeta.transmuxing;
    stats.executeStart = now();
    let uintData: Uint8Array = new Uint8Array(data);
    const { currentTransmuxState, transmuxConfig } = this;
    if (state) {
      this.currentTransmuxState = state;
    }
    const {
      contiguous,
      discontinuity,
      trackSwitch,
      accurateTimeOffset,
      timeOffset,
      initSegmentChange,
    } = state || currentTransmuxState;
    const {
      audioCodec,
      videoCodec,
      defaultInitPts,
      duration,
      initSegmentData,
    } = transmuxConfig;
    const keyData = getEncryptionType(uintData, decryptdata);
    if (keyData && keyData.method === 'AES-128') {
      const decrypter = this.getDecrypter();
      // Software decryption is synchronous; webCrypto is not
      if (decrypter.isSync()) {
        // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
        // data is handled in the flush() call
        let decryptedData = decrypter.softwareDecrypt(
          uintData,
          keyData.key.buffer,
          keyData.iv.buffer,
        );
        // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
        const loadingParts = chunkMeta.part > -1;
        if (loadingParts) {
          decryptedData = decrypter.flush();
        }
        if (!decryptedData) {
          // Nothing decrypted yet (awaiting more data); return an empty result.
          stats.executeEnd = now();
          return emptyResult(chunkMeta);
        }
        uintData = new Uint8Array(decryptedData);
      } else {
        this.decryptionPromise = decrypter
          .webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer)
          .then((decryptedData): TransmuxerResult => {
            // Calling push here is important; if flush() is called while this is still resolving, this ensures that
            // the decrypted data has been transmuxed
            const result = this.push(
              decryptedData,
              null,
              chunkMeta,
            ) as TransmuxerResult;
            this.decryptionPromise = null;
            return result;
          });
        return this.decryptionPromise!;
      }
    }
    const resetMuxers = this.needsProbing(discontinuity, trackSwitch);
    if (resetMuxers) {
      const error = this.configureTransmuxer(uintData);
      if (error) {
        // Probing failed: surface a non-fatal parsing error and bail out.
        logger.warn(`[transmuxer] ${error.message}`);
        this.observer.emit(Events.ERROR, Events.ERROR, {
          type: ErrorTypes.MEDIA_ERROR,
          details: ErrorDetails.FRAG_PARSING_ERROR,
          fatal: false,
          error,
          reason: error.message,
        });
        stats.executeEnd = now();
        return emptyResult(chunkMeta);
      }
    }
    if (discontinuity || trackSwitch || initSegmentChange || resetMuxers) {
      this.resetInitSegment(
        initSegmentData,
        audioCodec,
        videoCodec,
        duration,
        decryptdata,
      );
    }
    if (discontinuity || initSegmentChange || resetMuxers) {
      this.resetInitialTimestamp(defaultInitPts);
    }
    if (!contiguous) {
      this.resetContiguity();
    }
    const result = this.transmux(
      uintData,
      keyData,
      timeOffset,
      accurateTimeOffset,
      chunkMeta,
    );
    // Subsequent chunks of this session are contiguous continuations.
    const currentState = this.currentTransmuxState;
    currentState.contiguous = true;
    currentState.discontinuity = false;
    currentState.trackSwitch = false;
    stats.executeEnd = now();
    return result;
  }
  // Due to data caching, flush calls can produce more than one TransmuxerResult (hence the Array type)
  flush(
    chunkMeta: ChunkMetadata,
  ): TransmuxerResult[] | Promise<TransmuxerResult[]> {
    const stats = chunkMeta.transmuxing;
    stats.executeStart = now();
    const { decrypter, currentTransmuxState, decryptionPromise } = this;
    if (decryptionPromise) {
      // Upon resolution, the decryption promise calls push() and returns its TransmuxerResult up the stack. Therefore
      // only flushing is required for async decryption
      return decryptionPromise.then(() => {
        return this.flush(chunkMeta);
      });
    }
    const transmuxResults: TransmuxerResult[] = [];
    const { timeOffset } = currentTransmuxState;
    if (decrypter) {
      // The decrypter may have data cached, which needs to be demuxed. In this case we'll have two TransmuxResults
      // This happens in the case that we receive only 1 push call for a segment (either for non-progressive downloads,
      // or for progressive downloads with small segments)
      const decryptedData = decrypter.flush();
      if (decryptedData) {
        // Push always returns a TransmuxerResult if decryptdata is null
        transmuxResults.push(
          this.push(decryptedData, null, chunkMeta) as TransmuxerResult,
        );
      }
    }
    const { demuxer, remuxer } = this;
    if (!demuxer || !remuxer) {
      // If probing failed, then Hls.js has been given content its not able to handle
      stats.executeEnd = now();
      return [emptyResult(chunkMeta)];
    }
    const demuxResultOrPromise = demuxer.flush(timeOffset);
    if (isPromise(demuxResultOrPromise)) {
      // Decrypt final SAMPLE-AES samples
      return demuxResultOrPromise.then((demuxResult) => {
        this.flushRemux(transmuxResults, demuxResult, chunkMeta);
        return transmuxResults;
      });
    }
    this.flushRemux(transmuxResults, demuxResultOrPromise, chunkMeta);
    return transmuxResults;
  }
  // Remuxes a flushed demux result and appends it to transmuxResults.
  private flushRemux(
    transmuxResults: TransmuxerResult[],
    demuxResult: DemuxerResult,
    chunkMeta: ChunkMetadata,
  ) {
    const { audioTrack, videoTrack, id3Track, textTrack } = demuxResult;
    const { accurateTimeOffset, timeOffset } = this.currentTransmuxState;
    logger.log(
      `[transmuxer.ts]: Flushed fragment ${chunkMeta.sn}${
        chunkMeta.part > -1 ? ' p: ' + chunkMeta.part : ''
      } of level ${chunkMeta.level}`,
    );
    const remuxResult = this.remuxer!.remux(
      audioTrack,
      videoTrack,
      id3Track,
      textTrack,
      timeOffset,
      accurateTimeOffset,
      true,
      this.id,
    );
    transmuxResults.push({
      remuxResult,
      chunkMeta,
    });
    chunkMeta.transmuxing.executeEnd = now();
  }
  // Resets both muxers' timestamp baseline (on discontinuity/init change).
  resetInitialTimestamp(defaultInitPts: RationalTimestamp | null) {
    const { demuxer, remuxer } = this;
    if (!demuxer || !remuxer) {
      return;
    }
    demuxer.resetTimeStamp(defaultInitPts);
    remuxer.resetTimeStamp(defaultInitPts);
  }
  // Clears continuity state so the next chunk is treated as a fresh start.
  resetContiguity() {
    const { demuxer, remuxer } = this;
    if (!demuxer || !remuxer) {
      return;
    }
    demuxer.resetContiguity();
    remuxer.resetNextTimestamp();
  }
  // Re-initializes both muxers from the fragment's init segment and codecs.
  resetInitSegment(
    initSegmentData: Uint8Array | undefined,
    audioCodec: string | undefined,
    videoCodec: string | undefined,
    trackDuration: number,
    decryptdata: DecryptData | null,
  ) {
    const { demuxer, remuxer } = this;
    if (!demuxer || !remuxer) {
      return;
    }
    demuxer.resetInitSegment(
      initSegmentData,
      audioCodec,
      videoCodec,
      trackDuration,
    );
    remuxer.resetInitSegment(
      initSegmentData,
      audioCodec,
      videoCodec,
      decryptdata,
    );
  }
  // Releases the demuxer/remuxer pair.
  destroy(): void {
    if (this.demuxer) {
      this.demuxer.destroy();
      this.demuxer = undefined;
    }
    if (this.remuxer) {
      this.remuxer.destroy();
      this.remuxer = undefined;
    }
  }
  // Routes the payload to the SAMPLE-AES or clear transmux path.
  private transmux(
    data: Uint8Array,
    keyData: KeyData | null,
    timeOffset: number,
    accurateTimeOffset: boolean,
    chunkMeta: ChunkMetadata,
  ): TransmuxerResult | Promise<TransmuxerResult> {
    let result: TransmuxerResult | Promise<TransmuxerResult>;
    if (keyData && keyData.method === 'SAMPLE-AES') {
      result = this.transmuxSampleAes(
        data,
        keyData,
        timeOffset,
        accurateTimeOffset,
        chunkMeta,
      );
    } else {
      result = this.transmuxUnencrypted(
        data,
        timeOffset,
        accurateTimeOffset,
        chunkMeta,
      );
    }
    return result;
  }
  // Synchronous demux + remux of clear (or already-decrypted) data.
  private transmuxUnencrypted(
    data: Uint8Array,
    timeOffset: number,
    accurateTimeOffset: boolean,
    chunkMeta: ChunkMetadata,
  ): TransmuxerResult {
    const { audioTrack, videoTrack, id3Track, textTrack } = (
      this.demuxer as Demuxer
    ).demux(data, timeOffset, false, !this.config.progressive);
    const remuxResult = this.remuxer!.remux(
      audioTrack,
      videoTrack,
      id3Track,
      textTrack,
      timeOffset,
      accurateTimeOffset,
      false,
      this.id,
    );
    return {
      remuxResult,
      chunkMeta,
    };
  }
  // Asynchronous demux (per-sample decryption) + remux of SAMPLE-AES data.
  private transmuxSampleAes(
    data: Uint8Array,
    decryptData: KeyData,
    timeOffset: number,
    accurateTimeOffset: boolean,
    chunkMeta: ChunkMetadata,
  ): Promise<TransmuxerResult> {
    return (this.demuxer as Demuxer)
      .demuxSampleAes(data, decryptData, timeOffset)
      .then((demuxResult) => {
        const remuxResult = this.remuxer!.remux(
          demuxResult.audioTrack,
          demuxResult.videoTrack,
          demuxResult.id3Track,
          demuxResult.textTrack,
          timeOffset,
          accurateTimeOffset,
          false,
          this.id,
        );
        return {
          remuxResult,
          chunkMeta,
        };
      });
  }
  /**
   * Probes the payload against each muxConfig entry (in order) and swaps in
   * the matching demuxer/remuxer pair when they differ from the current ones.
   * Returns an Error when no demuxer recognizes the data.
   */
  private configureTransmuxer(data: Uint8Array): void | Error {
    const { config, observer, typeSupported, vendor } = this;
    // probe for content type
    let mux;
    for (let i = 0, len = muxConfig.length; i < len; i++) {
      if (muxConfig[i].demux?.probe(data)) {
        mux = muxConfig[i];
        break;
      }
    }
    if (!mux) {
      return new Error('Failed to find demuxer by probing fragment data');
    }
    // so let's check that current remuxer and demuxer are still valid
    const demuxer = this.demuxer;
    const remuxer = this.remuxer;
    const Remuxer: MuxConfig['remux'] = mux.remux;
    const Demuxer: MuxConfig['demux'] = mux.demux;
    if (!remuxer || !(remuxer instanceof Remuxer)) {
      this.remuxer = new Remuxer(observer, config, typeSupported, vendor);
    }
    if (!demuxer || !(demuxer instanceof Demuxer)) {
      this.demuxer = new Demuxer(observer, config, typeSupported);
      this.probe = Demuxer.probe;
    }
  }
  private needsProbing(discontinuity: boolean, trackSwitch: boolean): boolean {
    // in case of continuity change, or track switch
    // we might switch from content type (AAC container to TS container, or TS to fmp4 for example)
    return !this.demuxer || !this.remuxer || discontinuity || trackSwitch;
  }
  // Lazily creates (and caches) the AES decrypter.
  private getDecrypter(): Decrypter {
    let decrypter = this.decrypter;
    if (!decrypter) {
      decrypter = this.decrypter = new Decrypter(this.config);
    }
    return decrypter;
  }
}
/**
 * Returns the decrypt data as KeyData when the payload is non-empty and the
 * key description is complete (key, iv and method all present); otherwise
 * null, meaning the data should be treated as clear.
 */
function getEncryptionType(
  data: Uint8Array,
  decryptData: DecryptData | null,
): KeyData | null {
  const hasPayload = data.byteLength > 0;
  const hasFullKeyInfo =
    decryptData != null &&
    decryptData.key != null &&
    decryptData.iv !== null &&
    decryptData.method != null;
  return hasPayload && hasFullKeyInfo ? (decryptData as KeyData) : null;
}
// Builds a TransmuxerResult carrying no remuxed media (used when a chunk
// yields no output yet, e.g. while decryption is still pending).
// Fix: the parameter was implicitly `any`; type it via indexed access so the
// signature stays in sync with TransmuxerResult without new imports.
const emptyResult = (
  chunkMeta: TransmuxerResult['chunkMeta'],
): TransmuxerResult => ({
  remuxResult: {},
  chunkMeta,
});
/**
 * Type guard for thenables (native promises or promise-likes).
 * Fix: the previous `'then' in p` check threw a TypeError when `p` was a
 * primitive or null, since `in` requires an object operand; guard the type
 * first so any value can be tested safely.
 */
export function isPromise<T>(p: Promise<T> | any): p is Promise<T> {
  return (
    p !== null &&
    (typeof p === 'object' || typeof p === 'function') &&
    typeof p.then === 'function'
  );
}
/**
 * Immutable per-fragment configuration handed to the transmuxer:
 * declared codecs, init segment payload, track duration and the
 * initial PTS baseline (null until discovered).
 */
export class TransmuxConfig {
  public defaultInitPts: RationalTimestamp | null;

  constructor(
    public audioCodec: string | undefined,
    public videoCodec: string | undefined,
    public initSegmentData: Uint8Array | undefined,
    public duration: number,
    defaultInitPts?: RationalTimestamp,
  ) {
    // Normalize a missing initial PTS to an explicit null.
    this.defaultInitPts = defaultInitPts || null;
  }
}
/**
 * Per-chunk parsing context flags forwarded to the transmuxer:
 * discontinuity/contiguity with the previous chunk, time offset and its
 * reliability, plus whether the track or init segment changed.
 */
export class TransmuxState {
  constructor(
    public discontinuity: boolean,
    public contiguous: boolean,
    public accurateTimeOffset: boolean,
    public trackSwitch: boolean,
    public timeOffset: number,
    public initSegmentChange: boolean,
  ) {}
}

1001
server/node_modules/hls.js/src/demux/tsdemuxer.ts generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,340 @@
import BaseVideoParser from './base-video-parser';
import {
DemuxedVideoTrack,
DemuxedUserdataTrack,
VideoSampleUnit,
} from '../../types/demuxer';
import {
appendUint8Array,
parseSEIMessageFromNALu,
} from '../../utils/mp4-tools';
import ExpGolomb from './exp-golomb';
import type { PES } from '../tsdemuxer';
/**
 * Parser for H.264/AVC elementary streams carried in MPEG-TS PES packets.
 * Extracts NAL units from the Annex-B byte stream and groups them into
 * access units (video samples) pushed onto the demuxed track.
 */
class AvcVideoParser extends BaseVideoParser {
  /**
   * Consumes one PES packet: extracts its NAL units, updates track metadata
   * (SPS/PPS, dimensions, codec string), flags keyframes, and pushes
   * completed access units onto `track.samples`. SEI payloads are forwarded
   * to `textTrack.samples`.
   * @param track demuxed video track being populated
   * @param textTrack track receiving parsed SEI/user-data samples
   * @param pes PES packet; its `data` is consumed and nulled to free memory
   * @param last true for the final PES of the fragment — flushes the pending sample
   * @param duration track duration recorded when a new SPS is adopted
   */
  public parseAVCPES(
    track: DemuxedVideoTrack,
    textTrack: DemuxedUserdataTrack,
    pes: PES,
    last: boolean,
    duration: number,
  ) {
    const units = this.parseAVCNALu(track, pes.data);
    // Flip to true locally to append per-NAL trace tags to sample.debug.
    const debug = false;
    let VideoSample = this.VideoSample;
    let push: boolean;
    let spsfound = false;
    // free pes.data to save up some memory
    (pes as any).data = null;
    // if new NAL units found and last sample still there, let's push ...
    // this helps parsing streams with missing AUD (only do this if AUD never found)
    if (VideoSample && units.length && !track.audFound) {
      this.pushAccessUnit(VideoSample, track);
      VideoSample = this.VideoSample = this.createVideoSample(
        false,
        pes.pts,
        pes.dts,
        '',
      );
    }
    // Dispatch on NAL unit type; `push` decides whether the unit is appended
    // to the current access unit.
    units.forEach((unit) => {
      switch (unit.type) {
        // NDR
        case 1: {
          let iskey = false;
          push = true;
          const data = unit.data;
          // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
          if (spsfound && data.length > 4) {
            // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
            const sliceType = new ExpGolomb(data).readSliceType();
            // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
            // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
            // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
            // I slice: A slice that is not an SI slice that is decoded using intra prediction only.
            // if (sliceType === 2 || sliceType === 7) {
            if (
              sliceType === 2 ||
              sliceType === 4 ||
              sliceType === 7 ||
              sliceType === 9
            ) {
              iskey = true;
            }
          }
          if (iskey) {
            // if we have non-keyframe data already, that cannot belong to the same frame as a keyframe, so force a push
            if (VideoSample?.frame && !VideoSample.key) {
              this.pushAccessUnit(VideoSample, track);
              VideoSample = this.VideoSample = null;
            }
          }
          if (!VideoSample) {
            VideoSample = this.VideoSample = this.createVideoSample(
              true,
              pes.pts,
              pes.dts,
              '',
            );
          }
          if (debug) {
            VideoSample.debug += 'NDR ';
          }
          VideoSample.frame = true;
          VideoSample.key = iskey;
          break;
          // IDR
        }
        case 5:
          push = true;
          // handle PES not starting with AUD
          // if we have frame data already, that cannot belong to the same frame, so force a push
          if (VideoSample?.frame && !VideoSample.key) {
            this.pushAccessUnit(VideoSample, track);
            VideoSample = this.VideoSample = null;
          }
          if (!VideoSample) {
            VideoSample = this.VideoSample = this.createVideoSample(
              true,
              pes.pts,
              pes.dts,
              '',
            );
          }
          if (debug) {
            VideoSample.debug += 'IDR ';
          }
          // IDR frames are always keyframes.
          VideoSample.key = true;
          VideoSample.frame = true;
          break;
        // SEI
        case 6: {
          push = true;
          if (debug && VideoSample) {
            VideoSample.debug += 'SEI ';
          }
          parseSEIMessageFromNALu(
            unit.data,
            1,
            pes.pts as number,
            textTrack.samples,
          );
          break;
          // SPS
        }
        case 7: {
          push = true;
          spsfound = true;
          if (debug && VideoSample) {
            VideoSample.debug += 'SPS ';
          }
          const sps = unit.data;
          const expGolombDecoder = new ExpGolomb(sps);
          const config = expGolombDecoder.readSPS();
          // Adopt the SPS only when it changes the stream geometry, to avoid
          // churning track metadata on every repeated in-band SPS.
          if (
            !track.sps ||
            track.width !== config.width ||
            track.height !== config.height ||
            track.pixelRatio?.[0] !== config.pixelRatio[0] ||
            track.pixelRatio?.[1] !== config.pixelRatio[1]
          ) {
            track.width = config.width;
            track.height = config.height;
            track.pixelRatio = config.pixelRatio;
            track.sps = [sps];
            track.duration = duration;
            // Derive the RFC 6381 codec string ("avc1." + profile/compat/level
            // bytes of the SPS rendered as zero-padded hex).
            const codecarray = sps.subarray(1, 4);
            let codecstring = 'avc1.';
            for (let i = 0; i < 3; i++) {
              let h = codecarray[i].toString(16);
              if (h.length < 2) {
                h = '0' + h;
              }
              codecstring += h;
            }
            track.codec = codecstring;
          }
          break;
        }
        // PPS
        case 8:
          push = true;
          if (debug && VideoSample) {
            VideoSample.debug += 'PPS ';
          }
          track.pps = [unit.data];
          break;
        // AUD
        case 9:
          push = true;
          track.audFound = true;
          // An access-unit delimiter closes the previous sample and opens a new one.
          if (VideoSample) {
            this.pushAccessUnit(VideoSample, track);
          }
          VideoSample = this.VideoSample = this.createVideoSample(
            false,
            pes.pts,
            pes.dts,
            debug ? 'AUD ' : '',
          );
          break;
        // Filler Data
        case 12:
          push = true;
          break;
        default:
          push = false;
          if (VideoSample) {
            VideoSample.debug += 'unknown NAL ' + unit.type + ' ';
          }
          break;
      }
      if (VideoSample && push) {
        const units = VideoSample.units;
        units.push(unit);
      }
    });
    // if last PES packet, push samples
    if (last && VideoSample) {
      this.pushAccessUnit(VideoSample, track);
      this.VideoSample = null;
    }
  }

  /**
   * Scans an Annex-B byte stream for 0x000001 start codes and returns the
   * contained NAL units. The running scanner state is persisted on
   * `track.naluState` so start codes and NAL payloads split across PES
   * packet boundaries are stitched back together: leading bytes that belong
   * to the previous packet's last NAL unit are appended to it.
   * State encoding: 0/1/2/3 = number of consecutive zero bytes seen
   * (3 caps runs of >2 zeros); -1 = a start code ended exactly at the
   * previous packet's end, so this packet begins with a NAL type byte.
   */
  private parseAVCNALu(
    track: DemuxedVideoTrack,
    array: Uint8Array,
  ): Array<{
    data: Uint8Array;
    type: number;
    state?: number;
  }> {
    const len = array.byteLength;
    let state = track.naluState || 0;
    const lastState = state;
    const units: VideoSampleUnit[] = [];
    let i = 0;
    let value: number;
    let overflow: number;
    let unitType: number;
    let lastUnitStart = -1;
    let lastUnitType: number = 0;
    // logger.log('PES:' + Hex.hexDump(array));
    if (state === -1) {
      // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
      lastUnitStart = 0;
      // NALu type is value read from offset 0
      lastUnitType = array[0] & 0x1f;
      state = 0;
      i = 1;
    }
    while (i < len) {
      value = array[i++];
      // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
      if (!state) {
        state = value ? 0 : 1;
        continue;
      }
      if (state === 1) {
        state = value ? 0 : 2;
        continue;
      }
      // here we have state either equal to 2 or 3
      if (!value) {
        state = 3;
      } else if (value === 1) {
        // Found a start code: bytes [lastUnitStart, overflow) form the
        // previous NAL unit (overflow excludes the start-code prefix).
        overflow = i - state - 1;
        if (lastUnitStart >= 0) {
          const unit: VideoSampleUnit = {
            data: array.subarray(lastUnitStart, overflow),
            type: lastUnitType,
          };
          // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
          units.push(unit);
        } else {
          // lastUnitStart is undefined => this is the first start code found in this PES packet
          // first check if start code delimiter is overlapping between 2 PES packets,
          // ie it started in last packet (lastState not zero)
          // and ended at the beginning of this PES packet (i <= 4 - lastState)
          const lastUnit = this.getLastNalUnit(track.samples);
          if (lastUnit) {
            if (lastState && i <= 4 - lastState) {
              // start delimiter overlapping between PES packets
              // strip start delimiter bytes from the end of last NAL unit
              // check if lastUnit had a state different from zero
              if (lastUnit.state) {
                // strip last bytes
                lastUnit.data = lastUnit.data.subarray(
                  0,
                  lastUnit.data.byteLength - lastState,
                );
              }
            }
            // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
            if (overflow > 0) {
              // logger.log('first NALU found with overflow:' + overflow);
              lastUnit.data = appendUint8Array(
                lastUnit.data,
                array.subarray(0, overflow),
              );
              lastUnit.state = 0;
            }
          }
        }
        // check if we can read unit type
        if (i < len) {
          unitType = array[i] & 0x1f;
          // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
          lastUnitStart = i;
          lastUnitType = unitType;
          state = 0;
        } else {
          // not enough byte to read unit type. let's read it on next PES parsing
          state = -1;
        }
      } else {
        state = 0;
      }
    }
    // Emit the trailing (possibly incomplete) NAL unit; its `state` lets the
    // next packet strip an overlapping start code if needed.
    if (lastUnitStart >= 0 && state >= 0) {
      const unit: VideoSampleUnit = {
        data: array.subarray(lastUnitStart, len),
        type: lastUnitType,
        state: state,
      };
      units.push(unit);
      // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
    }
    // no NALu found
    if (units.length === 0) {
      // append pes.data to previous NAL unit
      const lastUnit = this.getLastNalUnit(track.samples);
      if (lastUnit) {
        lastUnit.data = appendUint8Array(lastUnit.data, array);
      }
    }
    track.naluState = state;
    return units;
  }
}

export default AvcVideoParser;

View File

@@ -0,0 +1,74 @@
import type { ParsedVideoSample } from '../tsdemuxer';
import {
DemuxedVideoTrack,
VideoSample,
VideoSampleUnit,
} from '../../types/demuxer';
import { logger } from '../../utils/logger';
/**
 * Shared state and helpers for NAL-unit based video parsers:
 * building samples (access units), locating the last emitted NAL unit,
 * and flushing completed access units onto the track.
 */
class BaseVideoParser {
  // Access unit currently being assembled; null between samples.
  protected VideoSample: ParsedVideoSample | null = null;

  /**
   * Creates a new, empty access unit.
   * @param key whether the sample is (expected to be) a keyframe
   * @param pts presentation timestamp (may be undefined; patched on push)
   * @param dts decode timestamp (may be undefined; patched on push)
   * @param debug initial debug-trace string
   */
  protected createVideoSample(
    key: boolean,
    pts: number | undefined,
    dts: number | undefined,
    debug: string,
  ): ParsedVideoSample {
    return {
      key,
      frame: false,
      pts,
      dts,
      units: [],
      debug,
      length: 0,
    };
  }

  /**
   * Returns the most recently collected NAL unit: the last unit of the
   * in-progress sample, or of the last pushed sample when the current one
   * is empty. Used to stitch NAL data split across PES packets.
   */
  protected getLastNalUnit(
    samples: VideoSample[],
  ): VideoSampleUnit | undefined {
    let VideoSample = this.VideoSample;
    let lastUnit: VideoSampleUnit | undefined;
    // try to fallback to previous sample if current one is empty
    if (!VideoSample || VideoSample.units.length === 0) {
      VideoSample = samples[samples.length - 1];
    }
    if (VideoSample?.units) {
      const units = VideoSample.units;
      lastUnit = units[units.length - 1];
    }
    return lastUnit;
  }

  /**
   * Appends a completed access unit to the track's sample list. Samples
   * without a PTS inherit the previous sample's timestamps; if there is no
   * previous sample the unit is dropped (counted in `videoTrack.dropped`).
   * Any accumulated debug trace is logged regardless.
   */
  protected pushAccessUnit(
    VideoSample: ParsedVideoSample,
    videoTrack: DemuxedVideoTrack,
  ) {
    if (VideoSample.units.length && VideoSample.frame) {
      // if sample does not have PTS/DTS, patch with last sample PTS/DTS
      if (VideoSample.pts === undefined) {
        const samples = videoTrack.samples;
        const nbSamples = samples.length;
        if (nbSamples) {
          const lastSample = samples[nbSamples - 1];
          VideoSample.pts = lastSample.pts;
          VideoSample.dts = lastSample.dts;
        } else {
          // dropping samples, no timestamp found
          videoTrack.dropped++;
          return;
        }
      }
      videoTrack.samples.push(VideoSample as VideoSample);
    }
    if (VideoSample.debug.length) {
      logger.log(
        VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug,
      );
    }
  }
}

export default BaseVideoParser;

View File

@@ -0,0 +1,361 @@
/**
* Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
*/
import { logger } from '../../utils/logger';
/**
 * Big-endian bit reader over a byte buffer with Exp-Golomb (ue(v)/se(v))
 * decoding, as used by H.264 headers. Bits are consumed from a 32-bit
 * working word that is refilled from the buffer on demand.
 */
class ExpGolomb {
  private data: Uint8Array;
  public bytesAvailable: number;
  private word: number;
  private bitsAvailable: number;

  constructor(data: Uint8Array) {
    this.data = data;
    // the number of bytes left to examine in this.data
    this.bytesAvailable = data.byteLength;
    // the current word being examined
    this.word = 0; // :uint
    // the number of bits left to examine in the current word
    this.bitsAvailable = 0; // :uint
  }

  // ():void
  // Refills the 32-bit working word from the next (up to 4) unread bytes.
  // Throws when the buffer is exhausted.
  loadWord(): void {
    const data = this.data;
    const bytesAvailable = this.bytesAvailable;
    const position = data.byteLength - bytesAvailable;
    const workingBytes = new Uint8Array(4);
    const availableBytes = Math.min(4, bytesAvailable);
    if (availableBytes === 0) {
      throw new Error('no bytes available');
    }
    workingBytes.set(data.subarray(position, position + availableBytes));
    this.word = new DataView(workingBytes.buffer).getUint32(0);
    // track the amount of this.data that has been processed
    this.bitsAvailable = availableBytes * 8;
    this.bytesAvailable -= availableBytes;
  }

  // (count:int):void
  // Discards `count` bits (clamped to what remains in the stream).
  skipBits(count: number): void {
    let skipBytes; // :int
    count = Math.min(count, this.bytesAvailable * 8 + this.bitsAvailable);
    if (this.bitsAvailable > count) {
      this.word <<= count;
      this.bitsAvailable -= count;
    } else {
      count -= this.bitsAvailable;
      skipBytes = count >> 3;
      count -= skipBytes << 3;
      this.bytesAvailable -= skipBytes;
      this.loadWord();
      this.word <<= count;
      this.bitsAvailable -= count;
    }
  }

  // (size:int):uint
  // Reads `size` bits (max 32) as an unsigned integer, recursing when the
  // read spans the working-word boundary.
  readBits(size: number): number {
    let bits = Math.min(this.bitsAvailable, size); // :uint
    const valu = this.word >>> (32 - bits); // :uint
    if (size > 32) {
      logger.error('Cannot read more than 32 bits at a time');
    }
    this.bitsAvailable -= bits;
    if (this.bitsAvailable > 0) {
      this.word <<= bits;
    } else if (this.bytesAvailable > 0) {
      this.loadWord();
    } else {
      throw new Error('no bits available');
    }
    bits = size - bits;
    if (bits > 0 && this.bitsAvailable) {
      return (valu << bits) | this.readBits(bits);
    } else {
      return valu;
    }
  }

  // ():uint
  // Counts and consumes leading zero bits (the Exp-Golomb prefix).
  skipLZ(): number {
    let leadingZeroCount; // :uint
    for (
      leadingZeroCount = 0;
      leadingZeroCount < this.bitsAvailable;
      ++leadingZeroCount
    ) {
      if ((this.word & (0x80000000 >>> leadingZeroCount)) !== 0) {
        // the first bit of working word is 1
        this.word <<= leadingZeroCount;
        this.bitsAvailable -= leadingZeroCount;
        return leadingZeroCount;
      }
    }
    // we exhausted word and still have not found a 1
    this.loadWord();
    return leadingZeroCount + this.skipLZ();
  }

  // ():void
  // Skips one unsigned Exp-Golomb (ue(v)) code.
  skipUEG(): void {
    this.skipBits(1 + this.skipLZ());
  }

  // ():void
  // Skips one signed Exp-Golomb (se(v)) code (same bit layout as ue(v)).
  skipEG(): void {
    this.skipBits(1 + this.skipLZ());
  }

  // ():uint
  // Reads one unsigned Exp-Golomb (ue(v)) value.
  readUEG(): number {
    const clz = this.skipLZ(); // :uint
    return this.readBits(clz + 1) - 1;
  }

  // ():int
  // Reads one signed Exp-Golomb (se(v)) value.
  readEG(): number {
    const valu = this.readUEG(); // :int
    if (0x01 & valu) {
      // the number is odd if the low order bit is set
      return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
    } else {
      return -1 * (valu >>> 1); // divide by two then make it negative
    }
  }

  // Some convenience functions
  // :Boolean
  readBoolean(): boolean {
    return this.readBits(1) === 1;
  }

  // ():int
  readUByte(): number {
    return this.readBits(8);
  }

  // ():int
  readUShort(): number {
    return this.readBits(16);
  }

  // ():int
  readUInt(): number {
    return this.readBits(32);
  }

  /**
   * Advance the ExpGolomb decoder past a scaling list. The scaling
   * list is optionally transmitted as part of a sequence parameter
   * set and is not relevant to transmuxing.
   * @param count the number of entries in this scaling list
   * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
   */
  skipScalingList(count: number): void {
    let lastScale = 8;
    let nextScale = 8;
    let deltaScale;
    for (let j = 0; j < count; j++) {
      if (nextScale !== 0) {
        deltaScale = this.readEG();
        nextScale = (lastScale + deltaScale + 256) % 256;
      }
      lastScale = nextScale === 0 ? lastScale : nextScale;
    }
  }

  /**
   * Read a sequence parameter set and return some interesting video
   * properties. A sequence parameter set is the H264 metadata that
   * describes the properties of upcoming video frames.
   * @returns an object with configuration parsed from the
   * sequence parameter set, including the dimensions of the
   * associated video frames.
   */
  readSPS(): {
    width: number;
    height: number;
    pixelRatio: [number, number];
  } {
    let frameCropLeftOffset = 0;
    let frameCropRightOffset = 0;
    let frameCropTopOffset = 0;
    let frameCropBottomOffset = 0;
    let numRefFramesInPicOrderCntCycle;
    let scalingListCount;
    let i;
    const readUByte = this.readUByte.bind(this);
    const readBits = this.readBits.bind(this);
    const readUEG = this.readUEG.bind(this);
    const readBoolean = this.readBoolean.bind(this);
    const skipBits = this.skipBits.bind(this);
    const skipEG = this.skipEG.bind(this);
    const skipUEG = this.skipUEG.bind(this);
    const skipScalingList = this.skipScalingList.bind(this);
    readUByte();
    const profileIdc = readUByte(); // profile_idc
    readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
    skipBits(3); // reserved_zero_3bits u(3),
    readUByte(); // level_idc u(8)
    skipUEG(); // seq_parameter_set_id
    // some profiles have more optional data we don't need
    if (
      profileIdc === 100 ||
      profileIdc === 110 ||
      profileIdc === 122 ||
      profileIdc === 244 ||
      profileIdc === 44 ||
      profileIdc === 83 ||
      profileIdc === 86 ||
      profileIdc === 118 ||
      profileIdc === 128
    ) {
      const chromaFormatIdc = readUEG();
      if (chromaFormatIdc === 3) {
        skipBits(1);
      } // separate_colour_plane_flag
      skipUEG(); // bit_depth_luma_minus8
      skipUEG(); // bit_depth_chroma_minus8
      skipBits(1); // qpprime_y_zero_transform_bypass_flag
      if (readBoolean()) {
        // seq_scaling_matrix_present_flag
        scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
        for (i = 0; i < scalingListCount; i++) {
          if (readBoolean()) {
            // seq_scaling_list_present_flag[ i ]
            if (i < 6) {
              skipScalingList(16);
            } else {
              skipScalingList(64);
            }
          }
        }
      }
    }
    skipUEG(); // log2_max_frame_num_minus4
    const picOrderCntType = readUEG();
    if (picOrderCntType === 0) {
      readUEG(); // log2_max_pic_order_cnt_lsb_minus4
    } else if (picOrderCntType === 1) {
      skipBits(1); // delta_pic_order_always_zero_flag
      skipEG(); // offset_for_non_ref_pic
      skipEG(); // offset_for_top_to_bottom_field
      numRefFramesInPicOrderCntCycle = readUEG();
      for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
        skipEG();
      } // offset_for_ref_frame[ i ]
    }
    skipUEG(); // max_num_ref_frames
    skipBits(1); // gaps_in_frame_num_value_allowed_flag
    const picWidthInMbsMinus1 = readUEG();
    const picHeightInMapUnitsMinus1 = readUEG();
    const frameMbsOnlyFlag = readBits(1);
    if (frameMbsOnlyFlag === 0) {
      skipBits(1);
    } // mb_adaptive_frame_field_flag
    skipBits(1); // direct_8x8_inference_flag
    if (readBoolean()) {
      // frame_cropping_flag
      frameCropLeftOffset = readUEG();
      frameCropRightOffset = readUEG();
      frameCropTopOffset = readUEG();
      frameCropBottomOffset = readUEG();
    }
    let pixelRatio: [number, number] = [1, 1];
    if (readBoolean()) {
      // vui_parameters_present_flag
      if (readBoolean()) {
        // aspect_ratio_info_present_flag
        // Table E-1 of the H.264 spec maps aspect_ratio_idc to sample ratios.
        const aspectRatioIdc = readUByte();
        switch (aspectRatioIdc) {
          case 1:
            pixelRatio = [1, 1];
            break;
          case 2:
            pixelRatio = [12, 11];
            break;
          case 3:
            pixelRatio = [10, 11];
            break;
          case 4:
            pixelRatio = [16, 11];
            break;
          case 5:
            pixelRatio = [40, 33];
            break;
          case 6:
            pixelRatio = [24, 11];
            break;
          case 7:
            pixelRatio = [20, 11];
            break;
          case 8:
            pixelRatio = [32, 11];
            break;
          case 9:
            pixelRatio = [80, 33];
            break;
          case 10:
            pixelRatio = [18, 11];
            break;
          case 11:
            pixelRatio = [15, 11];
            break;
          case 12:
            pixelRatio = [64, 33];
            break;
          case 13:
            pixelRatio = [160, 99];
            break;
          case 14:
            pixelRatio = [4, 3];
            break;
          case 15:
            pixelRatio = [3, 2];
            break;
          case 16:
            pixelRatio = [2, 1];
            break;
          case 255: {
            // Extended_SAR: explicit 16-bit width/height follow.
            pixelRatio = [
              (readUByte() << 8) | readUByte(),
              (readUByte() << 8) | readUByte(),
            ];
            break;
          }
        }
      }
    }
    return {
      width: Math.ceil(
        (picWidthInMbsMinus1 + 1) * 16 -
          frameCropLeftOffset * 2 -
          frameCropRightOffset * 2,
      ),
      height:
        (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 -
        (frameMbsOnlyFlag ? 2 : 4) *
          (frameCropTopOffset + frameCropBottomOffset),
      pixelRatio: pixelRatio,
    };
  }

  // Reads the slice_type field of a slice header (used for keyframe detection).
  readSliceType() {
    // skip NALu type
    this.readUByte();
    // discard first_mb_in_slice
    this.readUEG();
    // return slice_type
    return this.readUEG();
  }
}

export default ExpGolomb;

5
server/node_modules/hls.js/src/empty-es.js generated vendored Normal file
View File

@@ -0,0 +1,5 @@
// This file is inserted as a shim for modules which we do not want to include into the distro.
// This replacement is done in the "alias" plugin of the rollup config.
// Use a ES dedicated file as Rollup assigns an object in the output
// For example: "var KeySystemFormats = emptyEs.KeySystemFormats;"
// Exporting an empty object makes any named-property access resolve to `undefined`.
module.exports = {};

3
server/node_modules/hls.js/src/empty.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
// This file is inserted as a shim for modules which we do not want to include into the distro.
// This replacement is done in the "alias" plugin of the rollup config.
// Exporting `undefined` makes the whole stubbed module resolve to `undefined`.
module.exports = undefined;

90
server/node_modules/hls.js/src/errors.ts generated vendored Normal file
View File

@@ -0,0 +1,90 @@
/**
 * Top-level error categories carried in the `type` field of ERROR event
 * payloads; each error also has a finer-grained `details` (see ErrorDetails).
 */
export enum ErrorTypes {
  // Identifier for a network error (loading error / timeout ...)
  NETWORK_ERROR = 'networkError',
  // Identifier for a media Error (video/parsing/mediasource error)
  MEDIA_ERROR = 'mediaError',
  // EME (encrypted media extensions) errors
  KEY_SYSTEM_ERROR = 'keySystemError',
  // Identifier for a mux Error (demuxing/remuxing)
  MUX_ERROR = 'muxError',
  // Identifier for all other errors
  OTHER_ERROR = 'otherError',
}
/**
 * Fine-grained error identifiers carried in the `details` field of ERROR
 * event payloads. The comment on each member documents the shape of the
 * `data` object delivered with that error.
 */
export enum ErrorDetails {
  KEY_SYSTEM_NO_KEYS = 'keySystemNoKeys',
  KEY_SYSTEM_NO_ACCESS = 'keySystemNoAccess',
  KEY_SYSTEM_NO_SESSION = 'keySystemNoSession',
  KEY_SYSTEM_NO_CONFIGURED_LICENSE = 'keySystemNoConfiguredLicense',
  KEY_SYSTEM_LICENSE_REQUEST_FAILED = 'keySystemLicenseRequestFailed',
  KEY_SYSTEM_SERVER_CERTIFICATE_REQUEST_FAILED = 'keySystemServerCertificateRequestFailed',
  KEY_SYSTEM_SERVER_CERTIFICATE_UPDATE_FAILED = 'keySystemServerCertificateUpdateFailed',
  KEY_SYSTEM_SESSION_UPDATE_FAILED = 'keySystemSessionUpdateFailed',
  KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED = 'keySystemStatusOutputRestricted',
  KEY_SYSTEM_STATUS_INTERNAL_ERROR = 'keySystemStatusInternalError',
  // Identifier for a manifest load error - data: { url : faulty URL, response : { code: error code, text: error text }}
  MANIFEST_LOAD_ERROR = 'manifestLoadError',
  // Identifier for a manifest load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
  MANIFEST_LOAD_TIMEOUT = 'manifestLoadTimeOut',
  // Identifier for a manifest parsing error - data: { url : faulty URL, reason : error reason}
  MANIFEST_PARSING_ERROR = 'manifestParsingError',
  // Identifier for a manifest with only incompatible codecs error - data: { url : faulty URL, reason : error reason}
  MANIFEST_INCOMPATIBLE_CODECS_ERROR = 'manifestIncompatibleCodecsError',
  // Identifier for a level which contains no fragments - data: { url: faulty URL, reason: "no fragments found in level", level: index of the bad level }
  LEVEL_EMPTY_ERROR = 'levelEmptyError',
  // Identifier for a level load error - data: { url : faulty URL, response : { code: error code, text: error text }}
  LEVEL_LOAD_ERROR = 'levelLoadError',
  // Identifier for a level load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
  LEVEL_LOAD_TIMEOUT = 'levelLoadTimeOut',
  // Identifier for a level parse error - data: { url : faulty URL, error: Error, reason: error message }
  LEVEL_PARSING_ERROR = 'levelParsingError',
  // Identifier for a level switch error - data: { level : faulty level Id, event : error description}
  LEVEL_SWITCH_ERROR = 'levelSwitchError',
  // Identifier for an audio track load error - data: { url : faulty URL, response : { code: error code, text: error text }}
  AUDIO_TRACK_LOAD_ERROR = 'audioTrackLoadError',
  // Identifier for an audio track load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
  AUDIO_TRACK_LOAD_TIMEOUT = 'audioTrackLoadTimeOut',
  // Identifier for a subtitle track load error - data: { url : faulty URL, response : { code: error code, text: error text }}
  SUBTITLE_LOAD_ERROR = 'subtitleTrackLoadError',
  // Identifier for a subtitle track load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
  SUBTITLE_TRACK_LOAD_TIMEOUT = 'subtitleTrackLoadTimeOut',
  // Identifier for fragment load error - data: { frag : fragment object, response : { code: error code, text: error text }}
  FRAG_LOAD_ERROR = 'fragLoadError',
  // Identifier for fragment load timeout error - data: { frag : fragment object}
  FRAG_LOAD_TIMEOUT = 'fragLoadTimeOut',
  // Identifier for a fragment decryption error event - data: {id : demuxer Id,frag: fragment object, reason : parsing error description }
  FRAG_DECRYPT_ERROR = 'fragDecryptError',
  // Identifier for a fragment parsing error event - data: { id : demuxer Id, reason : parsing error description }
  // will be renamed DEMUX_PARSING_ERROR and switched to MUX_ERROR in the next major release
  FRAG_PARSING_ERROR = 'fragParsingError',
  // Identifier for a fragment or part load skipped because of a GAP tag or attribute
  FRAG_GAP = 'fragGap',
  // Identifier for a remux alloc error event - data: { id : demuxer Id, frag : fragment object, bytes : nb of bytes on which allocation failed , reason : error text }
  REMUX_ALLOC_ERROR = 'remuxAllocError',
  // Identifier for decrypt key load error - data: { frag : fragment object, response : { code: error code, text: error text }}
  KEY_LOAD_ERROR = 'keyLoadError',
  // Identifier for decrypt key load timeout error - data: { frag : fragment object}
  KEY_LOAD_TIMEOUT = 'keyLoadTimeOut',
  // Triggered when an exception occurs while adding a sourceBuffer to MediaSource - data : { error : exception , mimeType : mimeType }
  BUFFER_ADD_CODEC_ERROR = 'bufferAddCodecError',
  // Triggered when source buffer(s) could not be created using level (manifest CODECS attribute), parsed media, or best guess codec(s) - data: { reason : error reason }
  BUFFER_INCOMPATIBLE_CODECS_ERROR = 'bufferIncompatibleCodecsError',
  // Identifier for a buffer append error - data: append error description
  BUFFER_APPEND_ERROR = 'bufferAppendError',
  // Identifier for a buffer appending error event - data: appending error description
  BUFFER_APPENDING_ERROR = 'bufferAppendingError',
  // Identifier for a buffer stalled error event
  BUFFER_STALLED_ERROR = 'bufferStalledError',
  // Identifier for a buffer full event
  BUFFER_FULL_ERROR = 'bufferFullError',
  // Identifier for a buffer seek over hole event
  BUFFER_SEEK_OVER_HOLE = 'bufferSeekOverHole',
  // Identifier for a buffer nudge on stall (playback is stuck although currentTime is in a buffered area)
  BUFFER_NUDGE_ON_STALL = 'bufferNudgeOnStall',
  // Identifier for an internal exception happening inside hls.js while handling an event
  INTERNAL_EXCEPTION = 'internalException',
  // Identifier for an internal call to abort a loader
  INTERNAL_ABORTED = 'aborted',
  // Uncategorized error
  UNKNOWN = 'unknown',
}

406
server/node_modules/hls.js/src/events.ts generated vendored Normal file
View File

@@ -0,0 +1,406 @@
import {
ManifestLoadedData,
ManifestLoadingData,
MediaAttachedData,
MediaAttachingData,
LevelLoadingData,
LevelLoadedData,
ManifestParsedData,
LevelUpdatedData,
LevelsUpdatedData,
FragParsingUserdataData,
FragDecryptedData,
FragLoadedData,
InitPTSFoundData,
CuesParsedData,
SubtitleFragProcessedData,
NonNativeTextTracksData,
FragLoadingData,
AudioTrackLoadedData,
SubtitleTrackLoadedData,
ErrorData,
AudioTrackSwitchingData,
AudioTrackSwitchedData,
KeyLoadedData,
KeyLoadingData,
SubtitleTrackSwitchData,
SubtitleTracksUpdatedData,
LevelSwitchedData,
FragChangedData,
BufferAppendingData,
BufferCodecsData,
FragParsingMetadataData,
FragParsingInitSegmentData,
FragBufferedData,
BufferFlushingData,
BufferEOSData,
LevelSwitchingData,
MaxAutoLevelUpdatedData,
FPSDropLevelCappingData,
FPSDropData,
BufferCreatedData,
BufferAppendedData,
LevelPTSUpdatedData,
FragParsedData,
AudioTracksUpdatedData,
FragLoadEmergencyAbortedData,
BackBufferData,
LiveBackBufferData,
TrackLoadingData,
BufferFlushedData,
SteeringManifestLoadedData,
} from './types/events';
export enum Events {
// Fired before MediaSource is attaching to media element
MEDIA_ATTACHING = 'hlsMediaAttaching',
// Fired when MediaSource has been successfully attached to media element
MEDIA_ATTACHED = 'hlsMediaAttached',
// Fired before detaching MediaSource from media element
MEDIA_DETACHING = 'hlsMediaDetaching',
// Fired when MediaSource has been detached from media element
MEDIA_DETACHED = 'hlsMediaDetached',
// Fired when the buffer is going to be reset
BUFFER_RESET = 'hlsBufferReset',
// Fired when we know about the codecs that we need buffers for to push into - data: {tracks : { container, codec, levelCodec, initSegment, metadata }}
BUFFER_CODECS = 'hlsBufferCodecs',
// fired when sourcebuffers have been created - data: { tracks : tracks }
BUFFER_CREATED = 'hlsBufferCreated',
// fired when we append a segment to the buffer - data: { segment: segment object }
BUFFER_APPENDING = 'hlsBufferAppending',
// fired when we are done with appending a media segment to the buffer - data : { parent : segment parent that triggered BUFFER_APPENDING, pending : nb of segments waiting for appending for this segment parent}
BUFFER_APPENDED = 'hlsBufferAppended',
// fired when the stream is finished and we want to notify the media buffer that there will be no more data - data: { }
BUFFER_EOS = 'hlsBufferEos',
// fired when the media buffer should be flushed - data { startOffset, endOffset }
BUFFER_FLUSHING = 'hlsBufferFlushing',
// fired when the media buffer has been flushed - data: { }
BUFFER_FLUSHED = 'hlsBufferFlushed',
// fired to signal that a manifest loading starts - data: { url : manifestURL}
MANIFEST_LOADING = 'hlsManifestLoading',
// fired after manifest has been loaded - data: { levels : [available quality levels], audioTracks : [ available audio tracks ], url : manifestURL, stats : LoaderStats }
MANIFEST_LOADED = 'hlsManifestLoaded',
// fired after manifest has been parsed - data: { levels : [available quality levels], firstLevel : index of first quality level appearing in Manifest}
MANIFEST_PARSED = 'hlsManifestParsed',
// fired when a level switch is requested - data: { level : id of new level }
LEVEL_SWITCHING = 'hlsLevelSwitching',
// fired when a level switch is effective - data: { level : id of new level }
LEVEL_SWITCHED = 'hlsLevelSwitched',
// fired when a level playlist loading starts - data: { url : level URL, level : id of level being loaded}
LEVEL_LOADING = 'hlsLevelLoading',
// fired when a level playlist loading finishes - data: { details : levelDetails object, level : id of loaded level, stats : LoaderStats }
LEVEL_LOADED = 'hlsLevelLoaded',
// fired when a level's details have been updated based on previous details, after it has been loaded - data: { details : levelDetails object, level : id of updated level }
LEVEL_UPDATED = 'hlsLevelUpdated',
// fired when a level's PTS information has been updated after parsing a fragment - data: { details : levelDetails object, level : id of updated level, drift: PTS drift observed when parsing last fragment }
LEVEL_PTS_UPDATED = 'hlsLevelPtsUpdated',
// fired to notify that levels have changed after removing a level - data: { levels : [available quality levels] }
LEVELS_UPDATED = 'hlsLevelsUpdated',
// fired to notify that audio track lists has been updated - data: { audioTracks : audioTracks }
AUDIO_TRACKS_UPDATED = 'hlsAudioTracksUpdated',
// fired when an audio track switching is requested - data: { id : audio track id }
AUDIO_TRACK_SWITCHING = 'hlsAudioTrackSwitching',
// fired when an audio track switch actually occurs - data: { id : audio track id }
AUDIO_TRACK_SWITCHED = 'hlsAudioTrackSwitched',
// fired when an audio track loading starts - data: { url : audio track URL, id : audio track id }
AUDIO_TRACK_LOADING = 'hlsAudioTrackLoading',
// fired when an audio track loading finishes - data: { details : levelDetails object, id : audio track id, stats : LoaderStats }
AUDIO_TRACK_LOADED = 'hlsAudioTrackLoaded',
// fired to notify that subtitle track lists has been updated - data: { subtitleTracks : subtitleTracks }
SUBTITLE_TRACKS_UPDATED = 'hlsSubtitleTracksUpdated',
// fired to notify that subtitle tracks were cleared as a result of stopping the media
SUBTITLE_TRACKS_CLEARED = 'hlsSubtitleTracksCleared',
// fired when an subtitle track switch occurs - data: { id : subtitle track id }
SUBTITLE_TRACK_SWITCH = 'hlsSubtitleTrackSwitch',
// fired when a subtitle track loading starts - data: { url : subtitle track URL, id : subtitle track id }
SUBTITLE_TRACK_LOADING = 'hlsSubtitleTrackLoading',
// fired when a subtitle track loading finishes - data: { details : levelDetails object, id : subtitle track id, stats : LoaderStats }
SUBTITLE_TRACK_LOADED = 'hlsSubtitleTrackLoaded',
// fired when a subtitle fragment has been processed - data: { success : boolean, frag : the processed frag }
SUBTITLE_FRAG_PROCESSED = 'hlsSubtitleFragProcessed',
// fired when a set of VTTCues to be managed externally has been parsed - data: { type: string, track: string, cues: [ VTTCue ] }
CUES_PARSED = 'hlsCuesParsed',
// fired when a text track to be managed externally is found - data: { tracks: [ { label: string, kind: string, default: boolean } ] }
NON_NATIVE_TEXT_TRACKS_FOUND = 'hlsNonNativeTextTracksFound',
// fired when the first timestamp is found - data: { id : demuxer id, initPTS: initPTS, timescale: timescale, frag : fragment object }
INIT_PTS_FOUND = 'hlsInitPtsFound',
// fired when a fragment loading starts - data: { frag : fragment object }
FRAG_LOADING = 'hlsFragLoading',
// fired when a fragment loading is progressing - data: { frag : fragment object, { trequest, tfirst, loaded } }
// FRAG_LOAD_PROGRESS = 'hlsFragLoadProgress',
// Identifier for fragment load aborting for emergency switch down - data: { frag : fragment object }
FRAG_LOAD_EMERGENCY_ABORTED = 'hlsFragLoadEmergencyAborted',
// fired when a fragment loading is completed - data: { frag : fragment object, payload : fragment payload, stats : LoaderStats }
FRAG_LOADED = 'hlsFragLoaded',
// fired when a fragment has finished decrypting - data: { id : demuxer id, frag: fragment object, payload : fragment payload, stats : { tstart, tdecrypt } }
FRAG_DECRYPTED = 'hlsFragDecrypted',
// fired when Init Segment has been extracted from fragment - data: { id : demuxer id, frag: fragment object, moov : moov MP4 box, codecs : codecs found while parsing fragment }
FRAG_PARSING_INIT_SEGMENT = 'hlsFragParsingInitSegment',
// fired when parsing sei text is completed - data: { id : demuxer id, frag: fragment object, samples : [ sei samples pes ] }
FRAG_PARSING_USERDATA = 'hlsFragParsingUserdata',
// fired when parsing id3 is completed - data: { id : demuxer id, frag: fragment object, samples : [ id3 samples pes ] }
FRAG_PARSING_METADATA = 'hlsFragParsingMetadata',
// fired when data have been extracted from fragment - data: { id : demuxer id, frag: fragment object, data1 : moof MP4 box or TS fragments, data2 : mdat MP4 box or null}
// FRAG_PARSING_DATA = 'hlsFragParsingData',
// fired when fragment parsing is completed - data: { id : demuxer id, frag: fragment object }
FRAG_PARSED = 'hlsFragParsed',
// fired when fragment remuxed MP4 boxes have all been appended into SourceBuffer - data: { id : demuxer id, frag : fragment object, stats : LoaderStats }
FRAG_BUFFERED = 'hlsFragBuffered',
// fired when fragment matching with current media position is changing - data : { id : demuxer id, frag : fragment object }
FRAG_CHANGED = 'hlsFragChanged',
// Identifier for a FPS drop event - data: { currentDropped, currentDecoded, totalDroppedFrames }
FPS_DROP = 'hlsFpsDrop',
// triggered when FPS drop triggers auto level capping - data: { level, droppedLevel }
FPS_DROP_LEVEL_CAPPING = 'hlsFpsDropLevelCapping',
// triggered when maxAutoLevel changes - data { autoLevelCapping, levels, maxAutoLevel, minAutoLevel, maxHdcpLevel }
MAX_AUTO_LEVEL_UPDATED = 'hlsMaxAutoLevelUpdated',
// Identifier for an error event - data: { type : error type, details : error details, fatal : if true, hls.js cannot/will not try to recover, if false, hls.js will try to recover,other error specific data }
ERROR = 'hlsError',
// fired when hls.js instance starts destroying. Different from MEDIA_DETACHED as one could want to detach and reattach a media to the instance of hls.js to handle mid-rolls for example - data: { }
DESTROYING = 'hlsDestroying',
// fired when a decrypt key loading starts - data: { frag : fragment object }
KEY_LOADING = 'hlsKeyLoading',
// fired when a decrypt key loading is completed - data: { frag : fragment object, keyInfo : KeyLoaderInfo }
KEY_LOADED = 'hlsKeyLoaded',
// deprecated; please use BACK_BUFFER_REACHED - data : { bufferEnd: number }
LIVE_BACK_BUFFER_REACHED = 'hlsLiveBackBufferReached',
// fired when the back buffer is reached as defined by the backBufferLength config option - data : { bufferEnd: number }
BACK_BUFFER_REACHED = 'hlsBackBufferReached',
// fired after steering manifest has been loaded - data: { steeringManifest: SteeringManifest object, url: steering manifest URL }
STEERING_MANIFEST_LOADED = 'hlsSteeringManifestLoaded',
}
/**
 * Defines each Event type and payload by Event name. Used in {@link hls.js#HlsEventEmitter} to strongly type the event listener API.
 * Each key is an `Events` enum member; the listener receives the event name
 * followed by that event's payload type (events with no payload take only the name).
 */
export interface HlsListeners {
  // Media element attach/detach lifecycle
  [Events.MEDIA_ATTACHING]: (
    event: Events.MEDIA_ATTACHING,
    data: MediaAttachingData,
  ) => void;
  [Events.MEDIA_ATTACHED]: (
    event: Events.MEDIA_ATTACHED,
    data: MediaAttachedData,
  ) => void;
  [Events.MEDIA_DETACHING]: (event: Events.MEDIA_DETACHING) => void;
  [Events.MEDIA_DETACHED]: (event: Events.MEDIA_DETACHED) => void;
  // SourceBuffer management
  [Events.BUFFER_RESET]: (event: Events.BUFFER_RESET) => void;
  [Events.BUFFER_CODECS]: (
    event: Events.BUFFER_CODECS,
    data: BufferCodecsData,
  ) => void;
  [Events.BUFFER_CREATED]: (
    event: Events.BUFFER_CREATED,
    data: BufferCreatedData,
  ) => void;
  [Events.BUFFER_APPENDING]: (
    event: Events.BUFFER_APPENDING,
    data: BufferAppendingData,
  ) => void;
  [Events.BUFFER_APPENDED]: (
    event: Events.BUFFER_APPENDED,
    data: BufferAppendedData,
  ) => void;
  [Events.BUFFER_EOS]: (event: Events.BUFFER_EOS, data: BufferEOSData) => void;
  [Events.BUFFER_FLUSHING]: (
    event: Events.BUFFER_FLUSHING,
    data: BufferFlushingData,
  ) => void;
  [Events.BUFFER_FLUSHED]: (
    event: Events.BUFFER_FLUSHED,
    data: BufferFlushedData,
  ) => void;
  // Multivariant playlist loading/parsing
  [Events.MANIFEST_LOADING]: (
    event: Events.MANIFEST_LOADING,
    data: ManifestLoadingData,
  ) => void;
  [Events.MANIFEST_LOADED]: (
    event: Events.MANIFEST_LOADED,
    data: ManifestLoadedData,
  ) => void;
  [Events.MANIFEST_PARSED]: (
    event: Events.MANIFEST_PARSED,
    data: ManifestParsedData,
  ) => void;
  // Quality level switching and playlist updates
  [Events.LEVEL_SWITCHING]: (
    event: Events.LEVEL_SWITCHING,
    data: LevelSwitchingData,
  ) => void;
  [Events.LEVEL_SWITCHED]: (
    event: Events.LEVEL_SWITCHED,
    data: LevelSwitchedData,
  ) => void;
  [Events.LEVEL_LOADING]: (
    event: Events.LEVEL_LOADING,
    data: LevelLoadingData,
  ) => void;
  [Events.LEVEL_LOADED]: (
    event: Events.LEVEL_LOADED,
    data: LevelLoadedData,
  ) => void;
  [Events.LEVEL_UPDATED]: (
    event: Events.LEVEL_UPDATED,
    data: LevelUpdatedData,
  ) => void;
  [Events.LEVEL_PTS_UPDATED]: (
    event: Events.LEVEL_PTS_UPDATED,
    data: LevelPTSUpdatedData,
  ) => void;
  [Events.LEVELS_UPDATED]: (
    event: Events.LEVELS_UPDATED,
    data: LevelsUpdatedData,
  ) => void;
  // Alternate audio tracks
  [Events.AUDIO_TRACKS_UPDATED]: (
    event: Events.AUDIO_TRACKS_UPDATED,
    data: AudioTracksUpdatedData,
  ) => void;
  [Events.AUDIO_TRACK_SWITCHING]: (
    event: Events.AUDIO_TRACK_SWITCHING,
    data: AudioTrackSwitchingData,
  ) => void;
  [Events.AUDIO_TRACK_SWITCHED]: (
    event: Events.AUDIO_TRACK_SWITCHED,
    data: AudioTrackSwitchedData,
  ) => void;
  [Events.AUDIO_TRACK_LOADING]: (
    event: Events.AUDIO_TRACK_LOADING,
    data: TrackLoadingData,
  ) => void;
  [Events.AUDIO_TRACK_LOADED]: (
    event: Events.AUDIO_TRACK_LOADED,
    data: AudioTrackLoadedData,
  ) => void;
  // Subtitle tracks and cues
  [Events.SUBTITLE_TRACKS_UPDATED]: (
    event: Events.SUBTITLE_TRACKS_UPDATED,
    data: SubtitleTracksUpdatedData,
  ) => void;
  [Events.SUBTITLE_TRACKS_CLEARED]: (
    event: Events.SUBTITLE_TRACKS_CLEARED,
  ) => void;
  [Events.SUBTITLE_TRACK_SWITCH]: (
    event: Events.SUBTITLE_TRACK_SWITCH,
    data: SubtitleTrackSwitchData,
  ) => void;
  [Events.SUBTITLE_TRACK_LOADING]: (
    event: Events.SUBTITLE_TRACK_LOADING,
    data: TrackLoadingData,
  ) => void;
  [Events.SUBTITLE_TRACK_LOADED]: (
    event: Events.SUBTITLE_TRACK_LOADED,
    data: SubtitleTrackLoadedData,
  ) => void;
  [Events.SUBTITLE_FRAG_PROCESSED]: (
    event: Events.SUBTITLE_FRAG_PROCESSED,
    data: SubtitleFragProcessedData,
  ) => void;
  [Events.CUES_PARSED]: (
    event: Events.CUES_PARSED,
    data: CuesParsedData,
  ) => void;
  [Events.NON_NATIVE_TEXT_TRACKS_FOUND]: (
    event: Events.NON_NATIVE_TEXT_TRACKS_FOUND,
    data: NonNativeTextTracksData,
  ) => void;
  // Fragment load/parse/buffer pipeline
  [Events.INIT_PTS_FOUND]: (
    event: Events.INIT_PTS_FOUND,
    data: InitPTSFoundData,
  ) => void;
  [Events.FRAG_LOADING]: (
    event: Events.FRAG_LOADING,
    data: FragLoadingData,
  ) => void;
  // [Events.FRAG_LOAD_PROGRESS]: TodoEventType
  [Events.FRAG_LOAD_EMERGENCY_ABORTED]: (
    event: Events.FRAG_LOAD_EMERGENCY_ABORTED,
    data: FragLoadEmergencyAbortedData,
  ) => void;
  [Events.FRAG_LOADED]: (
    event: Events.FRAG_LOADED,
    data: FragLoadedData,
  ) => void;
  [Events.FRAG_DECRYPTED]: (
    event: Events.FRAG_DECRYPTED,
    data: FragDecryptedData,
  ) => void;
  [Events.FRAG_PARSING_INIT_SEGMENT]: (
    event: Events.FRAG_PARSING_INIT_SEGMENT,
    data: FragParsingInitSegmentData,
  ) => void;
  [Events.FRAG_PARSING_USERDATA]: (
    event: Events.FRAG_PARSING_USERDATA,
    data: FragParsingUserdataData,
  ) => void;
  [Events.FRAG_PARSING_METADATA]: (
    event: Events.FRAG_PARSING_METADATA,
    data: FragParsingMetadataData,
  ) => void;
  // [Events.FRAG_PARSING_DATA]: TodoEventType
  [Events.FRAG_PARSED]: (
    event: Events.FRAG_PARSED,
    data: FragParsedData,
  ) => void;
  [Events.FRAG_BUFFERED]: (
    event: Events.FRAG_BUFFERED,
    data: FragBufferedData,
  ) => void;
  [Events.FRAG_CHANGED]: (
    event: Events.FRAG_CHANGED,
    data: FragChangedData,
  ) => void;
  // Playback quality monitoring / level capping
  [Events.FPS_DROP]: (event: Events.FPS_DROP, data: FPSDropData) => void;
  [Events.FPS_DROP_LEVEL_CAPPING]: (
    event: Events.FPS_DROP_LEVEL_CAPPING,
    data: FPSDropLevelCappingData,
  ) => void;
  [Events.MAX_AUTO_LEVEL_UPDATED]: (
    event: Events.MAX_AUTO_LEVEL_UPDATED,
    data: MaxAutoLevelUpdatedData,
  ) => void;
  // Errors, teardown, decryption keys, buffer limits, content steering
  [Events.ERROR]: (event: Events.ERROR, data: ErrorData) => void;
  [Events.DESTROYING]: (event: Events.DESTROYING) => void;
  [Events.KEY_LOADING]: (
    event: Events.KEY_LOADING,
    data: KeyLoadingData,
  ) => void;
  [Events.KEY_LOADED]: (event: Events.KEY_LOADED, data: KeyLoadedData) => void;
  // deprecated; please use BACK_BUFFER_REACHED
  [Events.LIVE_BACK_BUFFER_REACHED]: (
    event: Events.LIVE_BACK_BUFFER_REACHED,
    data: LiveBackBufferData,
  ) => void;
  [Events.BACK_BUFFER_REACHED]: (
    event: Events.BACK_BUFFER_REACHED,
    data: BackBufferData,
  ) => void;
  [Events.STEERING_MANIFEST_LOADED]: (
    event: Events.STEERING_MANIFEST_LOADED,
    data: SteeringManifestLoadedData,
  ) => void;
}
/**
 * Strongly-typed event-emitter contract for Hls instances. Listener
 * signatures are derived from {@link HlsListeners}, so `on`/`once`/`off`
 * are type-checked per event name.
 */
export interface HlsEventEmitter {
  on<E extends keyof HlsListeners, Context = undefined>(
    event: E,
    listener: HlsListeners[E],
    context?: Context,
  ): void;
  once<E extends keyof HlsListeners, Context = undefined>(
    event: E,
    listener: HlsListeners[E],
    context?: Context,
  ): void;
  removeAllListeners<E extends keyof HlsListeners>(event?: E): void;
  off<E extends keyof HlsListeners, Context = undefined>(
    event: E,
    listener?: HlsListeners[E],
    context?: Context,
    once?: boolean,
  ): void;
  listeners<E extends keyof HlsListeners>(event: E): HlsListeners[E][];
  // Note: the event name is passed twice — once to select listeners and once
  // as the first listener argument — followed by the typed payload.
  emit<E extends keyof HlsListeners>(
    event: E,
    name: E,
    eventObject: Parameters<HlsListeners[E]>[1],
  ): boolean;
  listenerCount<E extends keyof HlsListeners>(event: E): number;
}

3
server/node_modules/hls.js/src/exports-default.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
// Default-export entry point: re-exports the Hls class as the module default.
import Hls from './hls';
export default Hls;

59
server/node_modules/hls.js/src/exports-named.ts generated vendored Normal file
View File

@@ -0,0 +1,59 @@
import Hls from './hls';
import { Events } from './events';
import { ErrorTypes, ErrorDetails } from './errors';
import { Level } from './types/level';
import AbrController from './controller/abr-controller';
import AudioTrackController from './controller/audio-track-controller';
import AudioStreamController from './controller/audio-stream-controller';
import BasePlaylistController from './controller/base-playlist-controller';
import BaseStreamController from './controller/base-stream-controller';
import BufferController from './controller/buffer-controller';
import CapLevelController from './controller/cap-level-controller';
import CMCDController from './controller/cmcd-controller';
import ContentSteeringController from './controller/content-steering-controller';
import EMEController from './controller/eme-controller';
import ErrorController from './controller/error-controller';
import FPSController from './controller/fps-controller';
import SubtitleTrackController from './controller/subtitle-track-controller';
// Public API surface: default export plus named re-exports of the
// controllers, loaders, and utility types imported above.
export default Hls;
export {
  Hls,
  ErrorDetails,
  ErrorTypes,
  Events,
  Level,
  AbrController,
  AudioStreamController,
  AudioTrackController,
  BasePlaylistController,
  BaseStreamController,
  BufferController,
  CapLevelController,
  CMCDController,
  ContentSteeringController,
  EMEController,
  ErrorController,
  FPSController,
  SubtitleTrackController,
};
// Re-exports forwarded directly from their defining modules.
export { SubtitleStreamController } from './controller/subtitle-stream-controller';
export { TimelineController } from './controller/timeline-controller';
export { KeySystems, KeySystemFormats } from './utils/mediakeys-helper';
export { DateRange } from './loader/date-range';
export { LoadStats } from './loader/load-stats';
export { LevelKey } from './loader/level-key';
export { LevelDetails } from './loader/level-details';
export { MetadataSchema } from './types/demuxer';
export { HlsSkip, HlsUrlParameters } from './types/level';
export { PlaylistLevelType } from './types/loader';
export { ChunkMetadata } from './types/transmuxer';
export { BaseSegment, Fragment, Part } from './loader/fragment';
export {
  NetworkErrorAction,
  ErrorActionFlags,
} from './controller/error-controller';
export { AttrList } from './utils/attr-list';
export { isSupported, isMSESupported } from './is-supported';
export { getMediaSource } from './utils/mediasource-helper';

1170
server/node_modules/hls.js/src/hls.ts generated vendored Normal file

File diff suppressed because it is too large Load Diff

54
server/node_modules/hls.js/src/is-supported.ts generated vendored Normal file
View File

@@ -0,0 +1,54 @@
import { getMediaSource } from './utils/mediasource-helper';
import { mimeTypeForCodec } from './utils/codecs';
import type { ExtendedSourceBuffer } from './types/buffer';
/**
 * Resolves the SourceBuffer constructor from the global scope, falling back
 * to the prefixed WebKit implementation where the standard one is absent.
 */
function getSourceBuffer(): typeof self.SourceBuffer {
  const NativeSourceBuffer = self.SourceBuffer;
  return NativeSourceBuffer || (self as any).WebKitSourceBuffer;
}
export function isMSESupported(): boolean {
const mediaSource = getMediaSource();
if (!mediaSource) {
return false;
}
// if SourceBuffer is exposed ensure its API is valid
// Older browsers do not expose SourceBuffer globally so checking SourceBuffer.prototype is impossible
const sourceBuffer = getSourceBuffer();
return (
!sourceBuffer ||
(sourceBuffer.prototype &&
typeof sourceBuffer.prototype.appendBuffer === 'function' &&
typeof sourceBuffer.prototype.remove === 'function')
);
}
/**
 * Reports whether hls.js can play in this environment: MSE must be usable
 * and the MediaSource must accept at least one of the probed video codecs
 * or one of the probed audio codecs.
 */
export function isSupported(): boolean {
  if (!isMSESupported()) {
    return false;
  }
  const mediaSource = getMediaSource();
  if (typeof mediaSource?.isTypeSupported !== 'function') {
    return false;
  }
  const videoCodecs = [
    'avc1.42E01E,mp4a.40.2',
    'av01.0.01M.08',
    'vp09.00.50.08',
  ];
  const audioCodecs = ['mp4a.40.2', 'fLaC'];
  return (
    videoCodecs.some((codec) =>
      mediaSource.isTypeSupported(mimeTypeForCodec(codec, 'video')),
    ) ||
    audioCodecs.some((codec) =>
      mediaSource.isTypeSupported(mimeTypeForCodec(codec, 'audio')),
    )
  );
}
export function changeTypeSupported(): boolean {
const sourceBuffer = getSourceBuffer();
return (
typeof (sourceBuffer?.prototype as ExtendedSourceBuffer)?.changeType ===
'function'
);
}

132
server/node_modules/hls.js/src/loader/date-range.ts generated vendored Normal file
View File

@@ -0,0 +1,132 @@
import { AttrList } from '../utils/attr-list';
import { logger } from '../utils/logger';
// Avoid exporting const enum so that these values can be inlined
// Attribute names used by EXT-X-DATERANGE playlist tags.
const enum DateRangeAttribute {
  ID = 'ID',
  CLASS = 'CLASS',
  START_DATE = 'START-DATE',
  DURATION = 'DURATION',
  END_DATE = 'END-DATE',
  END_ON_NEXT = 'END-ON-NEXT',
  PLANNED_DURATION = 'PLANNED-DURATION',
  SCTE35_OUT = 'SCTE35-OUT',
  SCTE35_IN = 'SCTE35-IN',
}
/**
 * True for any DATERANGE attribute that is not one of the structural
 * attributes (ID/CLASS/START-DATE/DURATION/END-DATE/END-ON-NEXT) — i.e.
 * attributes that carry cue payload data.
 */
export function isDateRangeCueAttribute(attrName: string): boolean {
  switch (attrName) {
    case DateRangeAttribute.ID:
    case DateRangeAttribute.CLASS:
    case DateRangeAttribute.START_DATE:
    case DateRangeAttribute.DURATION:
    case DateRangeAttribute.END_DATE:
    case DateRangeAttribute.END_ON_NEXT:
      return false;
    default:
      return true;
  }
}
/**
 * True when the attribute is one of the SCTE-35 splice payload attributes
 * (SCTE35-OUT / SCTE35-IN) of a DATERANGE tag.
 */
export function isSCTE35Attribute(attrName: string): boolean {
  switch (attrName) {
    case DateRangeAttribute.SCTE35_OUT:
    case DateRangeAttribute.SCTE35_IN:
      return true;
    default:
      return false;
  }
}
/**
 * Parsed EXT-X-DATERANGE tag. When a tag with the same ID was seen before,
 * the two attribute lists are merged and any conflicting attribute value
 * marks this instance invalid (see `isValid`).
 */
export class DateRange {
  // Merged attribute list for this tag (and any earlier tag with the same ID)
  public attr: AttrList;
  // Parsed START-DATE; may be an Invalid Date if the attribute was malformed
  private _startDate: Date;
  // Parsed END-DATE, only kept when it parses to a finite time
  private _endDate?: Date;
  // Name of the first attribute that conflicted with a same-ID tag, if any
  private _badValueForSameId?: string;

  constructor(dateRangeAttr: AttrList, dateRangeWithSameId?: DateRange) {
    if (dateRangeWithSameId) {
      const previousAttr = dateRangeWithSameId.attr;
      // Compare only attributes present on the new tag; a differing value
      // for an attribute both tags define is a spec violation.
      for (const key in previousAttr) {
        if (
          Object.prototype.hasOwnProperty.call(dateRangeAttr, key) &&
          dateRangeAttr[key] !== previousAttr[key]
        ) {
          logger.warn(
            `DATERANGE tag attribute: "${key}" does not match for tags with ID: "${dateRangeAttr.ID}"`,
          );
          this._badValueForSameId = key;
          break;
        }
      }
      // Merge DateRange tags with the same ID (new tag's attributes win)
      dateRangeAttr = Object.assign(
        new AttrList({}),
        previousAttr,
        dateRangeAttr,
      );
    }
    this.attr = dateRangeAttr;
    this._startDate = new Date(dateRangeAttr[DateRangeAttribute.START_DATE]);
    if (DateRangeAttribute.END_DATE in this.attr) {
      const endDate = new Date(this.attr[DateRangeAttribute.END_DATE]);
      if (Number.isFinite(endDate.getTime())) {
        this._endDate = endDate;
      }
    }
  }

  get id(): string {
    return this.attr.ID;
  }

  get class(): string {
    return this.attr.CLASS;
  }

  get startDate(): Date {
    return this._startDate;
  }

  // END-DATE when present, otherwise START-DATE + DURATION, otherwise null.
  get endDate(): Date | null {
    if (this._endDate) {
      return this._endDate;
    }
    const duration = this.duration;
    if (duration !== null) {
      return new Date(this._startDate.getTime() + duration * 1000);
    }
    return null;
  }

  // DURATION attribute in seconds, or derived from END-DATE − START-DATE.
  get duration(): number | null {
    if (DateRangeAttribute.DURATION in this.attr) {
      const duration = this.attr.decimalFloatingPoint(
        DateRangeAttribute.DURATION,
      );
      if (Number.isFinite(duration)) {
        return duration;
      }
    } else if (this._endDate) {
      return (this._endDate.getTime() - this._startDate.getTime()) / 1000;
    }
    return null;
  }

  get plannedDuration(): number | null {
    if (DateRangeAttribute.PLANNED_DURATION in this.attr) {
      return this.attr.decimalFloatingPoint(
        DateRangeAttribute.PLANNED_DURATION,
      );
    }
    return null;
  }

  get endOnNext(): boolean {
    return this.attr.bool(DateRangeAttribute.END_ON_NEXT);
  }

  // A tag is valid when it has an ID, no same-ID attribute conflict, a
  // parseable start date, a non-negative duration (if any), and a CLASS
  // whenever END-ON-NEXT is set.
  get isValid(): boolean {
    return (
      !!this.id &&
      !this._badValueForSameId &&
      Number.isFinite(this.startDate.getTime()) &&
      (this.duration === null || this.duration >= 0) &&
      (!this.endOnNext || !!this.class)
    );
  }
}

View File

@@ -0,0 +1,399 @@
import { ErrorTypes, ErrorDetails } from '../errors';
import { Fragment } from './fragment';
import {
Loader,
LoaderConfiguration,
FragmentLoaderContext,
} from '../types/loader';
import { getLoaderConfigWithoutReties } from '../utils/error-helper';
import type { HlsConfig } from '../config';
import type { BaseSegment, Part } from './fragment';
import type {
ErrorData,
FragLoadedData,
PartsLoadedData,
} from '../types/events';
// 128 KiB — used as the loader highWaterMark for media (non-init) segments
const MIN_CHUNK_SIZE = Math.pow(2, 17);
/**
 * Loads media fragments and low-latency parts with the configured loader
 * (config.fLoader or config.loader), converting loader callbacks into
 * Promises that reject with {@link LoadError}. Only one fragment may be
 * loading at any given time.
 */
export default class FragmentLoader {
  private readonly config: HlsConfig;
  // Loader for the request currently in flight (null when idle)
  private loader: Loader<FragmentLoaderContext> | null = null;
  private partLoadTimeout: number = -1;

  constructor(config: HlsConfig) {
    this.config = config;
  }

  // Tears down and releases the active loader, if any.
  destroy() {
    if (this.loader) {
      this.loader.destroy();
      this.loader = null;
    }
  }

  abort() {
    if (this.loader) {
      // Abort the loader for current fragment. Only one may load at any given time
      this.loader.abort();
    }
  }

  /**
   * Loads a whole fragment. Aborts any in-flight request first.
   * @param frag fragment to load (must have a resolvable url)
   * @param onProgress optional callback invoked with progressively loaded data
   * @returns promise resolving with the fragment payload; rejects with
   *          LoadError on missing url, GAP tag, HTTP error, abort or timeout
   */
  load(
    frag: Fragment,
    onProgress?: FragmentLoadProgressCallback,
  ): Promise<FragLoadedData> {
    const url = frag.url;
    if (!url) {
      return Promise.reject(
        new LoadError({
          type: ErrorTypes.NETWORK_ERROR,
          details: ErrorDetails.FRAG_LOAD_ERROR,
          fatal: false,
          frag,
          error: new Error(
            `Fragment does not have a ${url ? 'part list' : 'url'}`,
          ),
          networkDetails: null,
        }),
      );
    }
    this.abort();
    const config = this.config;
    const FragmentILoader = config.fLoader;
    const DefaultILoader = config.loader;
    return new Promise((resolve, reject) => {
      if (this.loader) {
        this.loader.destroy();
      }
      if (frag.gap) {
        // Only reject for an explicit GAP tag; a gap flag without the tag is
        // a temporary marker that is cleared here.
        if (frag.tagList.some((tags) => tags[0] === 'GAP')) {
          reject(createGapLoadError(frag));
          return;
        } else {
          // Reset temporary treatment as GAP tag
          frag.gap = false;
        }
      }
      const loader =
        (this.loader =
        frag.loader =
          FragmentILoader
            ? new FragmentILoader(config)
            : (new DefaultILoader(config) as Loader<FragmentLoaderContext>));
      const loaderContext = createLoaderContext(frag);
      const loadPolicy = getLoaderConfigWithoutReties(
        config.fragLoadPolicy.default,
      );
      const loaderConfig: LoaderConfiguration = {
        loadPolicy,
        timeout: loadPolicy.maxLoadTimeMs,
        maxRetry: 0,
        retryDelay: 0,
        maxRetryDelay: 0,
        // Init segments are delivered whole; media segments may be chunked
        highWaterMark: frag.sn === 'initSegment' ? Infinity : MIN_CHUNK_SIZE,
      };
      // Assign frag stats to the loader's stats reference
      frag.stats = loader.stats;
      loader.load(loaderContext, loaderConfig, {
        onSuccess: (response, stats, context, networkDetails) => {
          this.resetLoader(frag, loader);
          let payload = response.data as ArrayBuffer;
          // When the loader context requested extra leading bytes (AES-128
          // init segment range, see createLoaderContext), the first 16 bytes
          // carry the IV and are stripped from the payload.
          if (context.resetIV && frag.decryptdata) {
            frag.decryptdata.iv = new Uint8Array(payload.slice(0, 16));
            payload = payload.slice(16);
          }
          resolve({
            frag,
            part: null,
            payload,
            networkDetails,
          });
        },
        onError: (response, context, networkDetails, stats) => {
          this.resetLoader(frag, loader);
          reject(
            new LoadError({
              type: ErrorTypes.NETWORK_ERROR,
              details: ErrorDetails.FRAG_LOAD_ERROR,
              fatal: false,
              frag,
              response: { url, data: undefined, ...response },
              error: new Error(`HTTP Error ${response.code} ${response.text}`),
              networkDetails,
              stats,
            }),
          );
        },
        onAbort: (stats, context, networkDetails) => {
          this.resetLoader(frag, loader);
          reject(
            new LoadError({
              type: ErrorTypes.NETWORK_ERROR,
              details: ErrorDetails.INTERNAL_ABORTED,
              fatal: false,
              frag,
              error: new Error('Aborted'),
              networkDetails,
              stats,
            }),
          );
        },
        onTimeout: (stats, context, networkDetails) => {
          this.resetLoader(frag, loader);
          reject(
            new LoadError({
              type: ErrorTypes.NETWORK_ERROR,
              details: ErrorDetails.FRAG_LOAD_TIMEOUT,
              fatal: false,
              frag,
              error: new Error(`Timeout after ${loaderConfig.timeout}ms`),
              networkDetails,
              stats,
            }),
          );
        },
        onProgress: (stats, context, data, networkDetails) => {
          if (onProgress) {
            onProgress({
              frag,
              part: null,
              payload: data as ArrayBuffer,
              networkDetails,
            });
          }
        },
      });
    });
  }

  /**
   * Loads a single low-latency part of a fragment. `onProgress` is invoked
   * with the complete part data just before the promise resolves.
   */
  public loadPart(
    frag: Fragment,
    part: Part,
    onProgress: FragmentLoadProgressCallback,
  ): Promise<FragLoadedData> {
    this.abort();
    const config = this.config;
    const FragmentILoader = config.fLoader;
    const DefaultILoader = config.loader;
    return new Promise((resolve, reject) => {
      if (this.loader) {
        this.loader.destroy();
      }
      if (frag.gap || part.gap) {
        reject(createGapLoadError(frag, part));
        return;
      }
      const loader =
        (this.loader =
        frag.loader =
          FragmentILoader
            ? new FragmentILoader(config)
            : (new DefaultILoader(config) as Loader<FragmentLoaderContext>));
      const loaderContext = createLoaderContext(frag, part);
      // Should we define another load policy for parts?
      const loadPolicy = getLoaderConfigWithoutReties(
        config.fragLoadPolicy.default,
      );
      const loaderConfig: LoaderConfiguration = {
        loadPolicy,
        timeout: loadPolicy.maxLoadTimeMs,
        maxRetry: 0,
        retryDelay: 0,
        maxRetryDelay: 0,
        highWaterMark: MIN_CHUNK_SIZE,
      };
      // Assign part stats to the loader's stats reference
      part.stats = loader.stats;
      loader.load(loaderContext, loaderConfig, {
        onSuccess: (response, stats, context, networkDetails) => {
          this.resetLoader(frag, loader);
          // Fold this part's stats into the fragment-level stats
          this.updateStatsFromPart(frag, part);
          const partLoadedData: FragLoadedData = {
            frag,
            part,
            payload: response.data as ArrayBuffer,
            networkDetails,
          };
          onProgress(partLoadedData);
          resolve(partLoadedData);
        },
        onError: (response, context, networkDetails, stats) => {
          this.resetLoader(frag, loader);
          reject(
            new LoadError({
              type: ErrorTypes.NETWORK_ERROR,
              details: ErrorDetails.FRAG_LOAD_ERROR,
              fatal: false,
              frag,
              part,
              response: {
                url: loaderContext.url,
                data: undefined,
                ...response,
              },
              error: new Error(`HTTP Error ${response.code} ${response.text}`),
              networkDetails,
              stats,
            }),
          );
        },
        onAbort: (stats, context, networkDetails) => {
          // Propagate the part's aborted flag to the fragment stats
          frag.stats.aborted = part.stats.aborted;
          this.resetLoader(frag, loader);
          reject(
            new LoadError({
              type: ErrorTypes.NETWORK_ERROR,
              details: ErrorDetails.INTERNAL_ABORTED,
              fatal: false,
              frag,
              part,
              error: new Error('Aborted'),
              networkDetails,
              stats,
            }),
          );
        },
        onTimeout: (stats, context, networkDetails) => {
          this.resetLoader(frag, loader);
          reject(
            new LoadError({
              type: ErrorTypes.NETWORK_ERROR,
              details: ErrorDetails.FRAG_LOAD_TIMEOUT,
              fatal: false,
              frag,
              part,
              error: new Error(`Timeout after ${loaderConfig.timeout}ms`),
              networkDetails,
              stats,
            }),
          );
        },
      });
    });
  }

  // Accumulates a loaded part's byte counts and timing into the fragment's
  // stats, estimating the fragment total from the per-part sizes seen so far.
  private updateStatsFromPart(frag: Fragment, part: Part) {
    const fragStats = frag.stats;
    const partStats = part.stats;
    const partTotal = partStats.total;
    fragStats.loaded += partStats.loaded;
    if (partTotal) {
      // Estimate remaining bytes from the average loaded bytes per part
      const estTotalParts = Math.round(frag.duration / part.duration);
      const estLoadedParts = Math.min(
        Math.round(fragStats.loaded / partTotal),
        estTotalParts,
      );
      const estRemainingParts = estTotalParts - estLoadedParts;
      const estRemainingBytes =
        estRemainingParts * Math.round(fragStats.loaded / estLoadedParts);
      fragStats.total = fragStats.loaded + estRemainingBytes;
    } else {
      fragStats.total = Math.max(fragStats.loaded, fragStats.total);
    }
    const fragLoading = fragStats.loading;
    const partLoading = partStats.loading;
    if (fragLoading.start) {
      // add to fragment loader latency
      fragLoading.first += partLoading.first - partLoading.start;
    } else {
      fragLoading.start = partLoading.start;
      fragLoading.first = partLoading.first;
    }
    fragLoading.end = partLoading.end;
  }

  // Detaches and destroys the given loader; clears the part-load timer when
  // it is the active loader.
  private resetLoader(frag: Fragment, loader: Loader<FragmentLoaderContext>) {
    frag.loader = null;
    if (this.loader === loader) {
      self.clearTimeout(this.partLoadTimeout);
      this.loader = null;
    }
    loader.destroy();
  }
}
/**
 * Builds the loader context for a fragment or part request, applying the
 * segment's byte range and the AES-128 init-segment range adjustments.
 */
function createLoaderContext(
  frag: Fragment,
  part: Part | null = null,
): FragmentLoaderContext {
  const segment: BaseSegment = part || frag;
  const context: FragmentLoaderContext = {
    frag,
    part,
    responseType: 'arraybuffer',
    url: segment.url,
    headers: {},
    rangeStart: 0,
    rangeEnd: 0,
  };
  const start = segment.byteRangeStartOffset as number;
  const end = segment.byteRangeEndOffset as number;
  if (!Number.isFinite(start) || !Number.isFinite(end)) {
    return context;
  }
  let rangeStart = start;
  let rangeEnd = end;
  if (frag.sn === 'initSegment' && frag.decryptdata?.method === 'AES-128') {
    // MAP segment encrypted with method 'AES-128', when served with HTTP Range,
    // has the unencrypted size specified in the range.
    // Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
    const unencryptedLen = end - start;
    const remainder = unencryptedLen % 16;
    if (remainder) {
      // Round the end up to the AES block size
      rangeEnd = end + (16 - remainder);
    }
    if (start !== 0) {
      // Pull in the preceding 16 bytes, which carry the IV
      context.resetIV = true;
      rangeStart = start - 16;
    }
  }
  context.rangeStart = rangeStart;
  context.rangeEnd = rangeEnd;
  return context;
}
/**
 * Builds the LoadError used to reject loads of GAP fragments/parts, marking
 * the target segment's stats as aborted.
 */
function createGapLoadError(frag: Fragment, part?: Part): LoadError {
  const errorData: FragLoadFailResult = {
    type: ErrorTypes.MEDIA_ERROR,
    details: ErrorDetails.FRAG_GAP,
    fatal: false,
    frag,
    error: new Error(`GAP ${frag.gap ? 'tag' : 'attribute'} found`),
    networkDetails: null,
  };
  if (part) {
    errorData.part = part;
  }
  (part || frag).stats.aborted = true;
  return new LoadError(errorData);
}
/**
 * Error thrown/rejected by FragmentLoader; carries the structured failure
 * payload in `data` while the message mirrors the underlying error.
 */
export class LoadError extends Error {
  constructor(public readonly data: FragLoadFailResult) {
    super(data.error.message);
  }
}
/**
 * Failure payload attached to a {@link LoadError} raised while loading a
 * fragment or part.
 */
export interface FragLoadFailResult extends ErrorData {
  frag: Fragment;
  part?: Part;
  // Present for HTTP failures only
  response?: {
    data: any;
    // error status code
    code: number;
    // error description
    text: string;
    url: string;
  };
  networkDetails: any;
}

// Callback invoked with progressively loaded fragment or part data.
export type FragmentLoadProgressCallback = (
  result: FragLoadedData | PartsLoadedData,
) => void;

320
server/node_modules/hls.js/src/loader/fragment.ts generated vendored Normal file
View File

@@ -0,0 +1,320 @@
import { buildAbsoluteURL } from 'url-toolkit';
import { LevelKey } from './level-key';
import { LoadStats } from './load-stats';
import { AttrList } from '../utils/attr-list';
import type {
FragmentLoaderContext,
KeyLoaderContext,
Loader,
PlaylistLevelType,
} from '../types/loader';
import type { KeySystemFormats } from '../utils/mediakeys-helper';
// Kinds of elementary streams a fragment can carry.
export const enum ElementaryStreamTypes {
  AUDIO = 'audio',
  VIDEO = 'video',
  AUDIOVIDEO = 'audiovideo',
}

// Timing info recorded for one elementary stream within a fragment.
export interface ElementaryStreamInfo {
  startPTS: number;
  endPTS: number;
  startDTS: number;
  endDTS: number;
  // Set when only part of the stream's samples are present
  partial?: boolean;
}

// Per-stream-type info map; null for stream types the fragment lacks.
export type ElementaryStreams = Record<
  ElementaryStreamTypes,
  ElementaryStreamInfo | null
>;
/**
 * Shared base for playlist segments and parts: lazy absolute-URL resolution
 * against the playlist URL, and EXT-X-BYTERANGE handling.
 */
export class BaseSegment {
  private _byteRange: [number, number] | null = null;
  private _url: string | null = null;

  // baseurl is the URL to the playlist
  public readonly baseurl: string;
  // relurl is the portion of the URL that comes from inside the playlist.
  public relurl?: string;

  // Holds the types of data this fragment supports
  public elementaryStreams: ElementaryStreams = {
    [ElementaryStreamTypes.AUDIO]: null,
    [ElementaryStreamTypes.VIDEO]: null,
    [ElementaryStreamTypes.AUDIOVIDEO]: null,
  };

  constructor(baseurl: string) {
    this.baseurl = baseurl;
  }

  /**
   * Parses an EXT-X-BYTERANGE value ("<length>[@<offset>]") into a
   * [start, end] pair. Without an explicit offset, the range continues
   * from the end of `previous`.
   */
  setByteRange(value: string, previous?: BaseSegment) {
    const [lengthPart, offsetPart] = value.split('@', 2);
    const start =
      offsetPart === undefined
        ? previous?.byteRangeEndOffset || 0
        : parseInt(offsetPart);
    this._byteRange = [start, start + parseInt(lengthPart)];
  }

  get byteRange(): [number, number] | [] {
    return this._byteRange ? this._byteRange : [];
  }

  get byteRangeStartOffset(): number | undefined {
    return this.byteRange[0];
  }

  get byteRangeEndOffset(): number | undefined {
    return this.byteRange[1];
  }

  // Absolute segment URL, resolved lazily from baseurl + relurl and cached.
  get url(): string {
    const { baseurl, relurl } = this;
    if (!this._url && baseurl && relurl) {
      this._url = buildAbsoluteURL(baseurl, relurl, { alwaysNormalize: true });
    }
    return this._url || '';
  }

  set url(value: string) {
    this._url = value;
  }
}
/**
 * Object representing parsed data from an HLS Segment. Found in {@link hls.js#LevelDetails.fragments}.
 */
export class Fragment extends BaseSegment {
  // Lazily-resolved decryption data (see the `decryptdata` getter); unlike
  // `levelkeys`, this carries an IV initialized for this specific segment
  private _decryptdata: LevelKey | null = null;
  // EXT-X-PROGRAM-DATE-TIME value as written in the playlist (unparsed string)
  public rawProgramDateTime: string | null = null;
  // EXT-X-PROGRAM-DATE-TIME parsed to a numeric timestamp (ms)
  public programDateTime: number | null = null;
  // Raw playlist tags that applied to this segment
  public tagList: Array<string[]> = [];
  // EXTINF has to be present for a m3u8 to be considered valid
  public duration: number = 0;
  // sn notates the sequence number for a segment, and if set to a string can be 'initSegment'
  public sn: number | 'initSegment' = 0;
  // levelkeys are the EXT-X-KEY tags that apply to this segment for decryption
  // core difference from the private field _decryptdata is the lack of the initialized IV
  // _decryptdata will set the IV for this segment based on the segment number in the fragment
  public levelkeys?: { [key: string]: LevelKey };
  // A string representing the fragment type (main/audio/subtitle playlist)
  public readonly type: PlaylistLevelType;
  // A reference to the loader. Set while the fragment is loading, and removed afterwards. Used to abort fragment loading
  public loader: Loader<FragmentLoaderContext> | null = null;
  // A reference to the key loader. Set while the key is loading, and removed afterwards. Used to abort key loading
  public keyLoader: Loader<KeyLoaderContext> | null = null;
  // The level/track index to which the fragment belongs
  public level: number = -1;
  // The continuity counter of the fragment
  public cc: number = 0;
  // The starting Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
  public startPTS?: number;
  // The ending Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
  public endPTS?: number;
  // The starting Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
  public startDTS!: number;
  // The ending Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
  public endDTS!: number;
  // The start time of the fragment, as listed in the manifest. Updated after transmux complete.
  public start: number = 0;
  // Set by `updateFragPTSDTS` in level-helper
  public deltaPTS?: number;
  // The maximum starting Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
  public maxStartPTS?: number;
  // The minimum ending Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
  public minEndPTS?: number;
  // Load/parse timing information
  public stats: LoadStats = new LoadStats();
  // Init Segment bytes (unset for media segments)
  public data?: Uint8Array;
  // A flag indicating whether the segment was downloaded in order to test bitrate, and was not buffered
  public bitrateTest: boolean = false;
  // #EXTINF segment title
  public title: string | null = null;
  // The Media Initialization Section for this segment
  public initSegment: Fragment | null = null;
  // Fragment is the last fragment in the media playlist
  public endList?: boolean;
  // Fragment is marked by an EXT-X-GAP tag indicating that it does not contain media data and should not be loaded
  public gap?: boolean;
  // Deprecated
  public urlId: number = 0;

  constructor(type: PlaylistLevelType, baseurl: string) {
    super(baseurl);
    this.type = type;
  }

  /**
   * Decryption data for this segment, resolved lazily from `levelkeys`:
   * - skipped entirely when a NONE key is present;
   * - the 'identity' key is preferred when listed;
   * - a single listed key-format is used directly;
   * - with multiple key-formats, stays null until the key-loader calls
   *   `setKeyFormat` for the selected key-system.
   */
  get decryptdata(): LevelKey | null {
    const { levelkeys } = this;
    if (!levelkeys && !this._decryptdata) {
      return null;
    }
    if (!this._decryptdata && this.levelkeys && !this.levelkeys.NONE) {
      const key = this.levelkeys.identity;
      if (key) {
        this._decryptdata = key.getDecryptData(this.sn);
      } else {
        const keyFormats = Object.keys(this.levelkeys);
        if (keyFormats.length === 1) {
          return (this._decryptdata = this.levelkeys[
            keyFormats[0]
          ].getDecryptData(this.sn));
        } else {
          // Multiple keys. key-loader to call Fragment.setKeyFormat based on selected key-system.
        }
      }
    }
    return this._decryptdata;
  }

  // End time of the fragment on the media timeline (start + duration)
  get end(): number {
    return this.start + this.duration;
  }

  // Program date-time at the end of the fragment (ms), or null when unknown
  get endProgramDateTime() {
    if (this.programDateTime === null) {
      return null;
    }
    if (!Number.isFinite(this.programDateTime)) {
      return null;
    }
    const duration = !Number.isFinite(this.duration) ? 0 : this.duration;
    return this.programDateTime + duration * 1000;
  }

  // True when decryption data or any non-trivial EXT-X-KEY entry indicates
  // this fragment needs decryption
  get encrypted() {
    // At the m3u8-parser level we need to add support for manifest signalled keyformats
    // when we want the fragment to start reporting that it is encrypted.
    // Currently, keyFormat will only be set for identity keys
    if (this._decryptdata?.encrypted) {
      return true;
    } else if (this.levelkeys) {
      const keyFormats = Object.keys(this.levelkeys);
      const len = keyFormats.length;
      if (len > 1 || (len === 1 && this.levelkeys[keyFormats[0]].encrypted)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Select the key matching `keyFormat` from `levelkeys` as this fragment's
   * decryption data. No-op when decryption data is already resolved or the
   * format is not listed.
   */
  setKeyFormat(keyFormat: KeySystemFormats) {
    if (this.levelkeys) {
      const key = this.levelkeys[keyFormat];
      if (key && !this._decryptdata) {
        this._decryptdata = key.getDecryptData(this.sn);
      }
    }
  }

  // Abort any in-flight fragment and key requests for this fragment
  abortRequests(): void {
    this.loader?.abort();
    this.keyLoader?.abort();
  }

  /**
   * Record (or widen) the PTS/DTS range parsed for an elementary stream type.
   * Existing info is merged by taking the min of starts and max of ends.
   */
  setElementaryStreamInfo(
    type: ElementaryStreamTypes,
    startPTS: number,
    endPTS: number,
    startDTS: number,
    endDTS: number,
    partial: boolean = false,
  ) {
    const { elementaryStreams } = this;
    const info = elementaryStreams[type];
    if (!info) {
      elementaryStreams[type] = {
        startPTS,
        endPTS,
        startDTS,
        endDTS,
        partial,
      };
      return;
    }
    info.startPTS = Math.min(info.startPTS, startPTS);
    info.endPTS = Math.max(info.endPTS, endPTS);
    info.startDTS = Math.min(info.startDTS, startDTS);
    info.endDTS = Math.max(info.endDTS, endDTS);
  }

  // Reset all elementary-stream ranges (audio, video, muxed audiovideo)
  clearElementaryStreamInfo() {
    const { elementaryStreams } = this;
    elementaryStreams[ElementaryStreamTypes.AUDIO] = null;
    elementaryStreams[ElementaryStreamTypes.VIDEO] = null;
    elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO] = null;
  }
}
/**
 * Object representing parsed data from an HLS Partial Segment. Found in {@link hls.js#LevelDetails.partList}.
 */
export class Part extends BaseSegment {
  // Offset of this part from the start of its parent fragment, in seconds
  public readonly fragOffset: number = 0;
  public readonly duration: number = 0;
  public readonly gap: boolean = false;
  public readonly independent: boolean = false;
  public readonly relurl: string;
  public readonly fragment: Fragment;
  public readonly index: number;
  // Load/parse timing information for this part
  public stats: LoadStats = new LoadStats();

  /**
   * @param partAttrs attributes of the EXT-X-PART tag
   * @param frag parent fragment this part belongs to
   * @param baseurl playlist URL used to resolve the part URI
   * @param index position of this part within the fragment
   * @param previous preceding part of the same fragment, if any
   */
  constructor(
    partAttrs: AttrList,
    frag: Fragment,
    baseurl: string,
    index: number,
    previous?: Part,
  ) {
    super(baseurl);
    this.duration = partAttrs.decimalFloatingPoint('DURATION');
    this.gap = partAttrs.bool('GAP');
    this.independent = partAttrs.bool('INDEPENDENT');
    this.relurl = partAttrs.enumeratedString('URI') as string;
    this.fragment = frag;
    this.index = index;
    const rangeSpec = partAttrs.enumeratedString('BYTERANGE');
    if (rangeSpec) {
      this.setByteRange(rangeSpec, previous);
    }
    // Each part starts where the previous part of the same fragment ended
    if (previous) {
      this.fragOffset = previous.fragOffset + previous.duration;
    }
  }

  // Start of the part on the media timeline: parent fragment start + offset
  get start(): number {
    return this.fragment.start + this.fragOffset;
  }

  // End of the part on the media timeline
  get end(): number {
    return this.duration + this.start;
  }

  // True once any elementary stream has been parsed from this part
  get loaded(): boolean {
    const { audio, video, audiovideo } = this.elementaryStreams;
    return Boolean(audio || video || audiovideo);
  }
}

361
server/node_modules/hls.js/src/loader/key-loader.ts generated vendored Normal file
View File

@@ -0,0 +1,361 @@
import { ErrorTypes, ErrorDetails } from '../errors';
import {
LoaderStats,
LoaderResponse,
LoaderConfiguration,
LoaderCallbacks,
Loader,
KeyLoaderContext,
PlaylistLevelType,
} from '../types/loader';
import { LoadError } from './fragment-loader';
import type { HlsConfig } from '../config';
import type { Fragment } from '../loader/fragment';
import type { ComponentAPI } from '../types/component-api';
import type { KeyLoadedData } from '../types/events';
import type { LevelKey } from './level-key';
import type EMEController from '../controller/eme-controller';
import type { MediaKeySessionContext } from '../controller/eme-controller';
import type { KeySystemFormats } from '../utils/mediakeys-helper';
/**
 * Per-key-URI loading state cached by KeyLoader so one key load is shared
 * across fragments and levels that reference the same key.
 */
export interface KeyLoaderInfo {
  // Key metadata; `decryptdata.key` is filled in once the key bytes are loaded
  decryptdata: LevelKey;
  // In-flight (or resolved) load for this key; null when no load has started
  keyLoadPromise: Promise<KeyLoadedData> | null;
  // Active network loader for this key request; null when idle
  loader: Loader<KeyLoaderContext> | null;
  // EME key session associated with this key; null until one is created
  mediaKeySessionContext: MediaKeySessionContext | null;
}
/**
 * Loads decryption keys for fragments, caching state per key URI so a key is
 * only fetched once, and delegating DRM key-system work to the eme-controller.
 */
export default class KeyLoader implements ComponentAPI {
  private readonly config: HlsConfig;
  // Cache of key loading state keyed by key URI (shared across fragments)
  public keyUriToKeyInfo: { [keyuri: string]: KeyLoaderInfo } = {};
  public emeController: EMEController | null = null;

  constructor(config: HlsConfig) {
    this.config = config;
  }

  /**
   * Abort in-flight key requests. When `type` is provided, only loaders whose
   * fragment belongs to that playlist type are aborted; others keep loading.
   */
  abort(type?: PlaylistLevelType) {
    for (const uri in this.keyUriToKeyInfo) {
      const loader = this.keyUriToKeyInfo[uri].loader;
      if (loader) {
        if (type && type !== loader.context?.frag.type) {
          // Fix: skip non-matching loaders and keep iterating. Previously this
          // was `return`, which exited the loop on the first mismatch and left
          // all remaining loaders (including matching ones) running.
          continue;
        }
        loader.abort();
      }
    }
  }

  /**
   * Drop cached EME/common-encryption key info so sessions are re-created on
   * the next media attach. Plain AES-128 keys without a session are kept.
   */
  detach() {
    for (const uri in this.keyUriToKeyInfo) {
      const keyInfo = this.keyUriToKeyInfo[uri];
      // Remove cached EME keys on detach
      if (
        keyInfo.mediaKeySessionContext ||
        keyInfo.decryptdata.isCommonEncryption
      ) {
        delete this.keyUriToKeyInfo[uri];
      }
    }
  }

  // Tear down: detach cached EME keys, destroy remaining loaders, clear cache
  destroy() {
    this.detach();
    for (const uri in this.keyUriToKeyInfo) {
      const loader = this.keyUriToKeyInfo[uri].loader;
      if (loader) {
        loader.destroy();
      }
    }
    this.keyUriToKeyInfo = {};
  }

  /**
   * Build a non-fatal NETWORK_ERROR LoadError for a failed/aborted key load.
   */
  createKeyLoadError(
    frag: Fragment,
    details: ErrorDetails = ErrorDetails.KEY_LOAD_ERROR,
    error: Error,
    networkDetails?: any,
    response?: { url: string; data: undefined; code: number; text: string },
  ): LoadError {
    return new LoadError({
      type: ErrorTypes.NETWORK_ERROR,
      details,
      fatal: false,
      frag,
      response,
      error,
      networkDetails,
    });
  }

  /**
   * Called when loading an unencrypted fragment: pre-selects a key-system
   * format for the nearest upcoming encrypted fragment so the key-system is
   * warmed up before it is needed.
   */
  loadClear(
    loadingFrag: Fragment,
    encryptedFragments: Fragment[],
  ): void | Promise<void> {
    if (this.emeController && this.config.emeEnabled) {
      // access key-system with nearest key on start (loading frag is unencrypted)
      const { sn, cc } = loadingFrag;
      for (let i = 0; i < encryptedFragments.length; i++) {
        const frag = encryptedFragments[i];
        if (
          cc <= frag.cc &&
          (sn === 'initSegment' || frag.sn === 'initSegment' || sn < frag.sn)
        ) {
          this.emeController
            .selectKeySystemFormat(frag)
            .then((keySystemFormat) => {
              frag.setKeyFormat(keySystemFormat);
            });
          break;
        }
      }
    }
  }

  /**
   * Load the decryption key for `frag`. When the fragment lists multiple key
   * formats and none is selected yet, the eme-controller selects one first.
   */
  load(frag: Fragment): Promise<KeyLoadedData> {
    if (
      !frag.decryptdata &&
      frag.encrypted &&
      this.emeController &&
      this.config.emeEnabled
    ) {
      // Multiple keys, but none selected, resolve in eme-controller
      return this.emeController
        .selectKeySystemFormat(frag)
        .then((keySystemFormat) => {
          return this.loadInternal(frag, keySystemFormat);
        });
    }
    return this.loadInternal(frag);
  }

  /**
   * Resolve the fragment's decryption data and dispatch to the EME or HTTP
   * key-loading path. Rejects with a key-load error when decryption data or
   * the key URI is missing, or when METHOD is unsupported.
   */
  loadInternal(
    frag: Fragment,
    keySystemFormat?: KeySystemFormats,
  ): Promise<KeyLoadedData> {
    if (keySystemFormat) {
      frag.setKeyFormat(keySystemFormat);
    }
    const decryptdata = frag.decryptdata;
    if (!decryptdata) {
      const error = new Error(
        keySystemFormat
          ? `Expected frag.decryptdata to be defined after setting format ${keySystemFormat}`
          : 'Missing decryption data on fragment in onKeyLoading',
      );
      return Promise.reject(
        this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, error),
      );
    }
    const uri = decryptdata.uri;
    if (!uri) {
      return Promise.reject(
        this.createKeyLoadError(
          frag,
          ErrorDetails.KEY_LOAD_ERROR,
          new Error(`Invalid key URI: "${uri}"`),
        ),
      );
    }
    let keyInfo = this.keyUriToKeyInfo[uri];
    // Cached key bytes: reuse without a network round-trip
    if (keyInfo?.decryptdata.key) {
      decryptdata.key = keyInfo.decryptdata.key;
      return Promise.resolve({ frag, keyInfo });
    }
    // Return key load promise as long as it does not have a mediakey session with an unusable key status
    if (keyInfo?.keyLoadPromise) {
      switch (keyInfo.mediaKeySessionContext?.keyStatus) {
        case undefined:
        case 'status-pending':
        case 'usable':
        case 'usable-in-future':
          return keyInfo.keyLoadPromise.then((keyLoadedData) => {
            // Return the correct fragment with updated decryptdata key and loaded keyInfo
            decryptdata.key = keyLoadedData.keyInfo.decryptdata.key;
            return { frag, keyInfo };
          });
      }
      // If we have a key session and status and it is not pending or usable, continue
      // This will go back to the eme-controller for expired keys to get a new keyLoadPromise
    }
    // Load the key or return the loading promise
    keyInfo = this.keyUriToKeyInfo[uri] = {
      decryptdata,
      keyLoadPromise: null,
      loader: null,
      mediaKeySessionContext: null,
    };
    switch (decryptdata.method) {
      case 'ISO-23001-7':
      case 'SAMPLE-AES':
      case 'SAMPLE-AES-CENC':
      case 'SAMPLE-AES-CTR':
        if (decryptdata.keyFormat === 'identity') {
          // loadKeyHTTP handles http(s) and data URLs
          return this.loadKeyHTTP(keyInfo, frag);
        }
        return this.loadKeyEME(keyInfo, frag);
      case 'AES-128':
        return this.loadKeyHTTP(keyInfo, frag);
      default:
        return Promise.reject(
          this.createKeyLoadError(
            frag,
            ErrorDetails.KEY_LOAD_ERROR,
            new Error(
              `Key supplied with unsupported METHOD: "${decryptdata.method}"`,
            ),
          ),
        );
    }
  }

  /**
   * DRM path: delegate key acquisition to the eme-controller and remember the
   * resulting media-key session on the cached keyInfo.
   */
  loadKeyEME(keyInfo: KeyLoaderInfo, frag: Fragment): Promise<KeyLoadedData> {
    const keyLoadedData: KeyLoadedData = { frag, keyInfo };
    if (this.emeController && this.config.emeEnabled) {
      const keySessionContextPromise =
        this.emeController.loadKey(keyLoadedData);
      if (keySessionContextPromise) {
        return (keyInfo.keyLoadPromise = keySessionContextPromise.then(
          (keySessionContext) => {
            keyInfo.mediaKeySessionContext = keySessionContext;
            return keyLoadedData;
          },
        )).catch((error) => {
          // Remove promise for license renewal or retry
          keyInfo.keyLoadPromise = null;
          throw error;
        });
      }
    }
    return Promise.resolve(keyLoadedData);
  }

  /**
   * HTTP(S)/data-URI path: fetch raw key bytes with the configured loader.
   * Retries are intentionally disabled here (stream-controller owns retry
   * logic, matching fragment-loader behavior).
   */
  loadKeyHTTP(keyInfo: KeyLoaderInfo, frag: Fragment): Promise<KeyLoadedData> {
    const config = this.config;
    const Loader = config.loader;
    const keyLoader = new Loader(config) as Loader<KeyLoaderContext>;
    frag.keyLoader = keyInfo.loader = keyLoader;
    return (keyInfo.keyLoadPromise = new Promise((resolve, reject) => {
      const loaderContext: KeyLoaderContext = {
        keyInfo,
        frag,
        responseType: 'arraybuffer',
        url: keyInfo.decryptdata.uri,
      };
      // maxRetry is 0 so that instead of retrying the same key on the same variant multiple times,
      // key-loader will trigger an error and rely on stream-controller to handle retry logic.
      // this will also align retry logic with fragment-loader
      const loadPolicy = config.keyLoadPolicy.default;
      const loaderConfig: LoaderConfiguration = {
        loadPolicy,
        timeout: loadPolicy.maxLoadTimeMs,
        maxRetry: 0,
        retryDelay: 0,
        maxRetryDelay: 0,
      };
      const loaderCallbacks: LoaderCallbacks<KeyLoaderContext> = {
        onSuccess: (
          response: LoaderResponse,
          stats: LoaderStats,
          context: KeyLoaderContext,
          networkDetails: any,
        ) => {
          const { frag, keyInfo, url: uri } = context;
          // Guard against the cache entry having been replaced mid-flight
          if (!frag.decryptdata || keyInfo !== this.keyUriToKeyInfo[uri]) {
            return reject(
              this.createKeyLoadError(
                frag,
                ErrorDetails.KEY_LOAD_ERROR,
                new Error('after key load, decryptdata unset or changed'),
                networkDetails,
              ),
            );
          }
          keyInfo.decryptdata.key = frag.decryptdata.key = new Uint8Array(
            response.data as ArrayBuffer,
          );
          // detach fragment key loader on load success
          frag.keyLoader = null;
          keyInfo.loader = null;
          resolve({ frag, keyInfo });
        },
        onError: (
          response: { code: number; text: string },
          context: KeyLoaderContext,
          networkDetails: any,
          stats: LoaderStats,
        ) => {
          this.resetLoader(context);
          reject(
            this.createKeyLoadError(
              frag,
              ErrorDetails.KEY_LOAD_ERROR,
              new Error(
                `HTTP Error ${response.code} loading key ${response.text}`,
              ),
              networkDetails,
              { url: loaderContext.url, data: undefined, ...response },
            ),
          );
        },
        onTimeout: (
          stats: LoaderStats,
          context: KeyLoaderContext,
          networkDetails: any,
        ) => {
          this.resetLoader(context);
          reject(
            this.createKeyLoadError(
              frag,
              ErrorDetails.KEY_LOAD_TIMEOUT,
              new Error('key loading timed out'),
              networkDetails,
            ),
          );
        },
        onAbort: (
          stats: LoaderStats,
          context: KeyLoaderContext,
          networkDetails: any,
        ) => {
          this.resetLoader(context);
          reject(
            this.createKeyLoadError(
              frag,
              ErrorDetails.INTERNAL_ABORTED,
              new Error('key loading aborted'),
              networkDetails,
            ),
          );
        },
      };
      keyLoader.load(loaderContext, loaderConfig, loaderCallbacks);
    }));
  }

  // Destroy a failed/aborted loader and evict its cache entry
  private resetLoader(context: KeyLoaderContext) {
    const { frag, keyInfo, url: uri } = context;
    const loader = keyInfo.loader;
    if (frag.keyLoader === loader) {
      frag.keyLoader = null;
      keyInfo.loader = null;
    }
    delete this.keyUriToKeyInfo[uri];
    if (loader) {
      loader.destroy();
    }
  }
}

155
server/node_modules/hls.js/src/loader/level-details.ts generated vendored Normal file
View File

@@ -0,0 +1,155 @@
import { Part } from './fragment';
import type { Fragment } from './fragment';
import type { AttrList } from '../utils/attr-list';
import type { DateRange } from './date-range';
import type { VariableMap } from '../types/level';
// Fallback when neither TARGETDURATION nor an average is available
const DEFAULT_TARGET_DURATION = 10;

/**
 * Object representing parsed data from an HLS Media Playlist. Found in {@link hls.js#Level.details}.
 */
export class LevelDetails {
  public PTSKnown: boolean = false;
  public alignedSliding: boolean = false;
  public averagetargetduration?: number;
  public endCC: number = 0;
  public endSN: number = 0;
  public fragments: Fragment[];
  public fragmentHint?: Fragment;
  public partList: Part[] | null = null;
  public dateRanges: Record<string, DateRange>;
  public live: boolean = true;
  public ageHeader: number = 0;
  public advancedDateTime?: number;
  public updated: boolean = true;
  public advanced: boolean = true;
  public availabilityDelay?: number; // Manifest reload synchronization
  public misses: number = 0;
  public startCC: number = 0;
  public startSN: number = 0;
  public startTimeOffset: number | null = null;
  public targetduration: number = 0;
  public totalduration: number = 0;
  public type: string | null = null;
  public url: string;
  public m3u8: string = '';
  public version: number | null = null;
  public canBlockReload: boolean = false;
  public canSkipUntil: number = 0;
  public canSkipDateRanges: boolean = false;
  public skippedSegments: number = 0;
  public recentlyRemovedDateranges?: string[];
  public partHoldBack: number = 0;
  public holdBack: number = 0;
  public partTarget: number = 0;
  public preloadHint?: AttrList;
  public renditionReports?: AttrList[];
  public tuneInGoal: number = 0;
  public deltaUpdateFailed?: boolean;
  public driftStartTime: number = 0;
  public driftEndTime: number = 0;
  public driftStart: number = 0;
  public driftEnd: number = 0;
  public encryptedFragments: Fragment[];
  public playlistParsingError: Error | null = null;
  public variableList: VariableMap | null = null;
  public hasVariableRefs = false;

  constructor(baseUrl: string) {
    this.fragments = [];
    this.encryptedFragments = [];
    this.dateRanges = {};
    this.url = baseUrl;
  }

  /**
   * Compare this playlist against the previously loaded one and update the
   * `updated`/`advanced`/`misses` bookkeeping used for live reload timing.
   */
  reloaded(previous: LevelDetails | undefined) {
    if (!previous) {
      this.advanced = true;
      this.updated = true;
      return;
    }
    const snDelta = this.lastPartSn - previous.lastPartSn;
    const indexDelta = this.lastPartIndex - previous.lastPartIndex;
    // Changed when the ending sequence number or part moved, or no longer live
    this.updated =
      this.endSN !== previous.endSN || !!indexDelta || !!snDelta || !this.live;
    // Advanced only when new media appeared at the live edge
    this.advanced =
      this.endSN > previous.endSN ||
      snDelta > 0 ||
      (snDelta === 0 && indexDelta > 0);
    if (this.updated || this.advanced) {
      this.misses = Math.floor(previous.misses * 0.6);
    } else {
      this.misses = previous.misses + 1;
    }
    this.availabilityDelay = previous.availabilityDelay;
  }

  // True when the last fragment carries a finite program date-time
  get hasProgramDateTime(): boolean {
    const frags = this.fragments;
    if (!frags.length) {
      return false;
    }
    return Number.isFinite(frags[frags.length - 1].programDateTime as number);
  }

  // Preferred target duration: average, then declared, then the default
  get levelTargetDuration(): number {
    return (
      this.averagetargetduration ||
      this.targetduration ||
      DEFAULT_TARGET_DURATION
    );
  }

  // Ratio of media-time advance to wall-clock time (1 when not yet measurable)
  get drift(): number {
    const elapsedMs = this.driftEndTime - this.driftStartTime;
    if (elapsedMs > 0) {
      return ((this.driftEnd - this.driftStart) * 1000) / elapsedMs;
    }
    return 1;
  }

  // Live edge: end of the last part when parts exist, else last fragment end
  get edge(): number {
    return this.partEnd || this.fragmentEnd;
  }

  get partEnd(): number {
    const parts = this.partList;
    if (parts?.length) {
      return parts[parts.length - 1].end;
    }
    return this.fragmentEnd;
  }

  get fragmentEnd(): number {
    const frags = this.fragments;
    if (frags?.length) {
      return frags[frags.length - 1].end;
    }
    return 0;
  }

  // Seconds since the playlist last advanced; 0 when unknown
  get age(): number {
    const advancedAt = this.advancedDateTime;
    if (advancedAt) {
      return Math.max(Date.now() - advancedAt, 0) / 1000;
    }
    return 0;
  }

  get lastPartIndex(): number {
    const parts = this.partList;
    if (parts?.length) {
      return parts[parts.length - 1].index;
    }
    return -1;
  }

  // Sequence number of the fragment owning the last part, else endSN
  get lastPartSn(): number {
    const parts = this.partList;
    if (parts?.length) {
      return parts[parts.length - 1].fragment.sn as number;
    }
    return this.endSN;
  }
}

186
server/node_modules/hls.js/src/loader/level-key.ts generated vendored Normal file
View File

@@ -0,0 +1,186 @@
import {
changeEndianness,
convertDataUriToArrayBytes,
} from '../utils/keysystem-util';
import { KeySystemFormats, parsePlayReadyWRM } from '../utils/mediakeys-helper';
import { mp4pssh } from '../utils/mp4-tools';
import { logger } from '../utils/logger';
import { base64Decode } from '../utils/numeric-encoding-utils';
// Module-level cache of synthesized 16-byte key IDs per key URI, used when a
// real key ID cannot be extracted from the key data itself
let keyUriToKeyIdMap: { [uri: string]: Uint8Array } = {};

/**
 * Decryption parameters parsed from an EXT-X-KEY / EXT-X-SESSION-KEY tag.
 */
export interface DecryptData {
  // Key URI as given in the playlist (http(s) or data: URI)
  uri: string;
  // Encryption METHOD attribute (e.g. 'NONE', 'AES-128', 'SAMPLE-AES')
  method: string;
  // KEYFORMAT attribute ('identity' or a DRM key-system format)
  keyFormat: string;
  // KEYFORMATVERSIONS attribute values
  keyFormatVersions: number[];
  // Initialization vector; null until provided or derived
  iv: Uint8Array | null;
  // Raw key bytes once loaded; null before loading
  key: Uint8Array | null;
  // 16-byte key identifier when available
  keyId: Uint8Array | null;
  // PSSH box bytes for DRM key systems; null otherwise
  pssh: Uint8Array | null;
  // True when a METHOD other than 'NONE' is set
  encrypted: boolean;
  // True for encrypted methods other than 'AES-128' (CENC/SAMPLE-AES family)
  isCommonEncryption: boolean;
}
/**
 * Concrete DecryptData implementation for a playlist key. Immutable method /
 * URI / format metadata with mutable key material filled in as it is loaded.
 */
export class LevelKey implements DecryptData {
  public readonly uri: string;
  public readonly method: string;
  public readonly keyFormat: string;
  public readonly keyFormatVersions: number[];
  public readonly encrypted: boolean;
  public readonly isCommonEncryption: boolean;
  public iv: Uint8Array | null = null;
  public key: Uint8Array | null = null;
  public keyId: Uint8Array | null = null;
  public pssh: Uint8Array | null = null;

  // Reset the module-level URI-to-keyId cache
  static clearKeyUriToKeyIdMap() {
    keyUriToKeyIdMap = {};
  }

  constructor(
    method: string,
    uri: string,
    format: string,
    formatversions: number[] = [1],
    iv: Uint8Array | null = null,
  ) {
    this.method = method;
    this.uri = uri;
    this.keyFormat = format;
    this.keyFormatVersions = formatversions;
    this.iv = iv;
    // 'NONE' (or an empty method) means the content is not encrypted
    this.encrypted = method ? method !== 'NONE' : false;
    // Everything encrypted other than whole-segment AES-128 is treated as
    // common encryption (CENC / SAMPLE-AES family)
    this.isCommonEncryption = this.encrypted && method !== 'AES-128';
  }

  // Whether this key's METHOD/KEYFORMAT combination can be handled by this build
  public isSupported(): boolean {
    // If it's Segment encryption or No encryption, just select that key system
    if (this.method) {
      if (this.method === 'AES-128' || this.method === 'NONE') {
        return true;
      }
      if (this.keyFormat === 'identity') {
        // Maintain support for clear SAMPLE-AES with MPEG-3 TS
        return this.method === 'SAMPLE-AES';
      } else if (__USE_EME_DRM__) {
        // DRM formats are only supported when EME support is compiled in
        switch (this.keyFormat) {
          case KeySystemFormats.FAIRPLAY:
          case KeySystemFormats.WIDEVINE:
          case KeySystemFormats.PLAYREADY:
          case KeySystemFormats.CLEARKEY:
            return (
              [
                'ISO-23001-7',
                'SAMPLE-AES',
                'SAMPLE-AES-CENC',
                'SAMPLE-AES-CTR',
              ].indexOf(this.method) !== -1
            );
        }
      }
    }
    return false;
  }

  /**
   * Return decryption data usable for segment `sn`:
   * - null when not encrypted or there is no key URI;
   * - for AES-128 without an explicit IV, a new LevelKey whose IV is derived
   *   from the segment number (0 for init segments, which is a compliance
   *   fallback — the playlist should have provided an IV);
   * - otherwise this key itself, with `keyId`/`pssh` initialized from the key
   *   URI on EME builds (per-format extraction, with a synthesized per-URI
   *   keyId as last resort).
   */
  public getDecryptData(sn: number | 'initSegment'): LevelKey | null {
    if (!this.encrypted || !this.uri) {
      return null;
    }
    if (this.method === 'AES-128' && this.uri && !this.iv) {
      if (typeof sn !== 'number') {
        // We are fetching decryption data for a initialization segment
        // If the segment was encrypted with AES-128
        // It must have an IV defined. We cannot substitute the Segment Number in.
        if (this.method === 'AES-128' && !this.iv) {
          logger.warn(
            `missing IV for initialization segment with method="${this.method}" - compliance issue`,
          );
        }
        // Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
        sn = 0;
      }
      const iv = createInitializationVector(sn);
      const decryptdata = new LevelKey(
        this.method,
        this.uri,
        'identity',
        this.keyFormatVersions,
        iv,
      );
      return decryptdata;
    }
    if (!__USE_EME_DRM__) {
      return this;
    }
    // Initialize keyId if possible
    const keyBytes = convertDataUriToArrayBytes(this.uri);
    if (keyBytes) {
      switch (this.keyFormat) {
        case KeySystemFormats.WIDEVINE:
          // Setting `pssh` on this LevelKey/DecryptData allows HLS.js to generate a session using
          // the playlist-key before the "encrypted" event. (Comment out to only use "encrypted" path.)
          this.pssh = keyBytes;
          // In case of widevine keyID is embedded in PSSH box. Read Key ID.
          if (keyBytes.length >= 22) {
            this.keyId = keyBytes.subarray(
              keyBytes.length - 22,
              keyBytes.length - 6,
            );
          }
          break;
        case KeySystemFormats.PLAYREADY: {
          const PlayReadyKeySystemUUID = new Uint8Array([
            0x9a, 0x04, 0xf0, 0x79, 0x98, 0x40, 0x42, 0x86, 0xab, 0x92, 0xe6,
            0x5b, 0xe0, 0x88, 0x5f, 0x95,
          ]);
          // Setting `pssh` on this LevelKey/DecryptData allows HLS.js to generate a session using
          // the playlist-key before the "encrypted" event. (Comment out to only use "encrypted" path.)
          this.pssh = mp4pssh(PlayReadyKeySystemUUID, null, keyBytes);
          this.keyId = parsePlayReadyWRM(keyBytes);
          break;
        }
        default: {
          // Other formats: take the first 16 bytes as keyId, left-padding
          // shorter values with zeros
          let keydata = keyBytes.subarray(0, 16);
          if (keydata.length !== 16) {
            const padded = new Uint8Array(16);
            padded.set(keydata, 16 - keydata.length);
            keydata = padded;
          }
          this.keyId = keydata;
          break;
        }
      }
    }
    // Default behavior: assign a new keyId for each uri
    if (!this.keyId || this.keyId.byteLength !== 16) {
      let keyId = keyUriToKeyIdMap[this.uri];
      if (!keyId) {
        const val =
          Object.keys(keyUriToKeyIdMap).length % Number.MAX_SAFE_INTEGER;
        keyId = new Uint8Array(16);
        const dv = new DataView(keyId.buffer, 12, 4); // Just set the last 4 bytes
        dv.setUint32(0, val);
        keyUriToKeyIdMap[this.uri] = keyId;
      }
      this.keyId = keyId;
    }
    return this;
  }
}
/**
 * Build a 16-byte AES-128 IV from a segment number: the number is written
 * big-endian into the last four bytes; the leading twelve bytes stay zero.
 */
function createInitializationVector(segmentNumber: number): Uint8Array {
  const iv = new Uint8Array(16);
  // Big-endian 32-bit write at offset 12 — same bytes as shifting/masking
  // the segment number manually.
  new DataView(iv.buffer).setUint32(12, segmentNumber);
  return iv;
}

17
server/node_modules/hls.js/src/loader/load-stats.ts generated vendored Normal file
View File

@@ -0,0 +1,17 @@
import type {
HlsPerformanceTiming,
HlsProgressivePerformanceTiming,
LoaderStats,
} from '../types/loader';
/**
 * Default LoaderStats implementation: byte counts and timing marks collected
 * over the lifetime of a single load.
 */
export class LoadStats implements LoaderStats {
  // True when the request was aborted before completing
  aborted: boolean = false;
  // Bytes received so far
  loaded: number = 0;
  // Number of retries performed for this load
  retry: number = 0;
  // Total bytes expected; 0 when unknown
  total: number = 0;
  // Number of progressive chunks received
  chunkCount: number = 0;
  // Bandwidth estimate recorded for this load; 0 when not set
  bwEstimate: number = 0;
  // Network timing marks: request start, first byte, load end
  loading: HlsProgressivePerformanceTiming = { start: 0, first: 0, end: 0 };
  // Parse/transmux timing marks
  parsing: HlsPerformanceTiming = { start: 0, end: 0 };
  // Buffer-append timing marks
  buffering: HlsProgressivePerformanceTiming = { start: 0, first: 0, end: 0 };
}

915
server/node_modules/hls.js/src/loader/m3u8-parser.ts generated vendored Normal file
View File

@@ -0,0 +1,915 @@
import { buildAbsoluteURL } from 'url-toolkit';
import { DateRange } from './date-range';
import { Fragment, Part } from './fragment';
import { LevelDetails } from './level-details';
import { LevelKey } from './level-key';
import { AttrList } from '../utils/attr-list';
import { logger } from '../utils/logger';
import {
addVariableDefinition,
hasVariableReferences,
importVariableDefinition,
substituteVariables,
substituteVariablesInAttributes,
} from '../utils/variable-substitution';
import { isCodecType } from '../utils/codecs';
import type { CodecType } from '../utils/codecs';
import type { MediaPlaylist, MediaAttributes } from '../types/media-playlist';
import type { PlaylistLevelType } from '../types/loader';
import type { LevelAttributes, LevelParsed, VariableMap } from '../types/level';
import type { ContentSteeringOptions } from '../types/events';
// Fragment list that may contain holes while a playlist is being parsed
type M3U8ParserFragments = Array<Fragment | null>;

/**
 * Result of parsing a multivariant (master) playlist.
 */
export type ParsedMultivariantPlaylist = {
  // EXT-X-CONTENT-STEERING options, when the tag is present
  contentSteering: ContentSteeringOptions | null;
  // Variant streams from EXT-X-STREAM-INF tags
  levels: LevelParsed[];
  // Parse error, e.g. when no usable levels were found
  playlistParsingError: Error | null;
  // EXT-X-SESSION-DATA entries keyed by DATA-ID
  sessionData: Record<string, AttrList> | null;
  // Supported EXT-X-SESSION-KEY entries
  sessionKeys: LevelKey[] | null;
  // EXT-X-START time offset
  startTimeOffset: number | null;
  // EXT-X-DEFINE variables (when variable substitution is compiled in)
  variableList: VariableMap | null;
  // True when the playlist text contains variable references
  hasVariableRefs: boolean;
};

// EXT-X-MEDIA renditions grouped by their TYPE attribute
type ParsedMultivariantMediaOptions = {
  AUDIO?: MediaPlaylist[];
  SUBTITLES?: MediaPlaylist[];
  'CLOSED-CAPTIONS'?: MediaPlaylist[];
};
// Matches EXT-X-STREAM-INF (group 1 = attributes, group 2 = the variant URI on
// a following line) and the single-line master tags
// SESSION-DATA/SESSION-KEY/DEFINE/CONTENT-STEERING/START (group 3 = tag name,
// group 4 = attributes)
const MASTER_PLAYLIST_REGEX =
  /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
// Matches each EXT-X-MEDIA tag; group 1 is its attribute list
const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
const IS_MEDIA_PLAYLIST = /^#EXT(?:INF|-X-TARGETDURATION):/m; // Handle empty Media Playlist (first EXTINF not signaled, but TARGETDURATION present)
// Fast single pass over a media playlist: captures segment-oriented data;
// every other tag matches the final alternative with all groups empty
const LEVEL_PLAYLIST_REGEX_FAST = new RegExp(
  [
    /#EXTINF:\s*(\d*(?:\.\d+)?)(?:,(.*)\s+)?/.source, // duration (#EXTINF:<duration>,<title>), group 1 => duration, group 2 => title
    /(?!#) *(\S[^\r\n]*)/.source, // segment URI, group 3 => the URI (note newline is not eaten)
    /#EXT-X-BYTERANGE:*(.+)/.source, // next segment's byterange, group 4 => range spec (x@y)
    /#EXT-X-PROGRAM-DATE-TIME:(.+)/.source, // next segment's program date/time group 5 => the datetime spec
    /#.*/.source, // All other non-segment oriented tags will match with all groups empty
  ].join('|'),
  'g',
);
// Slow path for the remaining tags, classified by name and value shape
// (attribute-list tags, integer-valued tags, bare flags, then catch-alls)
const LEVEL_PLAYLIST_REGEX_SLOW = new RegExp(
  [
    /#(EXTM3U)/.source,
    /#EXT-X-(DATERANGE|DEFINE|KEY|MAP|PART|PART-INF|PLAYLIST-TYPE|PRELOAD-HINT|RENDITION-REPORT|SERVER-CONTROL|SKIP|START):(.+)/
      .source,
    /#EXT-X-(BITRATE|DISCONTINUITY-SEQUENCE|MEDIA-SEQUENCE|TARGETDURATION|VERSION): *(\d+)/
      .source,
    /#EXT-X-(DISCONTINUITY|ENDLIST|GAP|INDEPENDENT-SEGMENTS)/.source,
    /(#)([^:]*):(.*)/.source,
    /(#)(.*)(?:.*)\r?\n?/.source,
  ].join('|'),
);
export default class M3U8Parser {
static findGroup(
groups: (
| { id?: string; audioCodec?: string }
| { id?: string; textCodec?: string }
)[],
mediaGroupId: string,
):
| { id?: string; audioCodec?: string }
| { id?: string; textCodec?: string }
| undefined {
for (let i = 0; i < groups.length; i++) {
const group = groups[i];
if (group.id === mediaGroupId) {
return group;
}
}
}
static resolve(url, baseUrl) {
return buildAbsoluteURL(baseUrl, url, { alwaysNormalize: true });
}
  // True when the text is a media playlist: detects EXTINF or
  // EXT-X-TARGETDURATION (the latter covers empty playlists with no segments)
  static isMediaPlaylist(str: string): boolean {
    return IS_MEDIA_PLAYLIST.test(str);
  }
  /**
   * Parse a multivariant (master) playlist: EXT-X-STREAM-INF variants plus the
   * SESSION-DATA, SESSION-KEY, DEFINE, CONTENT-STEERING and START tags.
   * Variants containing unknown codecs are filtered out unless doing so would
   * remove every level.
   *
   * @param string playlist text
   * @param baseurl URL the playlist was loaded from (for resolving URIs)
   */
  static parseMasterPlaylist(
    string: string,
    baseurl: string,
  ): ParsedMultivariantPlaylist {
    const hasVariableRefs = __USE_VARIABLE_SUBSTITUTION__
      ? hasVariableReferences(string)
      : false;
    const parsed: ParsedMultivariantPlaylist = {
      contentSteering: null,
      levels: [],
      playlistParsingError: null,
      sessionData: null,
      sessionKeys: null,
      startTimeOffset: null,
      variableList: null,
      hasVariableRefs,
    };
    const levelsWithKnownCodecs: LevelParsed[] = [];
    // Reset stateful global regex before reuse
    MASTER_PLAYLIST_REGEX.lastIndex = 0;
    let result: RegExpExecArray | null;
    while ((result = MASTER_PLAYLIST_REGEX.exec(string)) != null) {
      if (result[1]) {
        // '#EXT-X-STREAM-INF' is found, parse level tag in group 1
        const attrs = new AttrList(result[1]) as LevelAttributes;
        if (__USE_VARIABLE_SUBSTITUTION__) {
          substituteVariablesInAttributes(parsed, attrs, [
            'CODECS',
            'SUPPLEMENTAL-CODECS',
            'ALLOWED-CPC',
            'PATHWAY-ID',
            'STABLE-VARIANT-ID',
            'AUDIO',
            'VIDEO',
            'SUBTITLES',
            'CLOSED-CAPTIONS',
            'NAME',
          ]);
        }
        // Group 2 is the variant URI found on the line following the tag
        const uri = __USE_VARIABLE_SUBSTITUTION__
          ? substituteVariables(parsed, result[2])
          : result[2];
        const level: LevelParsed = {
          attrs,
          bitrate:
            attrs.decimalInteger('BANDWIDTH') ||
            attrs.decimalInteger('AVERAGE-BANDWIDTH'),
          name: attrs.NAME,
          url: M3U8Parser.resolve(uri, baseurl),
        };
        const resolution = attrs.decimalResolution('RESOLUTION');
        if (resolution) {
          level.width = resolution.width;
          level.height = resolution.height;
        }
        setCodecs(attrs.CODECS, level);
        // Track levels whose codecs were all recognized, for filtering below
        if (!level.unknownCodecs?.length) {
          levelsWithKnownCodecs.push(level);
        }
        parsed.levels.push(level);
      } else if (result[3]) {
        // Other master tags: group 3 is the tag name, group 4 its attributes
        const tag = result[3];
        const attributes = result[4];
        switch (tag) {
          case 'SESSION-DATA': {
            // #EXT-X-SESSION-DATA
            const sessionAttrs = new AttrList(attributes);
            if (__USE_VARIABLE_SUBSTITUTION__) {
              substituteVariablesInAttributes(parsed, sessionAttrs, [
                'DATA-ID',
                'LANGUAGE',
                'VALUE',
                'URI',
              ]);
            }
            const dataId = sessionAttrs['DATA-ID'];
            if (dataId) {
              if (parsed.sessionData === null) {
                parsed.sessionData = {};
              }
              parsed.sessionData[dataId] = sessionAttrs;
            }
            break;
          }
          case 'SESSION-KEY': {
            // #EXT-X-SESSION-KEY
            const sessionKey = parseKey(attributes, baseurl, parsed);
            if (sessionKey.encrypted && sessionKey.isSupported()) {
              if (parsed.sessionKeys === null) {
                parsed.sessionKeys = [];
              }
              parsed.sessionKeys.push(sessionKey);
            } else {
              logger.warn(
                `[Keys] Ignoring invalid EXT-X-SESSION-KEY tag: "${attributes}"`,
              );
            }
            break;
          }
          case 'DEFINE': {
            // #EXT-X-DEFINE
            if (__USE_VARIABLE_SUBSTITUTION__) {
              const variableAttributes = new AttrList(attributes);
              substituteVariablesInAttributes(parsed, variableAttributes, [
                'NAME',
                'VALUE',
                'QUERYPARAM',
              ]);
              addVariableDefinition(parsed, variableAttributes, baseurl);
            }
            break;
          }
          case 'CONTENT-STEERING': {
            // #EXT-X-CONTENT-STEERING
            const contentSteeringAttributes = new AttrList(attributes);
            if (__USE_VARIABLE_SUBSTITUTION__) {
              substituteVariablesInAttributes(
                parsed,
                contentSteeringAttributes,
                ['SERVER-URI', 'PATHWAY-ID'],
              );
            }
            parsed.contentSteering = {
              uri: M3U8Parser.resolve(
                contentSteeringAttributes['SERVER-URI'],
                baseurl,
              ),
              pathwayId: contentSteeringAttributes['PATHWAY-ID'] || '.',
            };
            break;
          }
          case 'START': {
            // #EXT-X-START
            parsed.startTimeOffset = parseStartTimeOffset(attributes);
            break;
          }
          default:
            break;
        }
      }
    }
    // Filter out levels with unknown codecs if it does not remove all levels
    const stripUnknownCodecLevels =
      levelsWithKnownCodecs.length > 0 &&
      levelsWithKnownCodecs.length < parsed.levels.length;
    parsed.levels = stripUnknownCodecLevels
      ? levelsWithKnownCodecs
      : parsed.levels;
    if (parsed.levels.length === 0) {
      parsed.playlistParsingError = new Error('no levels found in manifest');
    }
    return parsed;
  }
  /**
   * Parse the EXT-X-MEDIA tags of a multivariant playlist into rendition
   * lists grouped by TYPE (AUDIO / SUBTITLES / CLOSED-CAPTIONS). Codec hints
   * are assigned by matching each rendition's GROUP-ID against the variants
   * parsed by `parseMasterPlaylist`.
   */
  static parseMasterPlaylistMedia(
    string: string,
    baseurl: string,
    parsed: ParsedMultivariantPlaylist,
  ): ParsedMultivariantMediaOptions {
    let result: RegExpExecArray | null;
    const results: ParsedMultivariantMediaOptions = {};
    const levels = parsed.levels;
    // Candidate group ids (with codecs) per media type, taken from the variants
    const groupsByType = {
      AUDIO: levels.map((level: LevelParsed) => ({
        id: level.attrs.AUDIO,
        audioCodec: level.audioCodec,
      })),
      SUBTITLES: levels.map((level: LevelParsed) => ({
        id: level.attrs.SUBTITLES,
        textCodec: level.textCodec,
      })),
      'CLOSED-CAPTIONS': [],
    };
    let id = 0;
    // Reset stateful global regex before reuse
    MASTER_PLAYLIST_MEDIA_REGEX.lastIndex = 0;
    while ((result = MASTER_PLAYLIST_MEDIA_REGEX.exec(string)) !== null) {
      const attrs = new AttrList(result[1]) as MediaAttributes;
      const type = attrs.TYPE;
      if (type) {
        const groups: (typeof groupsByType)[keyof typeof groupsByType] =
          groupsByType[type];
        const medias: MediaPlaylist[] = results[type] || [];
        results[type] = medias;
        if (__USE_VARIABLE_SUBSTITUTION__) {
          substituteVariablesInAttributes(parsed, attrs, [
            'URI',
            'GROUP-ID',
            'LANGUAGE',
            'ASSOC-LANGUAGE',
            'STABLE-RENDITION-ID',
            'NAME',
            'INSTREAM-ID',
            'CHARACTERISTICS',
            'CHANNELS',
          ]);
        }
        const lang = attrs.LANGUAGE;
        const assocLang = attrs['ASSOC-LANGUAGE'];
        const channels = attrs.CHANNELS;
        const characteristics = attrs.CHARACTERISTICS;
        const instreamId = attrs['INSTREAM-ID'];
        const media: MediaPlaylist = {
          attrs,
          bitrate: 0,
          id: id++,
          groupId: attrs['GROUP-ID'] || '',
          name: attrs.NAME || lang || '',
          type,
          default: attrs.bool('DEFAULT'),
          autoselect: attrs.bool('AUTOSELECT'),
          forced: attrs.bool('FORCED'),
          lang,
          url: attrs.URI ? M3U8Parser.resolve(attrs.URI, baseurl) : '',
        };
        // Optional attributes are only set when present in the tag
        if (assocLang) {
          media.assocLang = assocLang;
        }
        if (channels) {
          media.channels = channels;
        }
        if (characteristics) {
          media.characteristics = characteristics;
        }
        if (instreamId) {
          media.instreamId = instreamId;
        }
        if (groups?.length) {
          // If there are audio or text groups signalled in the manifest, let's look for a matching codec string for this track
          // If we don't find the track signalled, lets use the first audio groups codec we have
          // Acting as a best guess
          const groupCodec =
            M3U8Parser.findGroup(groups, media.groupId as string) || groups[0];
          assignCodec(media, groupCodec, 'audioCodec');
          assignCodec(media, groupCodec, 'textCodec');
        }
        medias.push(media);
      }
    }
    return results;
  }
/**
 * Parses a media (level/track) playlist into a LevelDetails object:
 * fragments, parts, keys, date ranges and live/LL-HLS server-control data.
 *
 * @param string - the playlist text
 * @param baseurl - URL the playlist was loaded from (used to resolve relative URIs)
 * @param id - level/track index assigned to each parsed fragment
 * @param type - playlist type (main/audio/subtitle)
 * @param levelUrlId - redundant-stream url index (not referenced in this body; kept for API compatibility)
 * @param multivariantVariableList - variables imported from the multivariant playlist (EXT-X-DEFINE IMPORT)
 */
static parseLevelPlaylist(
  string: string,
  baseurl: string,
  id: number,
  type: PlaylistLevelType,
  levelUrlId: number,
  multivariantVariableList: VariableMap | null,
): LevelDetails {
  const level = new LevelDetails(baseurl);
  const fragments: M3U8ParserFragments = level.fragments;
  // The most recent init segment seen (applies to all subsequent segments)
  let currentInitSegment: Fragment | null = null;
  // Parser state carried across tags while scanning the playlist line by line
  let currentSN = 0;
  let currentPart = 0;
  let totalduration = 0;
  let discontinuityCounter = 0;
  let prevFrag: Fragment | null = null;
  let frag: Fragment = new Fragment(type, baseurl);
  let result: RegExpExecArray | RegExpMatchArray | null;
  let i: number;
  let levelkeys: { [key: string]: LevelKey } | undefined;
  let firstPdtIndex = -1;
  let createNextFrag = false;
  let nextByteRange: string | null = null;
  LEVEL_PLAYLIST_REGEX_FAST.lastIndex = 0;
  level.m3u8 = string;
  level.hasVariableRefs = __USE_VARIABLE_SUBSTITUTION__
    ? hasVariableReferences(string)
    : false;
  // The fast regex matches the common per-segment lines (EXTINF, URI,
  // BYTERANGE, PROGRAM-DATE-TIME); everything else falls through to the
  // slow tag regex in the final else branch.
  while ((result = LEVEL_PLAYLIST_REGEX_FAST.exec(string)) !== null) {
    if (createNextFrag) {
      createNextFrag = false;
      frag = new Fragment(type, baseurl);
      // setup the next fragment for part loading
      frag.start = totalduration;
      frag.sn = currentSN;
      frag.cc = discontinuityCounter;
      frag.level = id;
      if (currentInitSegment) {
        frag.initSegment = currentInitSegment;
        frag.rawProgramDateTime = currentInitSegment.rawProgramDateTime;
        currentInitSegment.rawProgramDateTime = null;
        if (nextByteRange) {
          frag.setByteRange(nextByteRange);
          nextByteRange = null;
        }
      }
    }
    const duration = result[1];
    if (duration) {
      // INF
      frag.duration = parseFloat(duration);
      // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
      const title = (' ' + result[2]).slice(1);
      frag.title = title || null;
      frag.tagList.push(title ? ['INF', duration, title] : ['INF', duration]);
    } else if (result[3]) {
      // url
      if (Number.isFinite(frag.duration)) {
        frag.start = totalduration;
        if (levelkeys) {
          setFragLevelKeys(frag, levelkeys, level);
        }
        frag.sn = currentSN;
        frag.level = id;
        frag.cc = discontinuityCounter;
        fragments.push(frag);
        // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
        const uri = (' ' + result[3]).slice(1);
        frag.relurl = __USE_VARIABLE_SUBSTITUTION__
          ? substituteVariables(level, uri)
          : uri;
        assignProgramDateTime(frag, prevFrag);
        prevFrag = frag;
        totalduration += frag.duration;
        currentSN++;
        currentPart = 0;
        createNextFrag = true;
      }
    } else if (result[4]) {
      // X-BYTERANGE
      const data = (' ' + result[4]).slice(1);
      if (prevFrag) {
        frag.setByteRange(data, prevFrag);
      } else {
        frag.setByteRange(data);
      }
    } else if (result[5]) {
      // PROGRAM-DATE-TIME
      // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
      frag.rawProgramDateTime = (' ' + result[5]).slice(1);
      frag.tagList.push(['PROGRAM-DATE-TIME', frag.rawProgramDateTime]);
      if (firstPdtIndex === -1) {
        firstPdtIndex = fragments.length;
      }
    } else {
      result = result[0].match(LEVEL_PLAYLIST_REGEX_SLOW);
      if (!result) {
        logger.warn('No matches on slow regex match for level playlist!');
        continue;
      }
      // Find the first defined capture group: it holds the tag name
      for (i = 1; i < result.length; i++) {
        if (typeof result[i] !== 'undefined') {
          break;
        }
      }
      // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
      const tag = (' ' + result[i]).slice(1);
      const value1 = (' ' + result[i + 1]).slice(1);
      const value2 = result[i + 2] ? (' ' + result[i + 2]).slice(1) : '';
      switch (tag) {
        case 'PLAYLIST-TYPE':
          level.type = value1.toUpperCase();
          break;
        case 'MEDIA-SEQUENCE':
          currentSN = level.startSN = parseInt(value1);
          break;
        case 'SKIP': {
          // Delta playlist: account for segments the server omitted
          const skipAttrs = new AttrList(value1);
          if (__USE_VARIABLE_SUBSTITUTION__) {
            substituteVariablesInAttributes(level, skipAttrs, [
              'RECENTLY-REMOVED-DATERANGES',
            ]);
          }
          const skippedSegments =
            skipAttrs.decimalInteger('SKIPPED-SEGMENTS');
          if (Number.isFinite(skippedSegments)) {
            level.skippedSegments = skippedSegments;
            // This will result in fragments[] containing undefined values, which we will fill in with `mergeDetails`
            for (let i = skippedSegments; i--; ) {
              fragments.unshift(null);
            }
            currentSN += skippedSegments;
          }
          const recentlyRemovedDateranges = skipAttrs.enumeratedString(
            'RECENTLY-REMOVED-DATERANGES',
          );
          if (recentlyRemovedDateranges) {
            level.recentlyRemovedDateranges =
              recentlyRemovedDateranges.split('\t');
          }
          break;
        }
        case 'TARGETDURATION':
          level.targetduration = Math.max(parseInt(value1), 1);
          break;
        case 'VERSION':
          level.version = parseInt(value1);
          break;
        case 'INDEPENDENT-SEGMENTS':
        case 'EXTM3U':
          break;
        case 'ENDLIST':
          level.live = false;
          break;
        case '#':
          // Unrecognized custom tag: preserved on the fragment's tag list
          if (value1 || value2) {
            frag.tagList.push(value2 ? [value1, value2] : [value1]);
          }
          break;
        case 'DISCONTINUITY':
          discontinuityCounter++;
          frag.tagList.push(['DIS']);
          break;
        case 'GAP':
          frag.gap = true;
          frag.tagList.push([tag]);
          break;
        case 'BITRATE':
          frag.tagList.push([tag, value1]);
          break;
        case 'DATERANGE': {
          const dateRangeAttr = new AttrList(value1);
          if (__USE_VARIABLE_SUBSTITUTION__) {
            substituteVariablesInAttributes(level, dateRangeAttr, [
              'ID',
              'CLASS',
              'START-DATE',
              'END-DATE',
              'SCTE35-CMD',
              'SCTE35-OUT',
              'SCTE35-IN',
            ]);
            substituteVariablesInAttributes(
              level,
              dateRangeAttr,
              dateRangeAttr.clientAttrs,
            );
          }
          const dateRange = new DateRange(
            dateRangeAttr,
            level.dateRanges[dateRangeAttr.ID],
          );
          if (dateRange.isValid || level.skippedSegments) {
            level.dateRanges[dateRange.id] = dateRange;
          } else {
            logger.warn(`Ignoring invalid DATERANGE tag: "${value1}"`);
          }
          // Add to fragment tag list for backwards compatibility (< v1.2.0)
          frag.tagList.push(['EXT-X-DATERANGE', value1]);
          break;
        }
        case 'DEFINE': {
          if (__USE_VARIABLE_SUBSTITUTION__) {
            const variableAttributes = new AttrList(value1);
            substituteVariablesInAttributes(level, variableAttributes, [
              'NAME',
              'VALUE',
              'IMPORT',
              'QUERYPARAM',
            ]);
            if ('IMPORT' in variableAttributes) {
              importVariableDefinition(
                level,
                variableAttributes,
                multivariantVariableList,
              );
            } else {
              addVariableDefinition(level, variableAttributes, baseurl);
            }
          }
          break;
        }
        case 'DISCONTINUITY-SEQUENCE':
          discontinuityCounter = parseInt(value1);
          break;
        case 'KEY': {
          const levelKey = parseKey(value1, baseurl, level);
          if (levelKey.isSupported()) {
            if (levelKey.method === 'NONE') {
              // METHOD=NONE clears all active keys
              levelkeys = undefined;
              break;
            }
            if (!levelkeys) {
              levelkeys = {};
            }
            // Copy-on-write: earlier fragments keep their own key map
            if (levelkeys[levelKey.keyFormat]) {
              levelkeys = Object.assign({}, levelkeys);
            }
            levelkeys[levelKey.keyFormat] = levelKey;
          } else {
            logger.warn(`[Keys] Ignoring invalid EXT-X-KEY tag: "${value1}"`);
          }
          break;
        }
        case 'START':
          level.startTimeOffset = parseStartTimeOffset(value1);
          break;
        case 'MAP': {
          const mapAttrs = new AttrList(value1);
          if (__USE_VARIABLE_SUBSTITUTION__) {
            substituteVariablesInAttributes(level, mapAttrs, [
              'BYTERANGE',
              'URI',
            ]);
          }
          if (frag.duration) {
            // Initial segment tag is after segment duration tag.
            // #EXTINF: 6.0
            // #EXT-X-MAP:URI="init.mp4
            const init = new Fragment(type, baseurl);
            setInitSegment(init, mapAttrs, id, levelkeys);
            currentInitSegment = init;
            frag.initSegment = currentInitSegment;
            if (
              currentInitSegment.rawProgramDateTime &&
              !frag.rawProgramDateTime
            ) {
              frag.rawProgramDateTime = currentInitSegment.rawProgramDateTime;
            }
          } else {
            // Initial segment tag is before segment duration tag
            // Handle case where EXT-X-MAP is declared after EXT-X-BYTERANGE
            const end = frag.byteRangeEndOffset;
            if (end) {
              const start = frag.byteRangeStartOffset as number;
              nextByteRange = `${end - start}@${start}`;
            } else {
              nextByteRange = null;
            }
            setInitSegment(frag, mapAttrs, id, levelkeys);
            currentInitSegment = frag;
            createNextFrag = true;
          }
          break;
        }
        case 'SERVER-CONTROL': {
          // LL-HLS delivery directives support
          const serverControlAttrs = new AttrList(value1);
          level.canBlockReload = serverControlAttrs.bool('CAN-BLOCK-RELOAD');
          level.canSkipUntil = serverControlAttrs.optionalFloat(
            'CAN-SKIP-UNTIL',
            0,
          );
          level.canSkipDateRanges =
            level.canSkipUntil > 0 &&
            serverControlAttrs.bool('CAN-SKIP-DATERANGES');
          level.partHoldBack = serverControlAttrs.optionalFloat(
            'PART-HOLD-BACK',
            0,
          );
          level.holdBack = serverControlAttrs.optionalFloat('HOLD-BACK', 0);
          break;
        }
        case 'PART-INF': {
          const partInfAttrs = new AttrList(value1);
          level.partTarget = partInfAttrs.decimalFloatingPoint('PART-TARGET');
          break;
        }
        case 'PART': {
          // LL-HLS partial segment; parts accumulate into the current fragment
          let partList = level.partList;
          if (!partList) {
            partList = level.partList = [];
          }
          const previousFragmentPart =
            currentPart > 0 ? partList[partList.length - 1] : undefined;
          const index = currentPart++;
          const partAttrs = new AttrList(value1);
          if (__USE_VARIABLE_SUBSTITUTION__) {
            substituteVariablesInAttributes(level, partAttrs, [
              'BYTERANGE',
              'URI',
            ]);
          }
          const part = new Part(
            partAttrs,
            frag,
            baseurl,
            index,
            previousFragmentPart,
          );
          partList.push(part);
          frag.duration += part.duration;
          break;
        }
        case 'PRELOAD-HINT': {
          const preloadHintAttrs = new AttrList(value1);
          if (__USE_VARIABLE_SUBSTITUTION__) {
            substituteVariablesInAttributes(level, preloadHintAttrs, ['URI']);
          }
          level.preloadHint = preloadHintAttrs;
          break;
        }
        case 'RENDITION-REPORT': {
          const renditionReportAttrs = new AttrList(value1);
          if (__USE_VARIABLE_SUBSTITUTION__) {
            substituteVariablesInAttributes(level, renditionReportAttrs, [
              'URI',
            ]);
          }
          level.renditionReports = level.renditionReports || [];
          level.renditionReports.push(renditionReportAttrs);
          break;
        }
        default:
          logger.warn(`line parsed but not handled: ${result}`);
          break;
      }
    }
  }
  // A trailing fragment without a URI is either an unfinished LL-HLS
  // fragment built from parts (becomes the fragmentHint) or leftover state.
  if (prevFrag && !prevFrag.relurl) {
    fragments.pop();
    totalduration -= prevFrag.duration;
    if (level.partList) {
      level.fragmentHint = prevFrag;
    }
  } else if (level.partList) {
    assignProgramDateTime(frag, prevFrag);
    frag.cc = discontinuityCounter;
    level.fragmentHint = frag;
    if (levelkeys) {
      setFragLevelKeys(frag, levelkeys, level);
    }
  }
  const fragmentLength = fragments.length;
  const firstFragment = fragments[0];
  const lastFragment = fragments[fragmentLength - 1];
  totalduration += level.skippedSegments * level.targetduration;
  if (totalduration > 0 && fragmentLength && lastFragment) {
    level.averagetargetduration = totalduration / fragmentLength;
    const lastSn = lastFragment.sn;
    level.endSN = lastSn !== 'initSegment' ? lastSn : 0;
    if (!level.live) {
      lastFragment.endList = true;
    }
    if (firstFragment) {
      level.startCC = firstFragment.cc;
    }
  } else {
    level.endSN = 0;
    level.startCC = 0;
  }
  if (level.fragmentHint) {
    totalduration += level.fragmentHint.duration;
  }
  level.totalduration = totalduration;
  level.endCC = discontinuityCounter;
  /**
   * Backfill any missing PDT values
   * "If the first EXT-X-PROGRAM-DATE-TIME tag in a Playlist appears after
   * one or more Media Segment URIs, the client SHOULD extrapolate
   * backward from that tag (using EXTINF durations and/or media
   * timestamps) to associate dates with those segments."
   * We have already extrapolated forward, but all fragments up to the first instance of PDT do not have their PDTs
   * computed.
   */
  if (firstPdtIndex > 0) {
    backfillProgramDateTimes(fragments, firstPdtIndex);
  }
  return level;
}
}
/**
 * Parses an EXT-X-KEY / EXT-X-SESSION-KEY attribute list into a LevelKey.
 * https://tools.ietf.org/html/rfc8216#section-4.3.2.4
 *
 * @param keyTagAttributes - raw attribute-list text of the tag
 * @param baseurl - base URL used to resolve a relative key URI
 * @param parsed - playlist object providing the variable definitions for substitution
 * @returns the parsed LevelKey (method/uri/format/versions/iv)
 */
function parseKey(
  keyTagAttributes: string,
  baseurl: string,
  parsed: ParsedMultivariantPlaylist | LevelDetails,
): LevelKey {
  const keyAttrs = new AttrList(keyTagAttributes);
  if (__USE_VARIABLE_SUBSTITUTION__) {
    // Fix: 'URI' was listed twice; each attribute only needs one
    // substitution pass (a second pass could re-substitute replaced text).
    substituteVariablesInAttributes(parsed, keyAttrs, [
      'KEYFORMAT',
      'KEYFORMATVERSIONS',
      'URI',
      'IV',
    ]);
  }
  const decryptmethod = keyAttrs.METHOD ?? '';
  const decrypturi = keyAttrs.URI;
  const decryptiv = keyAttrs.hexadecimalInteger('IV');
  const decryptkeyformatversions = keyAttrs.KEYFORMATVERSIONS;
  // From RFC: This attribute is OPTIONAL; its absence indicates an implicit value of "identity".
  const decryptkeyformat = keyAttrs.KEYFORMAT ?? 'identity';
  // IV attribute present but failed hexadecimal parsing
  if (decrypturi && keyAttrs.IV && !decryptiv) {
    logger.error(`Invalid IV: ${keyAttrs.IV}`);
  }
  // If decrypturi is a URI with a scheme, then baseurl will be ignored
  // No uri is allowed when METHOD is NONE
  const resolvedUri = decrypturi ? M3U8Parser.resolve(decrypturi, baseurl) : '';
  // KEYFORMATVERSIONS is a '/'-separated list; default is version 1
  const keyFormatVersions = (
    decryptkeyformatversions ? decryptkeyformatversions : '1'
  )
    .split('/')
    .map(Number)
    .filter(Number.isFinite);
  return new LevelKey(
    decryptmethod,
    resolvedUri,
    decryptkeyformat,
    keyFormatVersions,
    decryptiv,
  );
}
/**
 * Parses an #EXT-X-START attribute list and returns its TIME-OFFSET in
 * seconds, or null when the attribute is absent or not a finite number.
 */
function parseStartTimeOffset(startAttributes: string): number | null {
  const offset = new AttrList(startAttributes).decimalFloatingPoint(
    'TIME-OFFSET',
  );
  return Number.isFinite(offset) ? offset : null;
}
/**
 * Splits a CODECS attribute value into per-type codec lists on the level
 * (videoCodec / audioCodec / textCodec); anything that cannot be classified
 * is kept in level.unknownCodecs.
 */
function setCodecs(
  codecsAttributeValue: string | undefined,
  level: LevelParsed,
) {
  let remaining = (codecsAttributeValue || '').split(/[ ,]+/).filter((c) => c);
  for (const type of ['video', 'audio', 'text'] as CodecType[]) {
    const matched = remaining.filter((codec) => isCodecType(codec, type));
    if (matched.length) {
      // Comma separated list of all codecs for type
      level[`${type}Codec`] = matched.join(',');
      // Keep only codecs not yet classified for the remaining passes
      remaining = remaining.filter((codec) => !matched.includes(codec));
    }
  }
  level.unknownCodecs = remaining;
}
/**
 * Copies the given codec property from a group entry onto the media track,
 * but only when the group actually carries a non-empty codec string.
 */
function assignCodec(
  media: MediaPlaylist,
  groupItem: { audioCodec?: string; textCodec?: string },
  codecProperty: 'audioCodec' | 'textCodec',
) {
  const value = groupItem[codecProperty];
  if (value) {
    media[codecProperty] = value;
  }
}
/**
 * Walks backwards from the first fragment that carried an explicit
 * EXT-X-PROGRAM-DATE-TIME and extrapolates programDateTime for every
 * preceding fragment using each fragment's duration.
 */
function backfillProgramDateTimes(
  fragments: M3U8ParserFragments,
  firstPdtIndex: number,
) {
  let nextFrag = fragments[firstPdtIndex] as Fragment;
  for (let i = firstPdtIndex - 1; i >= 0; i--) {
    const frag = fragments[i];
    // Exit on delta-playlist skipped segments
    if (!frag) {
      return;
    }
    frag.programDateTime =
      (nextFrag.programDateTime as number) - frag.duration * 1000;
    nextFrag = frag;
  }
}
/**
 * Derives frag.programDateTime either from its own raw PDT tag or by
 * continuing from the previous fragment's end time; clears both fields
 * when no finite value can be computed.
 */
function assignProgramDateTime(frag, prevFrag) {
  const raw = frag.rawProgramDateTime;
  if (raw) {
    frag.programDateTime = Date.parse(raw);
  } else if (prevFrag?.programDateTime) {
    frag.programDateTime = prevFrag.endProgramDateTime;
  }
  const pdt = frag.programDateTime;
  if (typeof pdt !== 'number' || !Number.isFinite(pdt)) {
    frag.programDateTime = frag.rawProgramDateTime = null;
  }
}
/**
 * Configures a fragment as an init segment (EXT-X-MAP): resolves its URI and
 * byte range from the tag attributes, tags it with the level id and the
 * special 'initSegment' sequence number, and attaches the active level keys.
 */
function setInitSegment(
  frag: Fragment,
  mapAttrs: AttrList,
  id: number,
  levelkeys: { [key: string]: LevelKey } | undefined,
) {
  const byteRange = mapAttrs.BYTERANGE;
  frag.relurl = mapAttrs.URI;
  if (byteRange) {
    frag.setByteRange(byteRange);
  }
  frag.sn = 'initSegment';
  frag.level = id;
  if (levelkeys) {
    frag.levelkeys = levelkeys;
  }
  // An init segment never references another init segment
  frag.initSegment = null;
}
/**
 * Attaches the active EXT-X-KEY map to a fragment and, when the key set uses
 * common encryption and differs from the last recorded one, appends the
 * fragment to the level's encryptedFragments list.
 */
function setFragLevelKeys(
  frag: Fragment,
  levelkeys: { [key: string]: LevelKey },
  level: LevelDetails,
) {
  frag.levelkeys = levelkeys;
  const encrypted = level.encryptedFragments;
  const lastKeys = encrypted.length
    ? encrypted[encrypted.length - 1].levelkeys
    : null;
  if (lastKeys === levelkeys) {
    // Same key map as the previously recorded encrypted fragment
    return;
  }
  const hasCommonEncryption = Object.keys(levelkeys).some(
    (format) => levelkeys[format].isCommonEncryption,
  );
  if (hasCommonEncryption) {
    encrypted.push(frag);
  }
}

View File

@@ -0,0 +1,716 @@
/**
* PlaylistLoader - delegate for media manifest/playlist loading tasks. Takes care of parsing media to internal data-models.
*
* Once loaded, dispatches events with parsed data-models of manifest/levels/audio/subtitle tracks.
*
* Uses loader(s) set in config to do actual internal loading of resource tasks.
*/
import { Events } from '../events';
import { ErrorDetails, ErrorTypes } from '../errors';
import { logger } from '../utils/logger';
import M3U8Parser from './m3u8-parser';
import type { LevelParsed, VariableMap } from '../types/level';
import type {
Loader,
LoaderCallbacks,
LoaderConfiguration,
LoaderContext,
LoaderResponse,
LoaderStats,
PlaylistLoaderContext,
} from '../types/loader';
import { PlaylistContextType, PlaylistLevelType } from '../types/loader';
import { LevelDetails } from './level-details';
import { AttrList } from '../utils/attr-list';
import type Hls from '../hls';
import type {
ErrorData,
LevelLoadingData,
ManifestLoadingData,
TrackLoadingData,
} from '../types/events';
import type { NetworkComponentAPI } from '../types/component-api';
import type { MediaAttributes } from '../types/media-playlist';
import type { LoaderConfig, RetryConfig } from '../config';
/**
 * Maps a playlist-loading context type to the level type used by the
 * streaming controllers (main, audio or subtitle).
 */
function mapContextToLevelType(
  context: PlaylistLoaderContext,
): PlaylistLevelType {
  if (context.type === PlaylistContextType.AUDIO_TRACK) {
    return PlaylistLevelType.AUDIO;
  }
  if (context.type === PlaylistContextType.SUBTITLE_TRACK) {
    return PlaylistLevelType.SUBTITLE;
  }
  // Manifest and level contexts both load the main playlist type
  return PlaylistLevelType.MAIN;
}
/**
 * Determines the effective URL of a playlist response, falling back to the
 * request URL when the loader could not report one (responseURL unsupported)
 * or when a data: URI was used (no redirect detection needed).
 */
function getResponseUrl(
  response: LoaderResponse,
  context: PlaylistLoaderContext,
): string {
  const responseUrl = response.url;
  if (responseUrl === undefined || responseUrl.startsWith('data:')) {
    return context.url;
  }
  return responseUrl;
}
class PlaylistLoader implements NetworkComponentAPI {
private readonly hls: Hls;
private readonly loaders: {
[key: string]: Loader<LoaderContext>;
} = Object.create(null);
private variableList: VariableMap | null = null;
constructor(hls: Hls) {
this.hls = hls;
this.registerListeners();
}
public startLoad(startPosition: number): void {}
public stopLoad(): void {
this.destroyInternalLoaders();
}
private registerListeners() {
const { hls } = this;
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
hls.on(Events.AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this);
hls.on(Events.SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this);
}
private unregisterListeners() {
const { hls } = this;
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this);
hls.off(Events.AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this);
hls.off(Events.SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this);
}
/**
* Returns defaults or configured loader-type overloads (pLoader and loader config params)
*/
private createInternalLoader(
context: PlaylistLoaderContext,
): Loader<LoaderContext> {
const config = this.hls.config;
const PLoader = config.pLoader;
const Loader = config.loader;
const InternalLoader = PLoader || Loader;
const loader = new InternalLoader(config) as Loader<PlaylistLoaderContext>;
this.loaders[context.type] = loader;
return loader;
}
private getInternalLoader(
context: PlaylistLoaderContext,
): Loader<LoaderContext> | undefined {
return this.loaders[context.type];
}
private resetInternalLoader(contextType): void {
if (this.loaders[contextType]) {
delete this.loaders[contextType];
}
}
/**
* Call `destroy` on all internal loader instances mapped (one per context type)
*/
private destroyInternalLoaders(): void {
for (const contextType in this.loaders) {
const loader = this.loaders[contextType];
if (loader) {
loader.destroy();
}
this.resetInternalLoader(contextType);
}
}
public destroy(): void {
this.variableList = null;
this.unregisterListeners();
this.destroyInternalLoaders();
}
private onManifestLoading(
event: Events.MANIFEST_LOADING,
data: ManifestLoadingData,
) {
const { url } = data;
this.variableList = null;
this.load({
id: null,
level: 0,
responseType: 'text',
type: PlaylistContextType.MANIFEST,
url,
deliveryDirectives: null,
});
}
private onLevelLoading(event: Events.LEVEL_LOADING, data: LevelLoadingData) {
const { id, level, pathwayId, url, deliveryDirectives } = data;
this.load({
id,
level,
pathwayId,
responseType: 'text',
type: PlaylistContextType.LEVEL,
url,
deliveryDirectives,
});
}
private onAudioTrackLoading(
event: Events.AUDIO_TRACK_LOADING,
data: TrackLoadingData,
) {
const { id, groupId, url, deliveryDirectives } = data;
this.load({
id,
groupId,
level: null,
responseType: 'text',
type: PlaylistContextType.AUDIO_TRACK,
url,
deliveryDirectives,
});
}
private onSubtitleTrackLoading(
event: Events.SUBTITLE_TRACK_LOADING,
data: TrackLoadingData,
) {
const { id, groupId, url, deliveryDirectives } = data;
this.load({
id,
groupId,
level: null,
responseType: 'text',
type: PlaylistContextType.SUBTITLE_TRACK,
url,
deliveryDirectives,
});
}
private load(context: PlaylistLoaderContext): void {
const config = this.hls.config;
// logger.debug(`[playlist-loader]: Loading playlist of type ${context.type}, level: ${context.level}, id: ${context.id}`);
// Check if a loader for this context already exists
let loader = this.getInternalLoader(context);
if (loader) {
const loaderContext = loader.context as PlaylistLoaderContext;
if (
loaderContext &&
loaderContext.url === context.url &&
loaderContext.level === context.level
) {
// same URL can't overlap
logger.trace('[playlist-loader]: playlist request ongoing');
return;
}
logger.log(
`[playlist-loader]: aborting previous loader for type: ${context.type}`,
);
loader.abort();
}
// apply different configs for retries depending on
// context (manifest, level, audio/subs playlist)
let loadPolicy: LoaderConfig;
if (context.type === PlaylistContextType.MANIFEST) {
loadPolicy = config.manifestLoadPolicy.default;
} else {
loadPolicy = Object.assign({}, config.playlistLoadPolicy.default, {
timeoutRetry: null,
errorRetry: null,
});
}
loader = this.createInternalLoader(context);
// Override level/track timeout for LL-HLS requests
// (the default of 10000ms is counter productive to blocking playlist reload requests)
if (Number.isFinite(context.deliveryDirectives?.part)) {
let levelDetails: LevelDetails | undefined;
if (
context.type === PlaylistContextType.LEVEL &&
context.level !== null
) {
levelDetails = this.hls.levels[context.level].details;
} else if (
context.type === PlaylistContextType.AUDIO_TRACK &&
context.id !== null
) {
levelDetails = this.hls.audioTracks[context.id].details;
} else if (
context.type === PlaylistContextType.SUBTITLE_TRACK &&
context.id !== null
) {
levelDetails = this.hls.subtitleTracks[context.id].details;
}
if (levelDetails) {
const partTarget = levelDetails.partTarget;
const targetDuration = levelDetails.targetduration;
if (partTarget && targetDuration) {
const maxLowLatencyPlaylistRefresh =
Math.max(partTarget * 3, targetDuration * 0.8) * 1000;
loadPolicy = Object.assign({}, loadPolicy, {
maxTimeToFirstByteMs: Math.min(
maxLowLatencyPlaylistRefresh,
loadPolicy.maxTimeToFirstByteMs,
),
maxLoadTimeMs: Math.min(
maxLowLatencyPlaylistRefresh,
loadPolicy.maxTimeToFirstByteMs,
),
});
}
}
}
const legacyRetryCompatibility: RetryConfig | Record<string, void> =
loadPolicy.errorRetry || loadPolicy.timeoutRetry || {};
const loaderConfig: LoaderConfiguration = {
loadPolicy,
timeout: loadPolicy.maxLoadTimeMs,
maxRetry: legacyRetryCompatibility.maxNumRetry || 0,
retryDelay: legacyRetryCompatibility.retryDelayMs || 0,
maxRetryDelay: legacyRetryCompatibility.maxRetryDelayMs || 0,
};
const loaderCallbacks: LoaderCallbacks<PlaylistLoaderContext> = {
onSuccess: (response, stats, context, networkDetails) => {
const loader = this.getInternalLoader(context) as
| Loader<PlaylistLoaderContext>
| undefined;
this.resetInternalLoader(context.type);
const string = response.data as string;
// Validate if it is an M3U8 at all
if (string.indexOf('#EXTM3U') !== 0) {
this.handleManifestParsingError(
response,
context,
new Error('no EXTM3U delimiter'),
networkDetails || null,
stats,
);
return;
}
stats.parsing.start = performance.now();
if (M3U8Parser.isMediaPlaylist(string)) {
this.handleTrackOrLevelPlaylist(
response,
stats,
context,
networkDetails || null,
loader,
);
} else {
this.handleMasterPlaylist(response, stats, context, networkDetails);
}
},
onError: (response, context, networkDetails, stats) => {
this.handleNetworkError(
context,
networkDetails,
false,
response,
stats,
);
},
onTimeout: (stats, context, networkDetails) => {
this.handleNetworkError(
context,
networkDetails,
true,
undefined,
stats,
);
},
};
// logger.debug(`[playlist-loader]: Calling internal loader delegate for URL: ${context.url}`);
loader.load(context, loaderConfig, loaderCallbacks);
}
private handleMasterPlaylist(
response: LoaderResponse,
stats: LoaderStats,
context: PlaylistLoaderContext,
networkDetails: any,
): void {
const hls = this.hls;
const string = response.data as string;
const url = getResponseUrl(response, context);
const parsedResult = M3U8Parser.parseMasterPlaylist(string, url);
if (parsedResult.playlistParsingError) {
this.handleManifestParsingError(
response,
context,
parsedResult.playlistParsingError,
networkDetails,
stats,
);
return;
}
const {
contentSteering,
levels,
sessionData,
sessionKeys,
startTimeOffset,
variableList,
} = parsedResult;
this.variableList = variableList;
const {
AUDIO: audioTracks = [],
SUBTITLES: subtitles,
'CLOSED-CAPTIONS': captions,
} = M3U8Parser.parseMasterPlaylistMedia(string, url, parsedResult);
if (audioTracks.length) {
// check if we have found an audio track embedded in main playlist (audio track without URI attribute)
const embeddedAudioFound: boolean = audioTracks.some(
(audioTrack) => !audioTrack.url,
);
// if no embedded audio track defined, but audio codec signaled in quality level,
// we need to signal this main audio track this could happen with playlists with
// alt audio rendition in which quality levels (main)
// contains both audio+video. but with mixed audio track not signaled
if (
!embeddedAudioFound &&
levels[0].audioCodec &&
!levels[0].attrs.AUDIO
) {
logger.log(
'[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one',
);
audioTracks.unshift({
type: 'main',
name: 'main',
groupId: 'main',
default: false,
autoselect: false,
forced: false,
id: -1,
attrs: new AttrList({}) as MediaAttributes,
bitrate: 0,
url: '',
});
}
}
hls.trigger(Events.MANIFEST_LOADED, {
levels,
audioTracks,
subtitles,
captions,
contentSteering,
url,
stats,
networkDetails,
sessionData,
sessionKeys,
startTimeOffset,
variableList,
});
}
private handleTrackOrLevelPlaylist(
response: LoaderResponse,
stats: LoaderStats,
context: PlaylistLoaderContext,
networkDetails: any,
loader: Loader<PlaylistLoaderContext> | undefined,
): void {
const hls = this.hls;
const { id, level, type } = context;
const url = getResponseUrl(response, context);
const levelUrlId = 0;
const levelId = Number.isFinite(level as number)
? (level as number)
: Number.isFinite(id as number)
? (id as number)
: 0;
const levelType = mapContextToLevelType(context);
const levelDetails: LevelDetails = M3U8Parser.parseLevelPlaylist(
response.data as string,
url,
levelId,
levelType,
levelUrlId,
this.variableList,
);
// We have done our first request (Manifest-type) and receive
// not a master playlist but a chunk-list (track/level)
// We fire the manifest-loaded event anyway with the parsed level-details
// by creating a single-level structure for it.
if (type === PlaylistContextType.MANIFEST) {
const singleLevel: LevelParsed = {
attrs: new AttrList({}),
bitrate: 0,
details: levelDetails,
name: '',
url,
};
hls.trigger(Events.MANIFEST_LOADED, {
levels: [singleLevel],
audioTracks: [],
url,
stats,
networkDetails,
sessionData: null,
sessionKeys: null,
contentSteering: null,
startTimeOffset: null,
variableList: null,
});
}
// save parsing time
stats.parsing.end = performance.now();
// extend the context with the new levelDetails property
context.levelDetails = levelDetails;
this.handlePlaylistLoaded(
levelDetails,
response,
stats,
context,
networkDetails,
loader,
);
}
private handleManifestParsingError(
response: LoaderResponse,
context: PlaylistLoaderContext,
error: Error,
networkDetails: any,
stats: LoaderStats,
): void {
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.MANIFEST_PARSING_ERROR,
fatal: context.type === PlaylistContextType.MANIFEST,
url: response.url,
err: error,
error,
reason: error.message,
response,
context,
networkDetails,
stats,
});
}
private handleNetworkError(
context: PlaylistLoaderContext,
networkDetails: any,
timeout = false,
response: { code: number; text: string } | undefined,
stats: LoaderStats,
): void {
let message = `A network ${
timeout
? 'timeout'
: 'error' + (response ? ' (status ' + response.code + ')' : '')
} occurred while loading ${context.type}`;
if (context.type === PlaylistContextType.LEVEL) {
message += `: ${context.level} id: ${context.id}`;
} else if (
context.type === PlaylistContextType.AUDIO_TRACK ||
context.type === PlaylistContextType.SUBTITLE_TRACK
) {
message += ` id: ${context.id} group-id: "${context.groupId}"`;
}
const error = new Error(message);
logger.warn(`[playlist-loader]: ${message}`);
let details = ErrorDetails.UNKNOWN;
let fatal = false;
const loader = this.getInternalLoader(context);
switch (context.type) {
case PlaylistContextType.MANIFEST:
details = timeout
? ErrorDetails.MANIFEST_LOAD_TIMEOUT
: ErrorDetails.MANIFEST_LOAD_ERROR;
fatal = true;
break;
case PlaylistContextType.LEVEL:
details = timeout
? ErrorDetails.LEVEL_LOAD_TIMEOUT
: ErrorDetails.LEVEL_LOAD_ERROR;
fatal = false;
break;
case PlaylistContextType.AUDIO_TRACK:
details = timeout
? ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT
: ErrorDetails.AUDIO_TRACK_LOAD_ERROR;
fatal = false;
break;
case PlaylistContextType.SUBTITLE_TRACK:
details = timeout
? ErrorDetails.SUBTITLE_TRACK_LOAD_TIMEOUT
: ErrorDetails.SUBTITLE_LOAD_ERROR;
fatal = false;
break;
}
if (loader) {
this.resetInternalLoader(context.type);
}
const errorData: ErrorData = {
type: ErrorTypes.NETWORK_ERROR,
details,
fatal,
url: context.url,
loader,
context,
error,
networkDetails,
stats,
};
if (response) {
const url = networkDetails?.url || context.url;
errorData.response = { url, data: undefined as any, ...response };
}
this.hls.trigger(Events.ERROR, errorData);
}
private handlePlaylistLoaded(
levelDetails: LevelDetails,
response: LoaderResponse,
stats: LoaderStats,
context: PlaylistLoaderContext,
networkDetails: any,
loader: Loader<PlaylistLoaderContext> | undefined,
): void {
const hls = this.hls;
const { type, level, id, groupId, deliveryDirectives } = context;
const url = getResponseUrl(response, context);
const parent = mapContextToLevelType(context);
const levelIndex =
typeof context.level === 'number' && parent === PlaylistLevelType.MAIN
? (level as number)
: undefined;
if (!levelDetails.fragments.length) {
const error = new Error('No Segments found in Playlist');
hls.trigger(Events.ERROR, {
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.LEVEL_EMPTY_ERROR,
fatal: false,
url,
error,
reason: error.message,
response,
context,
level: levelIndex,
parent,
networkDetails,
stats,
});
return;
}
if (!levelDetails.targetduration) {
levelDetails.playlistParsingError = new Error('Missing Target Duration');
}
const error = levelDetails.playlistParsingError;
if (error) {
hls.trigger(Events.ERROR, {
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.LEVEL_PARSING_ERROR,
fatal: false,
url,
error,
reason: error.message,
response,
context,
level: levelIndex,
parent,
networkDetails,
stats,
});
return;
}
if (levelDetails.live && loader) {
if (loader.getCacheAge) {
levelDetails.ageHeader = loader.getCacheAge() || 0;
}
if (!loader.getCacheAge || isNaN(levelDetails.ageHeader)) {
levelDetails.ageHeader = 0;
}
}
switch (type) {
case PlaylistContextType.MANIFEST:
case PlaylistContextType.LEVEL:
hls.trigger(Events.LEVEL_LOADED, {
details: levelDetails,
level: levelIndex || 0,
id: id || 0,
stats,
networkDetails,
deliveryDirectives,
});
break;
case PlaylistContextType.AUDIO_TRACK:
hls.trigger(Events.AUDIO_TRACK_LOADED, {
details: levelDetails,
id: id || 0,
groupId: groupId || '',
stats,
networkDetails,
deliveryDirectives,
});
break;
case PlaylistContextType.SUBTITLE_TRACK:
hls.trigger(Events.SUBTITLE_TRACK_LOADED, {
details: levelDetails,
id: id || 0,
groupId: groupId || '',
stats,
networkDetails,
deliveryDirectives,
});
break;
}
}
}
export default PlaylistLoader;

15
server/node_modules/hls.js/src/polyfills/number.ts generated vendored Normal file
View File

@@ -0,0 +1,15 @@
// Largest integer exactly representable as a double, with a literal fallback
// for runtimes that predate Number.MAX_SAFE_INTEGER.
export const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991;

// Number.isFinite with a fallback for very old runtimes.
// https://caniuse.com/mdn-javascript_builtins_number_isfinite
export const isFiniteNumber =
  Number.isFinite ||
  function (value) {
    return typeof value === 'number' && isFinite(value);
  };

// Number.isSafeInteger with a fallback for very old runtimes.
// https://caniuse.com/mdn-javascript_builtins_number_issafeinteger
export const isSafeInteger =
  Number.isSafeInteger ||
  function (value) {
    return typeof value === 'number' && Math.abs(value) <= MAX_SAFE_INTEGER;
  };

81
server/node_modules/hls.js/src/remux/aac-helper.ts generated vendored Normal file
View File

@@ -0,0 +1,81 @@
/**
 * AAC helper
 *
 * Provides pre-encoded silent AAC frames, used to fill gaps in the audio
 * stream with valid (but silent) samples.
 */
class AAC {
  /**
   * Return one silent AAC frame for the given codec and channel count, or
   * `undefined` when no pre-encoded frame is available for that combination.
   *
   * @param codec - audio codec string; 'mp4a.40.2' selects the AAC-LC frame
   * set, every other value (including HE-AAC 'mp4a.40.5' / 'mp4a.40.29')
   * selects the HE-AAC frame set.
   * @param channelCount - number of audio channels in the track
   */
  static getSilentFrame(
    codec?: string,
    channelCount?: number,
  ): Uint8Array | undefined {
    if (codec === 'mp4a.40.2') {
      // AAC-LC silent frames, indexed by channel count (1-6).
      const lcFrames: Record<number, number[]> = {
        1: [0x00, 0xc8, 0x00, 0x80, 0x23, 0x80],
        2: [0x21, 0x00, 0x49, 0x90, 0x02, 0x19, 0x00, 0x23, 0x80],
        3: [
          0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64,
          0x00, 0x8e,
        ],
        4: [
          0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64,
          0x00, 0x80, 0x2c, 0x80, 0x08, 0x02, 0x38,
        ],
        5: [
          0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64,
          0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x38,
        ],
        6: [
          0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64,
          0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x00, 0xb2,
          0x00, 0x20, 0x08, 0xe0,
        ],
      };
      const bytes =
        channelCount !== undefined ? lcFrames[channelCount] : undefined;
      return bytes ? new Uint8Array(bytes) : undefined;
    }
    // HE-AAC (mp4a.40.5 / mp4a.40.29) silent frames, indexed by channel count.
    // Generated with e.g.:
    // ffmpeg -y -f lavfi -i "aevalsrc=0:d=0.05" -c:a libfdk_aac -profile:a aac_he -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
    const heFrames: Record<number, number[]> = {
      1: [
        0x1, 0x40, 0x22, 0x80, 0xa3, 0x4e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0,
        0x0, 0x1c, 0x6, 0xf1, 0xc1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5e,
      ],
      2: [
        0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0,
        0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5e,
      ],
      // 3-channel frame (byte sequence identical to the 2-channel frame above,
      // matching the original data).
      3: [
        0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0,
        0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
        0x5a, 0x5e,
      ],
    };
    const bytes =
      channelCount !== undefined ? heFrames[channelCount] : undefined;
    return bytes ? new Uint8Array(bytes) : undefined;
  }
}
export default AAC;

1128
server/node_modules/hls.js/src/remux/mp4-generator.ts generated vendored Normal file

File diff suppressed because it is too large Load Diff

1235
server/node_modules/hls.js/src/remux/mp4-remuxer.ts generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,303 @@
import {
flushTextTrackMetadataCueSamples,
flushTextTrackUserdataCueSamples,
} from './mp4-remuxer';
import {
InitData,
InitDataTrack,
patchEncyptionData,
} from '../utils/mp4-tools';
import {
getDuration,
getStartDTS,
offsetStartDTS,
parseInitSegment,
} from '../utils/mp4-tools';
import { ElementaryStreamTypes } from '../loader/fragment';
import { logger } from '../utils/logger';
import { getCodecCompatibleName } from '../utils/codecs';
import type { TrackSet } from '../types/track';
import type {
InitSegmentData,
RemuxedTrack,
Remuxer,
RemuxerResult,
} from '../types/remuxer';
import type {
DemuxedAudioTrack,
DemuxedMetadataTrack,
DemuxedUserdataTrack,
PassthroughTrack,
} from '../types/demuxer';
import type { DecryptData } from '../loader/level-key';
import type { RationalTimestamp } from '../utils/timescale-conversion';
/**
 * Remuxer for already-fragmented MP4 (fMP4) content: rather than transmuxing,
 * it forwards the segment bytes as-is, deriving track/codec information from
 * the init segment and computing an initPTS offset that maps the media
 * timeline onto the playlist position.
 */
class PassThroughRemuxer implements Remuxer {
  // When true, the next remux() result carries the parsed init segment tracks.
  private emitInitSegment: boolean = false;
  // Codec hints passed into resetInitSegment(); overridden by codecs parsed
  // from the init segment itself when present.
  private audioCodec?: string;
  private videoCodec?: string;
  // Parsed init segment data (result of parseInitSegment).
  private initData?: InitData;
  // Offset between media decode time and playlist time, as a rational timestamp.
  private initPTS: RationalTimestamp | null = null;
  private initTracks?: TrackSet;
  // End time of the previously remuxed segment; used as the start time for the
  // next segment when no audio track is present.
  private lastEndTime: number | null = null;
  public destroy() {}
  // Reset timing state, optionally seeding initPTS with a known value.
  public resetTimeStamp(defaultInitPTS: RationalTimestamp | null) {
    this.initPTS = defaultInitPTS;
    this.lastEndTime = null;
  }
  public resetNextTimestamp() {
    this.lastEndTime = null;
  }
  // Parse a new init segment (after patching its encryption boxes) and flag it
  // for emission with the next remux() result.
  public resetInitSegment(
    initSegment: Uint8Array | undefined,
    audioCodec: string | undefined,
    videoCodec: string | undefined,
    decryptdata: DecryptData | null,
  ) {
    this.audioCodec = audioCodec;
    this.videoCodec = videoCodec;
    this.generateInitSegment(patchEncyptionData(initSegment, decryptdata));
    this.emitInitSegment = true;
  }
  // Parse the init segment and build the TrackSet ('audio', 'video', or the
  // combined 'audiovideo') emitted alongside it.
  private generateInitSegment(initSegment: Uint8Array | undefined): void {
    let { audioCodec, videoCodec } = this;
    if (!initSegment?.byteLength) {
      this.initTracks = undefined;
      this.initData = undefined;
      return;
    }
    const initData = (this.initData = parseInitSegment(initSegment));
    // Get codec from initSegment or fallback to default
    if (initData.audio) {
      audioCodec = getParsedTrackCodec(
        initData.audio,
        ElementaryStreamTypes.AUDIO,
      );
    }
    if (initData.video) {
      videoCodec = getParsedTrackCodec(
        initData.video,
        ElementaryStreamTypes.VIDEO,
      );
    }
    const tracks: TrackSet = {};
    if (initData.audio && initData.video) {
      // Muxed content: one combined track with both codecs.
      tracks.audiovideo = {
        container: 'video/mp4',
        codec: audioCodec + ',' + videoCodec,
        initSegment,
        id: 'main',
      };
    } else if (initData.audio) {
      tracks.audio = {
        container: 'audio/mp4',
        codec: audioCodec,
        initSegment,
        id: 'audio',
      };
    } else if (initData.video) {
      tracks.video = {
        container: 'video/mp4',
        codec: videoCodec,
        initSegment,
        id: 'main',
      };
    } else {
      logger.warn(
        '[passthrough-remuxer.ts]: initSegment does not contain moov or trak boxes.',
      );
    }
    this.initTracks = tracks;
  }
  /**
   * Forward one fMP4 segment: compute/refresh initPTS, offset the segment's
   * decode timestamps accordingly, and return the bytes as a single
   * RemuxedTrack (on `audio` or `video` depending on content) together with
   * ID3/user-data samples aligned to the same timeline.
   */
  public remux(
    audioTrack: DemuxedAudioTrack,
    videoTrack: PassthroughTrack,
    id3Track: DemuxedMetadataTrack,
    textTrack: DemuxedUserdataTrack,
    timeOffset: number,
    accurateTimeOffset: boolean,
  ): RemuxerResult {
    let { initPTS, lastEndTime } = this;
    const result: RemuxerResult = {
      audio: undefined,
      video: undefined,
      text: textTrack,
      id3: id3Track,
      initSegment: undefined,
    };
    // If we haven't yet set a lastEndDTS, or it was reset, set it to the provided timeOffset. We want to use the
    // lastEndDTS over timeOffset whenever possible; during progressive playback, the media source will not update
    // the media duration (which is what timeOffset is provided as) before we need to process the next chunk.
    if (!Number.isFinite(lastEndTime!)) {
      lastEndTime = this.lastEndTime = timeOffset || 0;
    }
    // The binary segment data is added to the videoTrack in the mp4demuxer. We don't check to see if the data is only
    // audio or video (or both); adding it to video was an arbitrary choice.
    const data = videoTrack.samples;
    if (!data?.length) {
      return result;
    }
    // initPTS emitted here is expressed in seconds (timescale 1).
    const initSegment: InitSegmentData = {
      initPTS: undefined,
      timescale: 1,
    };
    let initData = this.initData;
    if (!initData?.length) {
      // No init segment seen yet; try to parse one out of the segment data itself.
      this.generateInitSegment(data);
      initData = this.initData;
    }
    if (!initData?.length) {
      // We can't remux if the initSegment could not be generated
      logger.warn('[passthrough-remuxer.ts]: Failed to generate initSegment.');
      return result;
    }
    if (this.emitInitSegment) {
      initSegment.tracks = this.initTracks as TrackSet;
      this.emitInitSegment = false;
    }
    const duration = getDuration(data, initData);
    const startDTS = getStartDTS(initData, data);
    const decodeTime = startDTS === null ? timeOffset : startDTS;
    // Establish or correct initPTS when it is missing, stale (drifted more
    // than one segment duration from the expected position), or was computed
    // with a different timescale and an accurate offset is now available.
    if (
      isInvalidInitPts(initPTS, decodeTime, timeOffset, duration) ||
      (initSegment.timescale !== initPTS.timescale && accurateTimeOffset)
    ) {
      initSegment.initPTS = decodeTime - timeOffset;
      if (initPTS && initPTS.timescale === 1) {
        logger.warn(
          `Adjusting initPTS by ${initSegment.initPTS - initPTS.baseTime}`,
        );
      }
      this.initPTS = initPTS = {
        baseTime: initSegment.initPTS,
        timescale: 1,
      };
    }
    // With audio present, derive the start from the media timestamps;
    // otherwise fall back to the previous segment's end time.
    const startTime = audioTrack
      ? decodeTime - initPTS.baseTime / initPTS.timescale
      : (lastEndTime as number);
    const endTime = startTime + duration;
    offsetStartDTS(initData, data, initPTS.baseTime / initPTS.timescale);
    if (duration > 0) {
      this.lastEndTime = endTime;
    } else {
      logger.warn('Duration parsed from mp4 should be greater than zero');
      this.resetNextTimestamp();
    }
    const hasAudio = !!initData.audio;
    const hasVideo = !!initData.video;
    let type: any = '';
    if (hasAudio) {
      type += 'audio';
    }
    if (hasVideo) {
      type += 'video';
    }
    const track: RemuxedTrack = {
      data1: data,
      startPTS: startTime,
      startDTS: startTime,
      endPTS: endTime,
      endDTS: endTime,
      type,
      hasAudio,
      hasVideo,
      nb: 1,
      dropped: 0,
    };
    // Combined 'audiovideo' content is carried on the video slot.
    result.audio = track.type === 'audio' ? track : undefined;
    result.video = track.type !== 'audio' ? track : undefined;
    result.initSegment = initSegment;
    result.id3 = flushTextTrackMetadataCueSamples(
      id3Track,
      timeOffset,
      initPTS,
      initPTS,
    );
    if (textTrack.samples.length) {
      result.text = flushTextTrackUserdataCueSamples(
        textTrack,
        timeOffset,
        initPTS,
      );
    }
    return result;
  }
}
/**
 * Determine whether initPTS must be (re)computed: true when no initPTS exists,
 * or when applying it would place the segment further from its expected
 * program position than one segment duration.
 */
function isInvalidInitPts(
  initPTS: RationalTimestamp | null,
  startDTS: number,
  timeOffset: number,
  duration: number,
): initPTS is null {
  if (initPTS === null) {
    return true;
  }
  // Allowed drift is one segment duration, clamped to a minimum of one second.
  const projectedStart = startDTS - initPTS.baseTime / initPTS.timescale;
  return Math.abs(projectedStart - timeOffset) > Math.max(duration, 1);
}
/**
 * Map a codec identifier parsed from the init segment to a string usable in a
 * container MIME type. Codec strings longer than four characters already carry
 * profile/level detail and are returned unchanged; bare four-character
 * identifiers are expanded to a default profile so playback can proceed even
 * when the manifest has no CODECS attribute.
 *
 * @param track - audio or video track entry parsed from the init segment
 * @param type - which elementary stream the track belongs to
 * @returns a codec string suitable for MediaSource type checks
 */
function getParsedTrackCodec(
  track: InitDataTrack,
  type: ElementaryStreamTypes.AUDIO | ElementaryStreamTypes.VIDEO,
): string {
  const parsedCodec = track?.codec;
  if (parsedCodec && parsedCodec.length > 4) {
    return parsedCodec;
  }
  if (type === ElementaryStreamTypes.AUDIO) {
    // These short audio identifiers are valid as-is.
    if (
      parsedCodec === 'ec-3' ||
      parsedCodec === 'ac-3' ||
      parsedCodec === 'alac'
    ) {
      return parsedCodec;
    }
    if (parsedCodec === 'fLaC' || parsedCodec === 'Opus') {
      // Opting not to get `preferManagedMediaSource` from player config for isSupported() check for simplicity
      const preferManagedMediaSource = false;
      return getCodecCompatibleName(parsedCodec, preferManagedMediaSource);
    }
    // Fallback: assume HE-AAC.
    const result = 'mp4a.40.5';
    logger.info(
      `Parsed audio codec "${parsedCodec}" or audio object type not handled. Using "${result}"`,
    );
    return result;
  }
  // Provide defaults based on codec type
  // This allows for some playback of some fmp4 playlists without CODECS defined in manifest
  // NOTE(review): this warning also fires for the recognized 'hvc1'/'hev1'/'av01'
  // short identifiers handled just below — confirm that is intentional.
  logger.warn(`Unhandled video codec "${parsedCodec}"`);
  if (parsedCodec === 'hvc1' || parsedCodec === 'hev1') {
    return 'hvc1.1.6.L120.90';
  }
  if (parsedCodec === 'av01') {
    return 'av01.0.04M.08';
  }
  return 'avc1.42e01e';
}
export default PassThroughRemuxer;

127
server/node_modules/hls.js/src/task-loop.ts generated vendored Normal file
View File

@@ -0,0 +1,127 @@
/**
 * @ignore
 * Base class for components that run a task on the JS main loop.
 *
 * The task itself lives in the subclass's `doTick` implementation. A single
 * run can be requested via `tick`; a repeating run with a fixed period via
 * `setInterval` (cancelled with `clearInterval`). A pending next-tick run can
 * be detected with `hasNextTick` and cancelled with `clearNextTick`.
 *
 * Re-entrant `tick` calls — i.e. `tick` invoked from within a running
 * `doTick` — are never executed synchronously. The re-entry is detected and
 * the extra request is deferred to the next main-loop iteration (the "next
 * tick" in JS runtime lingo) via `tickImmediate`, guaranteeing at most one
 * `doTick` execution per call stack no matter how often `tick` is requested.
 */
export default class TaskLoop {
  // `tick` pre-bound to this instance so it can be handed to timers directly.
  private readonly _boundTick: () => void;
  // Pending one-shot timer id (next-tick scheduling), or null when none.
  private _tickTimer: number | null = null;
  // Repeating interval id, or null when no interval is running.
  private _tickInterval: number | null = null;
  // Depth counter used to detect re-entrant tick() calls.
  private _tickCallCount = 0;

  constructor() {
    this._boundTick = this.tick.bind(this);
  }

  public destroy() {
    this.onHandlerDestroying();
    this.onHandlerDestroyed();
  }

  protected onHandlerDestroying() {
    // clear all timers before unregistering from event bus
    this.clearNextTick();
    this.clearInterval();
  }

  protected onHandlerDestroyed() {}

  public hasInterval(): boolean {
    return this._tickInterval !== null;
  }

  public hasNextTick(): boolean {
    return this._tickTimer !== null;
  }

  /**
   * @param millis - Interval time (ms)
   * @returns True when the interval has been scheduled, false when one is already running (no effect)
   */
  public setInterval(millis: number): boolean {
    if (this._tickInterval) {
      return false;
    }
    this._tickCallCount = 0;
    this._tickInterval = self.setInterval(this._boundTick, millis);
    return true;
  }

  /**
   * @returns True when the interval was cleared, false when none was set (no effect)
   */
  public clearInterval(): boolean {
    if (!this._tickInterval) {
      return false;
    }
    self.clearInterval(this._tickInterval);
    this._tickInterval = null;
    return true;
  }

  /**
   * @returns True when the pending timeout was cleared, false when none was set (no effect)
   */
  public clearNextTick(): boolean {
    if (!this._tickTimer) {
      return false;
    }
    self.clearTimeout(this._tickTimer);
    this._tickTimer = null;
    return true;
  }

  /**
   * Run the subclass task (`doTick`) in this main-loop tick, or defer it to
   * the next one when this is a re-entrant call (tick requested from within
   * a running doTick).
   */
  public tick(): void {
    this._tickCallCount++;
    if (this._tickCallCount !== 1) {
      // Re-entrant call: the outer invocation notices the raised count below
      // and schedules the deferred run.
      return;
    }
    this.doTick();
    if (this._tickCallCount > 1) {
      // tick() was requested again while doTick() was running; process that
      // request on the next main-loop iteration (at most one timer at a time).
      this.tickImmediate();
    }
    this._tickCallCount = 0;
  }

  /**
   * Schedule a tick on the next main-loop iteration, replacing any
   * previously scheduled one.
   */
  public tickImmediate(): void {
    this.clearNextTick();
    this._tickTimer = self.setTimeout(this._boundTick, 0);
  }

  /**
   * For subclass to implement task logic
   * @abstract
   */
  protected doTick(): void {}
}

38
server/node_modules/hls.js/src/types/buffer.ts generated vendored Normal file
View File

@@ -0,0 +1,38 @@
/** Names of the SourceBuffers managed by the buffer controller. */
export type SourceBufferName = 'video' | 'audio' | 'audiovideo';

/**
 * SourceBuffer extended with bookkeeping flags (`ended`/`ending`) and the
 * optional `changeType` method.
 */
// eslint-disable-next-line no-restricted-globals
export type ExtendedSourceBuffer = SourceBuffer & {
  ended?: boolean;
  ending?: boolean;
  changeType?: (type: string) => void;
};

/** The set of SourceBuffers currently created, keyed by name. */
export type SourceBuffers = Partial<
  Record<SourceBufferName, ExtendedSourceBuffer>
>;

/** Per-SourceBuffer queues of pending buffer operations. */
export interface BufferOperationQueues {
  video: Array<BufferOperation>;
  audio: Array<BufferOperation>;
  audiovideo: Array<BufferOperation>;
}

/**
 * One queued operation against a SourceBuffer, with lifecycle callbacks.
 * `start`/`end` optionally carry a time range associated with the operation.
 */
export interface BufferOperation {
  execute: Function;
  onStart: Function;
  onComplete: Function;
  onError: Function;
  start?: number;
  end?: number;
}

/** Event-listener registrations tracked per SourceBuffer. */
export interface SourceBufferListeners {
  video: Array<SourceBufferListener>;
  audio: Array<SourceBufferListener>;
  audiovideo: Array<SourceBufferListener>;
}

/** A single (event name, listener) registration on a SourceBuffer. */
export interface SourceBufferListener {
  event: string;
  listener: EventListener;
}

20
server/node_modules/hls.js/src/types/component-api.ts generated vendored Normal file
View File

@@ -0,0 +1,20 @@
import EwmaBandWidthEstimator from '../utils/ewma-bandwidth-estimator';
/** Minimal lifecycle contract implemented by hls.js controller components. */
export interface ComponentAPI {
  destroy(): void;
}

/** Contract for the ABR controller: auto-level state plus bandwidth estimation. */
export interface AbrComponentAPI extends ComponentAPI {
  firstAutoLevel: number;
  forcedAutoLevel: number;
  nextAutoLevel: number;
  readonly bwEstimator?: EwmaBandWidthEstimator;
  // Reset the bandwidth estimator to the given default estimate (bits/s).
  resetEstimator(abrEwmaDefaultEstimate: number);
}

/** Contract for components that drive network loading and can pause/resume buffering. */
export interface NetworkComponentAPI extends ComponentAPI {
  startLoad(startPosition: number): void;
  stopLoad(): void;
  pauseBuffering?(): void;
  resumeBuffering?(): void;
}

155
server/node_modules/hls.js/src/types/demuxer.ts generated vendored Normal file
View File

@@ -0,0 +1,155 @@
import type { RationalTimestamp } from '../utils/timescale-conversion';
/**
 * Contract implemented by demuxers: turn raw segment bytes into
 * elementary-stream track structures (audio, video, ID3 metadata, user data).
 */
export interface Demuxer {
  demux(
    data: Uint8Array,
    timeOffset: number,
    isSampleAes?: boolean,
    flush?: boolean,
  ): DemuxerResult;
  // Demux content encrypted with SAMPLE-AES (async because of decryption).
  demuxSampleAes(
    data: Uint8Array,
    keyData: KeyData,
    timeOffset: number,
  ): Promise<DemuxerResult>;
  flush(timeOffset?: number): DemuxerResult | Promise<DemuxerResult>;
  destroy(): void;
  resetInitSegment(
    initSegment: Uint8Array | undefined,
    audioCodec: string | undefined,
    videoCodec: string | undefined,
    trackDuration: number,
  );
  resetTimeStamp(defaultInitPTS?: RationalTimestamp | null): void;
  resetContiguity(): void;
}

/** The four track types produced by a single demux pass. */
export interface DemuxerResult {
  audioTrack: DemuxedAudioTrack;
  videoTrack: DemuxedVideoTrackBase;
  id3Track: DemuxedMetadataTrack;
  textTrack: DemuxedUserdataTrack;
}

/** Fields common to every demuxed track. */
export interface DemuxedTrack {
  type: string;
  id: number;
  pid: number;
  inputTimeScale: number;
  sequenceNumber: number;
  samples:
    | AudioSample[]
    | VideoSample[]
    | MetadataSample[]
    | UserdataSample[]
    | Uint8Array;
  timescale?: number;
  container?: string;
  dropped: number;
  duration?: number;
  pesData?: ElementaryStreamData | null;
  codec?: string;
}

/** Track whose samples are raw segment bytes forwarded without transmuxing. */
export interface PassthroughTrack extends DemuxedTrack {
  sampleDuration: number;
  samples: Uint8Array;
  timescale: number;
  duration: number;
  codec: string;
}

export interface DemuxedAudioTrack extends DemuxedTrack {
  config?: number[] | Uint8Array;
  samplerate?: number;
  segmentCodec?: string;
  channelCount?: number;
  manifestCodec?: string;
  samples: AudioSample[];
}

export interface DemuxedVideoTrackBase extends DemuxedTrack {
  width?: number;
  height?: number;
  pixelRatio?: [number, number];
  audFound?: boolean;
  pps?: Uint8Array[];
  sps?: Uint8Array[];
  naluState?: number;
  segmentCodec?: string;
  manifestCodec?: string;
  samples: VideoSample[] | Uint8Array;
}

export interface DemuxedVideoTrack extends DemuxedVideoTrackBase {
  samples: VideoSample[];
}

export interface DemuxedMetadataTrack extends DemuxedTrack {
  samples: MetadataSample[];
}

export interface DemuxedUserdataTrack extends DemuxedTrack {
  samples: UserdataSample[];
}

/** Origin/namespace of a timed-metadata sample. */
export const enum MetadataSchema {
  audioId3 = 'org.id3',
  dateRange = 'com.apple.quicktime.HLS',
  emsg = 'https://aomedia.org/emsg/ID3',
}

export interface MetadataSample {
  pts: number;
  dts: number;
  duration: number;
  len?: number;
  data: Uint8Array;
  type: MetadataSchema;
}

export interface UserdataSample {
  pts: number;
  bytes?: Uint8Array;
  type?: number;
  payloadType?: number;
  uuid?: string;
  userData?: string;
  userDataBytes?: Uint8Array;
}

export interface VideoSample {
  dts: number;
  pts: number;
  key: boolean;
  frame: boolean;
  units: VideoSampleUnit[];
  debug: string;
  length: number;
}

export interface VideoSampleUnit {
  data: Uint8Array;
  type: number;
  state?: number;
}

export type AudioSample = {
  unit: Uint8Array;
  pts: number;
};

export type AudioFrame = {
  sample: AudioSample;
  length: number;
  missing: number;
};

/** Accumulated PES payload chunks plus their total byte size. */
export interface ElementaryStreamData {
  data: Uint8Array[];
  size: number;
}

/** Decryption parameters used by demuxSampleAes(). */
export interface KeyData {
  method: string;
  key: Uint8Array;
  iv: Uint8Array;
}

412
server/node_modules/hls.js/src/types/events.ts generated vendored Normal file
View File

@@ -0,0 +1,412 @@
// eslint-disable-next-line import/no-duplicates
import type { Fragment } from '../loader/fragment';
// eslint-disable-next-line import/no-duplicates
import type { Part } from '../loader/fragment';
import type { LevelDetails } from '../loader/level-details';
import type {
HdcpLevel,
HlsUrlParameters,
Level,
LevelAttributes,
LevelParsed,
VariableMap,
} from './level';
import type { MediaPlaylist, MediaPlaylistType } from './media-playlist';
import type {
Loader,
LoaderContext,
LoaderResponse,
LoaderStats,
PlaylistLevelType,
PlaylistLoaderContext,
} from './loader';
import type { Track, TrackSet } from './track';
import type { SourceBufferName } from './buffer';
import type { ChunkMetadata } from './transmuxer';
import type { LoadStats } from '../loader/load-stats';
import type { ErrorDetails, ErrorTypes } from '../errors';
import type { MetadataSample, UserdataSample } from './demuxer';
import type { AttrList } from '../utils/attr-list';
import type { HlsListeners } from '../events';
import type { KeyLoaderInfo } from '../loader/key-loader';
import type { LevelKey } from '../loader/level-key';
import type { IErrorAction } from '../controller/error-controller';
import type { SteeringManifest } from '../controller/content-steering-controller';
/**
 * Payload type definitions for events emitted on the Hls event bus (see
 * HlsListeners in ../events). Names follow the `<EventName>Data` convention.
 */
export interface MediaAttachingData {
  media: HTMLMediaElement;
}
export interface MediaAttachedData {
  media: HTMLMediaElement;
  mediaSource?: MediaSource;
}
export interface BufferCodecsData {
  video?: Track;
  audio?: Track;
}
export interface BufferCreatedData {
  tracks: TrackSet;
}
export interface BufferAppendingData {
  type: SourceBufferName;
  frag: Fragment;
  part: Part | null;
  chunkMeta: ChunkMetadata;
  parent: PlaylistLevelType;
  data: Uint8Array;
}
export interface BufferAppendedData {
  type: SourceBufferName;
  frag: Fragment;
  part: Part | null;
  chunkMeta: ChunkMetadata;
  parent: PlaylistLevelType;
  timeRanges: Partial<Record<SourceBufferName, TimeRanges>>;
}
export interface BufferEOSData {
  type?: SourceBufferName;
}
export interface BufferFlushingData {
  startOffset: number;
  endOffset: number;
  endOffsetSubtitles?: number;
  type: SourceBufferName | null;
}
export interface BufferFlushedData {
  type: SourceBufferName;
}
export interface ManifestLoadingData {
  url: string;
}
export type ContentSteeringOptions = {
  uri: string;
  pathwayId: string;
};
export interface ManifestLoadedData {
  audioTracks: MediaPlaylist[];
  captions?: MediaPlaylist[];
  contentSteering: ContentSteeringOptions | null;
  levels: LevelParsed[];
  networkDetails: any;
  sessionData: Record<string, AttrList> | null;
  sessionKeys: LevelKey[] | null;
  startTimeOffset: number | null;
  stats: LoaderStats;
  subtitles?: MediaPlaylist[];
  url: string;
  variableList: VariableMap | null;
}
export interface ManifestParsedData {
  levels: Level[];
  audioTracks: MediaPlaylist[];
  subtitleTracks: MediaPlaylist[];
  sessionData: Record<string, AttrList> | null;
  sessionKeys: LevelKey[] | null;
  firstLevel: number;
  stats: LoaderStats;
  audio: boolean;
  video: boolean;
  altAudio: boolean;
}
export interface LevelSwitchingData {
  level: number;
  attrs: LevelAttributes;
  details: LevelDetails | undefined;
  bitrate: number;
  averageBitrate: number;
  maxBitrate: number;
  realBitrate: number;
  width: number;
  height: number;
  codecSet: string;
  audioCodec: string | undefined;
  videoCodec: string | undefined;
  audioGroups: (string | undefined)[] | undefined;
  subtitleGroups: (string | undefined)[] | undefined;
  loaded: { bytes: number; duration: number } | undefined;
  loadError: number;
  fragmentError: number;
  name: string | undefined;
  id: number;
  uri: string;
  // Deprecated (retained for backwards compatibility)
  url: string[];
  urlId: 0;
  audioGroupIds: (string | undefined)[] | undefined;
  textGroupIds: (string | undefined)[] | undefined;
}
export interface LevelSwitchedData {
  level: number;
}
export interface TrackLoadingData {
  id: number;
  groupId: string;
  url: string;
  deliveryDirectives: HlsUrlParameters | null;
}
export interface LevelLoadingData {
  id: number;
  level: number;
  pathwayId: string | undefined;
  url: string;
  deliveryDirectives: HlsUrlParameters | null;
}
export interface TrackLoadedData {
  details: LevelDetails;
  id: number;
  groupId: string;
  networkDetails: any;
  stats: LoaderStats;
  deliveryDirectives: HlsUrlParameters | null;
}
export interface LevelLoadedData {
  details: LevelDetails;
  id: number;
  level: number;
  networkDetails: any;
  stats: LoaderStats;
  deliveryDirectives: HlsUrlParameters | null;
}
export interface LevelUpdatedData {
  details: LevelDetails;
  level: number;
}
export interface LevelPTSUpdatedData {
  details: LevelDetails;
  level: Level;
  drift: number;
  type: string;
  frag: Fragment;
  start: number;
  end: number;
}
// Audio/subtitle track events reuse the playlist/track-loaded shapes.
export interface AudioTrackSwitchingData extends MediaPlaylist {}
export interface AudioTrackSwitchedData extends MediaPlaylist {}
export interface AudioTrackLoadedData extends TrackLoadedData {}
export interface AudioTracksUpdatedData {
  audioTracks: MediaPlaylist[];
}
export interface SubtitleTracksUpdatedData {
  subtitleTracks: MediaPlaylist[];
}
export interface SubtitleTrackSwitchData {
  id: number;
  name?: string;
  groupId?: string;
  type?: MediaPlaylistType | 'main';
  url?: string;
}
export interface SubtitleTrackLoadedData extends TrackLoadedData {}
export interface TrackSwitchedData {
  id: number;
}
export interface SubtitleFragProcessed {
  success: boolean;
  frag: Fragment;
}
export interface FragChangedData {
  frag: Fragment;
}
export interface FPSDropData {
  currentDropped: number;
  currentDecoded: number;
  totalDroppedFrames: number;
}
export interface FPSDropLevelCappingData {
  droppedLevel: number;
  level: number;
}
export interface MaxAutoLevelUpdatedData {
  autoLevelCapping: number;
  levels: Level[] | null;
  maxAutoLevel: number;
  minAutoLevel: number;
  maxHdcpLevel: HdcpLevel;
}
// Catch-all error payload; optional fields depend on the error context.
export interface ErrorData {
  type: ErrorTypes;
  details: ErrorDetails;
  error: Error;
  fatal: boolean;
  errorAction?: IErrorAction;
  buffer?: number;
  bytes?: number;
  chunkMeta?: ChunkMetadata;
  context?: PlaylistLoaderContext;
  event?: keyof HlsListeners | 'demuxerWorker';
  frag?: Fragment;
  part?: Part | null;
  level?: number | undefined;
  levelRetry?: boolean;
  loader?: Loader<LoaderContext>;
  networkDetails?: any;
  stats?: LoaderStats;
  mimeType?: string;
  reason?: string;
  response?: LoaderResponse;
  url?: string;
  parent?: PlaylistLevelType;
  sourceBufferName?: SourceBufferName;
  /**
   * @deprecated Use ErrorData.error
   */
  err?: {
    // comes from transmuxer interface
    message: string;
  };
}
export interface SubtitleFragProcessedData {
  success: boolean;
  frag: Fragment;
  error?: Error;
}
export interface CuesParsedData {
  type: 'captions' | 'subtitles';
  cues: any;
  track: string;
}
export interface NonNativeTextTrack {
  _id?: string;
  label: any;
  kind: string;
  default: boolean;
  closedCaptions?: MediaPlaylist;
  subtitleTrack?: MediaPlaylist;
}
export interface NonNativeTextTracksData {
  tracks: Array<NonNativeTextTrack>;
}
export interface InitPTSFoundData {
  id: string;
  frag: Fragment;
  initPTS: number;
  timescale: number;
}
// Fragment lifecycle events: loading -> loaded -> decrypted -> parsed -> buffered.
export interface FragLoadingData {
  frag: Fragment;
  part?: Part;
  targetBufferTime: number | null;
}
export interface FragLoadEmergencyAbortedData {
  frag: Fragment;
  part: Part | null;
  stats: LoaderStats;
}
export interface FragLoadedData {
  frag: Fragment;
  part: Part | null;
  payload: ArrayBuffer;
  networkDetails: unknown;
}
export interface PartsLoadedData {
  frag: Fragment;
  part: Part | null;
  partsLoaded?: FragLoadedData[];
}
export interface FragDecryptedData {
  frag: Fragment;
  payload: ArrayBuffer;
  stats: {
    tstart: number;
    tdecrypt: number;
  };
}
export interface FragParsingInitSegmentData {}
export interface FragParsingUserdataData {
  id: string;
  frag: Fragment;
  details: LevelDetails;
  samples: UserdataSample[];
}
export interface FragParsingMetadataData {
  id: string;
  frag: Fragment;
  details: LevelDetails;
  samples: MetadataSample[];
}
export interface FragParsedData {
  frag: Fragment;
  part: Part | null;
}
export interface FragBufferedData {
  stats: LoadStats;
  frag: Fragment;
  part: Part | null;
  id: string;
}
export interface LevelsUpdatedData {
  levels: Array<Level>;
}
export interface KeyLoadingData {
  frag: Fragment;
}
export interface KeyLoadedData {
  frag: Fragment;
  keyInfo: KeyLoaderInfo;
}
export interface BackBufferData {
  bufferEnd: number;
}
/**
 * @deprecated Use BackBufferData
 */
export interface LiveBackBufferData extends BackBufferData {}
export interface SteeringManifestLoadedData {
  steeringManifest: SteeringManifest;
  url: string;
}

View File

@@ -0,0 +1,23 @@
import type { Fragment } from '../loader/fragment';
import type { SourceBufferName } from './buffer';
import type { FragLoadedData } from './events';
/** Bookkeeping entry for one fragment tracked by the fragment tracker. */
export interface FragmentEntity {
  body: Fragment;
  // appendedPTS is the latest buffered presentation time within the fragment's time range.
  // It is used to determine: which fragment is appended at any given position, and hls.currentLevel.
  appendedPTS: number | null;
  // Loaded response data, when retained (null otherwise).
  loaded: FragLoadedData | null;
  buffered: boolean;
  // Buffered time ranges for this fragment, per SourceBuffer.
  range: { [key in SourceBufferName]: FragmentBufferedRange };
}

/** A single [startPTS, endPTS] presentation-time span. */
export interface FragmentTimeRange {
  startPTS: number;
  endPTS: number;
}

export interface FragmentBufferedRange {
  time: Array<FragmentTimeRange>;
  // per the name, true when the fragment is only partially buffered —
  // confirm against fragment-tracker usage
  partial: boolean;
}

6
server/node_modules/hls.js/src/types/general.ts generated vendored Normal file
View File

@@ -0,0 +1,6 @@
/**
 * Make specific properties in T required
 *
 * @example `RequiredProperties<{ a?: number; b?: string }, 'a'>` makes `a`
 * mandatory while leaving `b` optional.
 */
export type RequiredProperties<T, K extends keyof T> = T & {
  [P in K]-?: T[P];
};

256
server/node_modules/hls.js/src/types/level.ts generated vendored Executable file
View File

@@ -0,0 +1,256 @@
import type { MediaPlaylist } from './media-playlist';
import type { LevelDetails } from '../loader/level-details';
import type { AttrList } from '../utils/attr-list';
import type { MediaDecodingInfo } from '../utils/mediacapabilities-helper';
/** One variant stream as parsed from the multivariant (master) playlist. */
export interface LevelParsed {
  attrs: LevelAttributes;
  audioCodec?: string;
  bitrate: number;
  details?: LevelDetails;
  height?: number;
  id?: number;
  name: string;
  textCodec?: string;
  unknownCodecs?: string[];
  url: string;
  videoCodec?: string;
  width?: number;
}

/** Raw EXT-X-STREAM-INF attribute list (HLS attribute names as keys). */
export interface LevelAttributes extends AttrList {
  'ALLOWED-CPC'?: string;
  AUDIO?: string;
  'AVERAGE-BANDWIDTH'?: string;
  BANDWIDTH?: string;
  'CLOSED-CAPTIONS'?: string;
  CODECS?: string;
  'FRAME-RATE'?: string;
  'HDCP-LEVEL'?: 'TYPE-0' | 'TYPE-1' | 'NONE';
  'PATHWAY-ID'?: string;
  RESOLUTION?: string;
  SCORE?: string;
  'STABLE-VARIANT-ID'?: string;
  SUBTITLES?: string;
  'SUPPLEMENTAL-CODECS'?: string;
  VIDEO?: string;
  'VIDEO-RANGE'?: VideoRange;
}
// Valid HDCP-LEVEL attribute values (null = attribute absent).
export const HdcpLevels = ['NONE', 'TYPE-0', 'TYPE-1', null] as const;
export type HdcpLevel = (typeof HdcpLevels)[number];

/** Type guard: true when `value` is one of the known HDCP-LEVEL values (or null). */
export function isHdcpLevel(value: any): value is HdcpLevel {
  return (HdcpLevels as readonly unknown[]).includes(value);
}
// Valid VIDEO-RANGE attribute values.
export const VideoRangeValues = ['SDR', 'PQ', 'HLG'] as const;
export type VideoRange = (typeof VideoRangeValues)[number];

/** Type guard: true when `value` is a non-empty, known VIDEO-RANGE value. */
export function isVideoRange(value: any): value is VideoRange {
  return !!value && (VideoRangeValues as readonly unknown[]).includes(value);
}
// HLS variable substitution map (EXT-X-DEFINE): name -> value.
export type VariableMap = Record<string, string>;

// Value for the _HLS_skip delivery directive.
export const enum HlsSkip {
  No = '',
  Yes = 'YES',
  v2 = 'v2',
}

/**
 * Decide which _HLS_skip directive (if any) to request for the next playlist
 * reload, based on the server's skip capability and the playlist's age.
 */
export function getSkipValue(details: LevelDetails): HlsSkip {
  const { canSkipUntil, canSkipDateRanges, age } = details;
  // A Client SHOULD NOT request a Playlist Delta Update unless it already
  // has a version of the Playlist that is no older than one-half of the Skip Boundary.
  // @see: https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis#section-6.3.7
  const recentEnough = age < canSkipUntil / 2;
  if (!canSkipUntil || !recentEnough) {
    return HlsSkip.No;
  }
  return canSkipDateRanges ? HlsSkip.v2 : HlsSkip.Yes;
}
/**
 * LL-HLS delivery directives (media sequence number, part index, skip) that
 * are appended to playlist request URLs as _HLS_* query parameters.
 */
export class HlsUrlParameters {
  msn?: number;
  part?: number;
  skip?: HlsSkip;

  constructor(msn?: number, part?: number, skip?: HlsSkip) {
    this.msn = msn;
    this.part = part;
    this.skip = skip;
  }

  /**
   * Return `uri` with the directives set on this instance appended as
   * _HLS_msn / _HLS_part / _HLS_skip query parameters.
   * Throws when `uri` is not a valid URL.
   */
  addDirectives(uri: string): string | never {
    const url: URL = new self.URL(uri);
    const { msn, part, skip } = this;
    if (msn !== undefined) {
      url.searchParams.set('_HLS_msn', String(msn));
    }
    if (part !== undefined) {
      url.searchParams.set('_HLS_part', String(part));
    }
    if (skip) {
      url.searchParams.set('_HLS_skip', skip);
    }
    return url.href;
  }
}
/**
 * A variant stream (quality level) from the multivariant playlist.
 * Wraps the parsed stream attributes plus runtime state: loaded media
 * playlist details, error counters, measured bitrate and decode support.
 */
export class Level {
  public readonly _attrs: LevelAttributes[];
  public readonly audioCodec: string | undefined;
  public readonly bitrate: number;
  // First four characters of each codec, joined (e.g. built from videoCodec/audioCodec below).
  public readonly codecSet: string;
  public readonly url: string[];
  public readonly frameRate: number;
  public readonly height: number;
  public readonly id: number;
  public readonly name: string;
  public readonly videoCodec: string | undefined;
  public readonly width: number;
  // Parsed media playlist for this level, once loaded.
  public details?: LevelDetails;
  // Error counters used for retry/fallback decisions.
  public fragmentError: number = 0;
  public loadError: number = 0;
  public loaded?: { bytes: number; duration: number };
  // Bitrate measured from downloaded data (0 until sampled).
  public realBitrate: number = 0;
  public supportedPromise?: Promise<MediaDecodingInfo>;
  public supportedResult?: MediaDecodingInfo;
  private _avgBitrate: number = 0;
  private _audioGroups?: (string | undefined)[];
  private _subtitleGroups?: (string | undefined)[];
  // Deprecated (retained for backwards compatibility)
  private readonly _urlId: number = 0;
  constructor(data: LevelParsed | MediaPlaylist) {
    this.url = [data.url];
    this._attrs = [data.attrs];
    this.bitrate = data.bitrate;
    if (data.details) {
      this.details = data.details;
    }
    this.id = data.id || 0;
    this.name = data.name;
    this.width = data.width || 0;
    this.height = data.height || 0;
    this.frameRate = data.attrs.optionalFloat('FRAME-RATE', 0);
    this._avgBitrate = data.attrs.decimalInteger('AVERAGE-BANDWIDTH');
    this.audioCodec = data.audioCodec;
    this.videoCodec = data.videoCodec;
    // Codec set identifier: first four characters of each defined codec.
    this.codecSet = [data.videoCodec, data.audioCodec]
      .filter((c) => !!c)
      .map((s: string) => s.substring(0, 4))
      .join(',');
    this.addGroupId('audio', data.attrs.AUDIO);
    this.addGroupId('text', data.attrs.SUBTITLES);
  }
  // The larger of the declared and the measured bitrate.
  get maxBitrate(): number {
    return Math.max(this.realBitrate, this.bitrate);
  }
  // AVERAGE-BANDWIDTH when present, else measured bitrate, else declared bitrate.
  get averageBitrate(): number {
    return this._avgBitrate || this.realBitrate || this.bitrate;
  }
  get attrs(): LevelAttributes {
    return this._attrs[0];
  }
  get codecs(): string {
    return this.attrs.CODECS || '';
  }
  // Content Steering pathway; '.' is the default pathway.
  get pathwayId(): string {
    return this.attrs['PATHWAY-ID'] || '.';
  }
  // VIDEO-RANGE attribute, defaulting to SDR when absent.
  get videoRange(): VideoRange {
    return this.attrs['VIDEO-RANGE'] || 'SDR';
  }
  get score(): number {
    return this.attrs.optionalFloat('SCORE', 0);
  }
  get uri(): string {
    return this.url[0] || '';
  }
  hasAudioGroup(groupId: string | undefined): boolean {
    return hasGroup(this._audioGroups, groupId);
  }
  hasSubtitleGroup(groupId: string | undefined): boolean {
    return hasGroup(this._subtitleGroups, groupId);
  }
  get audioGroups(): (string | undefined)[] | undefined {
    return this._audioGroups;
  }
  get subtitleGroups(): (string | undefined)[] | undefined {
    return this._subtitleGroups;
  }
  // Register an AUDIO ('audio') or SUBTITLES ('text') rendition group id, deduped.
  addGroupId(type: string, groupId: string | undefined) {
    if (!groupId) {
      return;
    }
    if (type === 'audio') {
      let audioGroups = this._audioGroups;
      if (!audioGroups) {
        audioGroups = this._audioGroups = [];
      }
      if (audioGroups.indexOf(groupId) === -1) {
        audioGroups.push(groupId);
      }
    } else if (type === 'text') {
      let subtitleGroups = this._subtitleGroups;
      if (!subtitleGroups) {
        subtitleGroups = this._subtitleGroups = [];
      }
      if (subtitleGroups.indexOf(groupId) === -1) {
        subtitleGroups.push(groupId);
      }
    }
  }
  // Deprecated methods (retained for backwards compatibility)
  get urlId(): number {
    return 0;
  }
  set urlId(value: number) {}
  get audioGroupIds(): (string | undefined)[] | undefined {
    return this.audioGroups ? [this.audioGroupId] : undefined;
  }
  get textGroupIds(): (string | undefined)[] | undefined {
    return this.subtitleGroups ? [this.textGroupId] : undefined;
  }
  get audioGroupId(): string | undefined {
    return this.audioGroups?.[0];
  }
  get textGroupId(): string | undefined {
    return this.subtitleGroups?.[0];
  }
  addFallback() {}
}
// True when `groups` is defined and contains `groupId`; false for any
// missing/empty argument.
function hasGroup(
  groups: (string | undefined)[] | undefined,
  groupId: string | undefined,
): boolean {
  if (!groups || !groupId) {
    return false;
  }
  return groups.includes(groupId);
}

194
server/node_modules/hls.js/src/types/loader.ts generated vendored Normal file
View File

@@ -0,0 +1,194 @@
import type { LoaderConfig } from '../config';
import type { Fragment } from '../loader/fragment';
import type { Part } from '../loader/fragment';
import type { KeyLoaderInfo } from '../loader/key-loader';
import type { LevelDetails } from '../loader/level-details';
import type { HlsUrlParameters } from './level';
/** Request parameters shared by all loader types (playlist, fragment, key). */
export interface LoaderContext {
  // target URL
  url: string;
  // loader response type (arraybuffer or default response type for playlist)
  responseType: string;
  // headers
  headers?: Record<string, string>;
  // start byte range offset
  rangeStart?: number;
  // end byte range offset
  rangeEnd?: number;
  // true if onProgress should report partial chunk of loaded content
  progressData?: boolean;
}
/** Loader context for media fragment (and LL-HLS part) requests. */
export interface FragmentLoaderContext extends LoaderContext {
  frag: Fragment;
  part: Part | null;
  resetIV?: boolean;
}
/** Loader context for decryption key requests. */
export interface KeyLoaderContext extends LoaderContext {
  keyInfo: KeyLoaderInfo;
  frag: Fragment;
}
/** Retry/timeout configuration passed to a Loader's load() call. */
export interface LoaderConfiguration {
  // LoaderConfig policy that overrides required settings
  loadPolicy: LoaderConfig;
  /**
   * @deprecated use LoaderConfig timeoutRetry and errorRetry maxNumRetry
   */
  // Max number of load retries
  maxRetry: number;
  /**
   * @deprecated use LoaderConfig maxTimeToFirstByteMs and maxLoadTimeMs
   */
  // Timeout after which `onTimeOut` callback will be triggered
  // when loading has not finished after that delay
  timeout: number;
  /**
   * @deprecated use LoaderConfig timeoutRetry and errorRetry retryDelayMs
   */
  // Delay between an I/O error and following connection retry (ms).
  // This to avoid spamming the server
  retryDelay: number;
  /**
   * @deprecated use LoaderConfig timeoutRetry and errorRetry maxRetryDelayMs
   */
  // max connection retry delay (ms)
  maxRetryDelay: number;
  // When streaming progressively, this is the minimum chunk size required to emit a PROGRESS event
  highWaterMark?: number;
}
/** Result delivered to onSuccess callbacks. */
export interface LoaderResponse {
  url: string;
  data?: string | ArrayBuffer | Object;
  // Errors can include HTTP status code and error message
  // Successful responses should include status code 200
  code?: number;
  text?: string;
}
/** Load statistics accumulated by a loader over the lifetime of a request. */
export interface LoaderStats {
  aborted: boolean;
  loaded: number;
  retry: number;
  total: number;
  chunkCount: number;
  bwEstimate: number;
  loading: HlsProgressivePerformanceTiming;
  parsing: HlsPerformanceTiming;
  buffering: HlsProgressivePerformanceTiming;
}
export interface HlsPerformanceTiming {
  start: number;
  end: number;
}
export interface HlsChunkPerformanceTiming extends HlsPerformanceTiming {
  executeStart: number;
  executeEnd: number;
}
export interface HlsProgressivePerformanceTiming extends HlsPerformanceTiming {
  // time of the first received byte/chunk
  first: number;
}
export type LoaderOnSuccess<T extends LoaderContext> = (
  response: LoaderResponse,
  stats: LoaderStats,
  context: T,
  networkDetails: any,
) => void;
export type LoaderOnProgress<T extends LoaderContext> = (
  stats: LoaderStats,
  context: T,
  data: string | ArrayBuffer,
  networkDetails: any,
) => void;
export type LoaderOnError<T extends LoaderContext> = (
  error: {
    // error status code
    code: number;
    // error description
    text: string;
  },
  context: T,
  networkDetails: any,
  stats: LoaderStats,
) => void;
export type LoaderOnTimeout<T extends LoaderContext> = (
  stats: LoaderStats,
  context: T,
  networkDetails: any,
) => void;
export type LoaderOnAbort<T extends LoaderContext> = (
  stats: LoaderStats,
  context: T,
  networkDetails: any,
) => void;
/** Callback set handed to Loader.load(); onAbort/onProgress are optional. */
export interface LoaderCallbacks<T extends LoaderContext> {
  onSuccess: LoaderOnSuccess<T>;
  onError: LoaderOnError<T>;
  onTimeout: LoaderOnTimeout<T>;
  onAbort?: LoaderOnAbort<T>;
  onProgress?: LoaderOnProgress<T>;
}
/** Interface implemented by pluggable loaders (XHR, fetch, or custom). */
export interface Loader<T extends LoaderContext> {
  destroy(): void;
  abort(): void;
  load(
    context: T,
    config: LoaderConfiguration,
    callbacks: LoaderCallbacks<T>,
  ): void;
  /**
   * `getCacheAge()` is called by hls.js to get the duration that a given object
   * has been sitting in a cache proxy when playing live. If implemented,
   * this should return a value in seconds.
   *
   * For HTTP based loaders, this should return the contents of the "age" header.
   *
   * @returns time object being loaded
   */
  getCacheAge?: () => number | null;
  getResponseHeader?: (name: string) => string | null;
  context: T | null;
  stats: LoaderStats;
}
export const enum PlaylistContextType {
  MANIFEST = 'manifest',
  LEVEL = 'level',
  AUDIO_TRACK = 'audioTrack',
  SUBTITLE_TRACK = 'subtitleTrack',
}
export const enum PlaylistLevelType {
  MAIN = 'main',
  AUDIO = 'audio',
  SUBTITLE = 'subtitle',
}
/** Loader context for multivariant and media playlist requests. */
export interface PlaylistLoaderContext extends LoaderContext {
  type: PlaylistContextType;
  // the level index to load
  level: number | null;
  // level or track id from LevelLoadingData / TrackLoadingData
  id: number | null;
  // Media Playlist Group ID
  groupId?: string;
  // Content Steering Pathway ID (or undefined for default Pathway ".")
  pathwayId?: string;
  // internal representation of a parsed m3u8 level playlist
  levelDetails?: LevelDetails;
  // Blocking playlist request delivery directives (or null if none were added to the playlist url)
  deliveryDirectives: HlsUrlParameters | null;
}

81
server/node_modules/hls.js/src/types/media-playlist.ts generated vendored Normal file
View File

@@ -0,0 +1,81 @@
import type { AttrList } from '../utils/attr-list';
import type { LevelDetails } from '../loader/level-details';
import type { VideoRange } from './level';
export type AudioPlaylistType = 'AUDIO';
export type MainPlaylistType = AudioPlaylistType | 'VIDEO';
export type SubtitlePlaylistType = 'SUBTITLES' | 'CLOSED-CAPTIONS';
export type MediaPlaylistType = MainPlaylistType | SubtitlePlaylistType;
/** Criteria for selecting a preferred video rendition. */
export type VideoSelectionOption = {
  preferHDR?: boolean;
  allowedVideoRanges?: Array<VideoRange>;
};
/** Criteria for selecting a preferred audio rendition. */
export type AudioSelectionOption = {
  lang?: string;
  assocLang?: string;
  characteristics?: string;
  channels?: string;
  name?: string;
  audioCodec?: string;
  groupId?: string;
  default?: boolean;
};
/** Criteria for selecting a preferred subtitle rendition. */
export type SubtitleSelectionOption = {
  lang?: string;
  assocLang?: string;
  characteristics?: string;
  name?: string;
  groupId?: string;
  default?: boolean;
  forced?: boolean;
};
// audioTracks, captions and subtitles returned by `M3U8Parser.parseMasterPlaylistMedia`
export interface MediaPlaylist {
  attrs: MediaAttributes;
  audioCodec?: string;
  autoselect: boolean; // implicit false if not present
  bitrate: number;
  channels?: string;
  characteristics?: string;
  details?: LevelDetails;
  height?: number;
  default: boolean; // implicit false if not present
  forced: boolean; // implicit false if not present
  groupId: string; // required in HLS playlists
  id: number; // incrementing number to track media playlists
  instreamId?: string;
  lang?: string;
  assocLang?: string;
  name: string;
  textCodec?: string;
  unknownCodecs?: string[];
  // 'main' is a custom type added to signal an audioCodec in the main track; see playlist-loader~L310
  type: MediaPlaylistType | 'main';
  url: string;
  videoCodec?: string;
  width?: number;
}
/** Raw EXT-X-MEDIA tag attributes. Keys mirror the playlist attribute names. */
export interface MediaAttributes extends AttrList {
  'ASSOC-LANGUAGE'?: string;
  AUTOSELECT?: 'YES' | 'NO';
  CHANNELS?: string;
  CHARACTERISTICS?: string;
  DEFAULT?: 'YES' | 'NO';
  FORCED?: 'YES' | 'NO';
  'GROUP-ID': string;
  'INSTREAM-ID'?: string;
  LANGUAGE?: string;
  NAME: string;
  'PATHWAY-ID'?: string;
  'STABLE-RENDITION-ID'?: string;
  TYPE?: 'AUDIO' | 'VIDEO' | 'SUBTITLES' | 'CLOSED-CAPTIONS';
  URI?: string;
}

77
server/node_modules/hls.js/src/types/remuxer.ts generated vendored Normal file
View File

@@ -0,0 +1,77 @@
import type { TrackSet } from './track';
import {
DemuxedAudioTrack,
DemuxedMetadataTrack,
DemuxedUserdataTrack,
DemuxedVideoTrackBase,
MetadataSample,
UserdataSample,
} from './demuxer';
import type { SourceBufferName } from './buffer';
import type { PlaylistLevelType } from './loader';
import type { DecryptData } from '../loader/level-key';
import type { RationalTimestamp } from '../utils/timescale-conversion';
/**
 * Contract for remuxer implementations: consume demuxed tracks and produce
 * buffer-appendable media (see RemuxerResult).
 */
export interface Remuxer {
  remux(
    audioTrack: DemuxedAudioTrack,
    videoTrack: DemuxedVideoTrackBase,
    id3Track: DemuxedMetadataTrack,
    textTrack: DemuxedUserdataTrack,
    timeOffset: number,
    accurateTimeOffset: boolean,
    flush: boolean,
    playlistType: PlaylistLevelType,
  ): RemuxerResult;
  resetInitSegment(
    initSegment: Uint8Array | undefined,
    audioCodec: string | undefined,
    videoCodec: string | undefined,
    decryptdata: DecryptData | null,
  ): void;
  resetTimeStamp(defaultInitPTS: RationalTimestamp | null): void;
  resetNextTimestamp(): void;
  destroy(): void;
}
/** A remuxed media track ready for SourceBuffer appending. */
export interface RemuxedTrack {
  data1: Uint8Array;
  data2?: Uint8Array;
  startPTS: number;
  endPTS: number;
  startDTS: number;
  endDTS: number;
  type: SourceBufferName;
  hasAudio: boolean;
  hasVideo: boolean;
  independent?: boolean;
  firstKeyFrame?: number;
  firstKeyFramePTS?: number;
  // sample count
  nb: number;
  transferredData1?: ArrayBuffer;
  transferredData2?: ArrayBuffer;
  dropped?: number;
}
export interface RemuxedMetadata {
  samples: MetadataSample[];
}
export interface RemuxedUserdata {
  samples: UserdataSample[];
}
/** Output of a single remux pass; all members are optional per-pass. */
export interface RemuxerResult {
  audio?: RemuxedTrack;
  video?: RemuxedTrack;
  text?: RemuxedUserdata;
  id3?: RemuxedMetadata;
  initSegment?: InitSegmentData;
  independent?: boolean;
}
export interface InitSegmentData {
  tracks?: TrackSet;
  initPTS: number | undefined;
  timescale: number | undefined;
}

15
server/node_modules/hls.js/src/types/track.ts generated vendored Normal file
View File

@@ -0,0 +1,15 @@
/** Tracks keyed by SourceBuffer name. */
export interface TrackSet {
  audio?: Track;
  video?: Track;
  audiovideo?: Track;
}
/** Initialization data for a single buffered track. */
export interface Track {
  id: 'audio' | 'main';
  buffer?: SourceBuffer; // eslint-disable-line no-restricted-globals
  // container type string for this track's media
  container: string;
  codec?: string;
  initSegment?: Uint8Array;
  // codec declared by the level/playlist (may differ from the parsed codec)
  levelCodec?: string;
  metadata?: any;
}

46
server/node_modules/hls.js/src/types/transmuxer.ts generated vendored Normal file
View File

@@ -0,0 +1,46 @@
import type { RemuxerResult } from './remuxer';
import type { HlsChunkPerformanceTiming } from './loader';
import type { SourceBufferName } from './buffer';
/** Output of one transmux operation: the remuxed media plus the chunk it came from. */
export interface TransmuxerResult {
  remuxResult: RemuxerResult;
  chunkMeta: ChunkMetadata;
}
/** Factory for a zeroed chunk performance-timing record. */
function getNewPerformanceTiming(): HlsChunkPerformanceTiming {
  return { start: 0, executeStart: 0, executeEnd: 0, end: 0 };
}

/**
 * Identifies a chunk of a fragment as it moves through the transmux/buffer
 * pipeline, and records per-stage performance timings for it.
 */
export class ChunkMetadata {
  public readonly level: number;
  public readonly sn: number;
  public readonly part: number;
  public readonly id: number;
  public readonly size: number;
  public readonly partial: boolean;
  // Timing of the transmux step for this chunk.
  public readonly transmuxing: HlsChunkPerformanceTiming =
    getNewPerformanceTiming();
  // Timing of the append step, tracked per SourceBuffer name.
  public readonly buffering: {
    [key in SourceBufferName]: HlsChunkPerformanceTiming;
  } = {
    audio: getNewPerformanceTiming(),
    video: getNewPerformanceTiming(),
    audiovideo: getNewPerformanceTiming(),
  };
  constructor(
    level: number,
    sn: number,
    id: number,
    size = 0,
    part = -1,
    partial = false,
  ) {
    this.level = level;
    this.sn = sn;
    this.id = id;
    this.size = size;
    this.part = part;
    this.partial = partial;
  }
}

6
server/node_modules/hls.js/src/types/tuples.ts generated vendored Normal file
View File

@@ -0,0 +1,6 @@
// Tuple utility: Tail<[A, B, C]> resolves to [B, C] (and to [] when T has no
// head to drop). Implemented by re-expressing T as a function's parameters
// and inferring everything after the first one.
export type Tail<T extends any[]> = ((...t: T) => any) extends (
  _: any,
  ...tail: infer U
) => any
  ? U
  : [];

9
server/node_modules/hls.js/src/types/vtt.ts generated vendored Normal file
View File

@@ -0,0 +1,9 @@
// WebVTT timing state tracked per discontinuity.
// NOTE(review): "CC" appears to be the discontinuity counter — confirm at call sites.
export type VTTCCs = {
  ccOffset: number;
  presentationOffset: number;
  // per-CC entry, keyed by CC number
  [key: number]: {
    start: number;
    prevCC: number;
    new: boolean;
  };
};

106
server/node_modules/hls.js/src/utils/attr-list.ts generated vendored Executable file
View File

@@ -0,0 +1,106 @@
const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/;
const ATTR_LIST_REGEX = /(.+?)=(".*?"|.*?)(?:,|$)/g;

// adapted from https://github.com/kanongil/node-m3u8parse/blob/master/attrlist.js
/**
 * Parsed representation of an M3U8 attribute list (`KEY=VALUE,KEY="VALUE",...`).
 * Raw attribute strings become own properties; the typed accessors below
 * convert them on demand.
 */
export class AttrList {
  [key: string]: any;

  constructor(attrs: string | Record<string, any>) {
    const parsed =
      typeof attrs === 'string' ? AttrList.parseAttrList(attrs) : attrs;
    Object.assign(this, parsed);
  }

  /** Names of client/vendor attributes (those prefixed with "X-"). */
  get clientAttrs(): string[] {
    return Object.keys(this).filter((attr) => attr.startsWith('X-'));
  }

  /** Parse the attribute as a base-10 integer; values beyond MAX_SAFE_INTEGER become Infinity. */
  decimalInteger(attrName: string): number {
    const parsed = parseInt(this[attrName], 10);
    return parsed > Number.MAX_SAFE_INTEGER ? Infinity : parsed;
  }

  /** Decode a 0x-prefixed hex attribute into its bytes, or null when absent. */
  hexadecimalInteger(attrName: string) {
    const raw = this[attrName];
    if (!raw) {
      return null;
    }
    let hex = (raw || '0x').slice(2);
    // left-pad to an even number of nibbles so each byte has two digits
    if (hex.length & 1) {
      hex = '0' + hex;
    }
    const bytes = new Uint8Array(hex.length / 2);
    for (let i = 0; i < bytes.length; i++) {
      bytes[i] = parseInt(hex.slice(i * 2, i * 2 + 2), 16);
    }
    return bytes;
  }

  /** Parse the attribute as a hex number; values beyond MAX_SAFE_INTEGER become Infinity. */
  hexadecimalIntegerAsNumber(attrName: string): number {
    const parsed = parseInt(this[attrName], 16);
    return parsed > Number.MAX_SAFE_INTEGER ? Infinity : parsed;
  }

  decimalFloatingPoint(attrName: string): number {
    return parseFloat(this[attrName]);
  }

  /** Parse as float, falling back to `defaultValue` when the attribute is absent/empty. */
  optionalFloat(attrName: string, defaultValue: number): number {
    const raw = this[attrName];
    return raw ? parseFloat(raw) : defaultValue;
  }

  enumeratedString(attrName: string): string | undefined {
    return this[attrName];
  }

  /** True only when the attribute value is exactly "YES". */
  bool(attrName: string): boolean {
    return this[attrName] === 'YES';
  }

  /** Parse a WIDTHxHEIGHT attribute; undefined when it does not match. */
  decimalResolution(attrName: string):
    | {
        width: number;
        height: number;
      }
    | undefined {
    const match = DECIMAL_RESOLUTION_REGEX.exec(this[attrName]);
    if (!match) {
      return undefined;
    }
    return {
      width: parseInt(match[1], 10),
      height: parseInt(match[2], 10),
    };
  }

  /** Tokenize an attribute-list string into a name -> raw-value record. */
  static parseAttrList(input: string): Record<string, any> {
    const attrs: Record<string, any> = {};
    ATTR_LIST_REGEX.lastIndex = 0;
    let match: RegExpExecArray | null;
    while ((match = ATTR_LIST_REGEX.exec(input)) !== null) {
      let value = match[2];
      // strip surrounding double quotes from quoted-string values
      if (value.startsWith('"') && value.endsWith('"')) {
        value = value.slice(1, -1);
      }
      attrs[match[1].trim()] = value;
    }
    return attrs;
  }
}

46
server/node_modules/hls.js/src/utils/binary-search.ts generated vendored Normal file
View File

@@ -0,0 +1,46 @@
type BinarySearchComparison<T> = (candidate: T) => -1 | 0 | 1;

const BinarySearch = {
  /**
   * Binary-search an ordered array for the single item matching a condition.
   *
   * @param list The ordered array to search.
   * @param comparisonFn Receives a candidate item and returns:
   *   1 when the target lies at a higher index than the candidate,
   *   -1 when it lies at a lower index, and 0 when the candidate matches.
   * @returns the matching item, or null when no item matches
   */
  search: function <T>(
    list: T[],
    comparisonFn: BinarySearchComparison<T>,
  ): T | null {
    let lo = 0;
    let hi = list.length - 1;
    while (lo <= hi) {
      const mid = (lo + hi) >> 1;
      const candidate = list[mid];
      const cmp = comparisonFn(candidate);
      if (cmp > 0) {
        // target is to the right of the candidate
        lo = mid + 1;
      } else if (cmp < 0) {
        // target is to the left of the candidate
        hi = mid - 1;
      } else {
        return candidate;
      }
    }
    return null;
  },
};
export default BinarySearch;

173
server/node_modules/hls.js/src/utils/buffer-helper.ts generated vendored Normal file
View File

@@ -0,0 +1,173 @@
/**
* Provides methods dealing with buffer length retrieval for example.
*
* In general, a helper around HTML5 MediaElement TimeRanges gathered from `buffered` property.
*
* Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered
*/
import { logger } from './logger';
// A single contiguous buffered span.
type BufferTimeRange = {
  start: number;
  end: number;
};
// Anything exposing a TimeRanges `buffered` property (media element or SourceBuffer).
export type Bufferable = {
  buffered: TimeRanges;
};
// Buffer state around a playback position: buffered length ahead of `pos`,
// the surrounding range boundaries, and the start of the next range (if any).
export type BufferInfo = {
  len: number;
  start: number;
  end: number;
  nextStart?: number;
};
// Empty TimeRanges stand-in returned when reading `buffered` throws.
const noopBuffered: TimeRanges = {
  length: 0,
  start: () => 0,
  end: () => 0,
};
export class BufferHelper {
  /**
   * Return true if `media`'s buffered include `position`
   */
  static isBuffered(media: Bufferable, position: number): boolean {
    try {
      if (media) {
        const buffered = BufferHelper.getBuffered(media);
        for (let i = 0; i < buffered.length; i++) {
          if (position >= buffered.start(i) && position <= buffered.end(i)) {
            return true;
          }
        }
      }
    } catch (error) {
      // this is to catch
      // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
      // This SourceBuffer has been removed from the parent media source
    }
    return false;
  }
  /**
   * Compute BufferInfo around `pos` from `media`'s buffered ranges, treating
   * holes smaller than `maxHoleDuration` as contiguous. Returns an empty
   * BufferInfo at `pos` when media is null or reading `buffered` throws.
   */
  static bufferInfo(
    media: Bufferable | null,
    pos: number,
    maxHoleDuration: number,
  ): BufferInfo {
    try {
      if (media) {
        const vbuffered = BufferHelper.getBuffered(media);
        const buffered: BufferTimeRange[] = [];
        let i: number;
        for (i = 0; i < vbuffered.length; i++) {
          buffered.push({ start: vbuffered.start(i), end: vbuffered.end(i) });
        }
        return this.bufferedInfo(buffered, pos, maxHoleDuration);
      }
    } catch (error) {
      // this is to catch
      // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
      // This SourceBuffer has been removed from the parent media source
    }
    return { len: 0, start: pos, end: pos, nextStart: undefined };
  }
  /**
   * Pure form of bufferInfo: sorts the given ranges, merges those separated by
   * holes smaller than `maxHoleDuration`, then reports the merged range that
   * contains `pos` (len = buffered time ahead of pos) and the start of the
   * next range beyond it, if any.
   */
  static bufferedInfo(
    buffered: BufferTimeRange[],
    pos: number,
    maxHoleDuration: number,
  ): {
    len: number;
    start: number;
    end: number;
    nextStart?: number;
  } {
    pos = Math.max(0, pos);
    // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
    buffered.sort(function (a, b) {
      const diff = a.start - b.start;
      if (diff) {
        return diff;
      } else {
        return b.end - a.end;
      }
    });
    let buffered2: BufferTimeRange[] = [];
    if (maxHoleDuration) {
      // there might be some small holes between buffer time range
      // consider that holes smaller than maxHoleDuration are irrelevant and build another
      // buffer time range representations that discards those holes
      for (let i = 0; i < buffered.length; i++) {
        const buf2len = buffered2.length;
        if (buf2len) {
          const buf2end = buffered2[buf2len - 1].end;
          // if small hole (value between 0 or maxHoleDuration ) or overlapping (negative)
          if (buffered[i].start - buf2end < maxHoleDuration) {
            // merge overlapping time ranges
            // update lastRange.end only if smaller than item.end
            // e.g. [ 1, 15] with [ 2,8] => [ 1,15] (no need to modify lastRange.end)
            // whereas [ 1, 8] with [ 2,15] => [ 1,15] ( lastRange should switch from [1,8] to [1,15])
            if (buffered[i].end > buf2end) {
              buffered2[buf2len - 1].end = buffered[i].end;
            }
          } else {
            // big hole
            buffered2.push(buffered[i]);
          }
        } else {
          // first value
          buffered2.push(buffered[i]);
        }
      }
    } else {
      buffered2 = buffered;
    }
    let bufferLen = 0;
    // bufferStartNext can possibly be undefined based on the conditional logic below
    let bufferStartNext: number | undefined;
    // bufferStart and bufferEnd are buffer boundaries around current video position
    let bufferStart: number = pos;
    let bufferEnd: number = pos;
    for (let i = 0; i < buffered2.length; i++) {
      const start = buffered2[i].start;
      const end = buffered2[i].end;
      // logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
      if (pos + maxHoleDuration >= start && pos < end) {
        // play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
        bufferStart = start;
        bufferEnd = end;
        bufferLen = bufferEnd - pos;
      } else if (pos + maxHoleDuration < start) {
        bufferStartNext = start;
        break;
      }
    }
    return {
      len: bufferLen,
      start: bufferStart || 0,
      end: bufferEnd || 0,
      nextStart: bufferStartNext,
    };
  }
  /**
   * Safe method to get buffered property.
   * SourceBuffer.buffered may throw if SourceBuffer is removed from it's MediaSource
   */
  static getBuffered(media: Bufferable): TimeRanges {
    try {
      return media.buffered;
    } catch (e) {
      logger.log('failed to get media.buffered', e);
      return noopBuffered;
    }
  }
}

1420
server/node_modules/hls.js/src/utils/cea-608-parser.ts generated vendored Normal file

File diff suppressed because it is too large Load Diff

42
server/node_modules/hls.js/src/utils/chunker.ts generated vendored Normal file
View File

@@ -0,0 +1,42 @@
import { appendUint8Array } from './mp4-tools';
import { sliceUint8 } from './typed-array';
/**
 * Re-chunks a stream of Uint8Array pushes into fixed-size chunks
 * (default 2^19 = 512 KiB), caching any remainder until the next push.
 */
export default class Chunker {
  private chunkSize: number;
  // Bytes carried over from previous pushes, not yet emitted as a full chunk.
  public cache: Uint8Array | null = null;

  constructor(chunkSize = Math.pow(2, 19)) {
    this.chunkSize = chunkSize;
  }

  public push(data: Uint8Array): Array<Uint8Array> {
    const { chunkSize } = this;
    const chunks: Array<Uint8Array> = [];

    // Prepend any previously cached bytes to the incoming data.
    let buffered: Uint8Array = data;
    if (this.cache?.length) {
      buffered = appendUint8Array(this.cache, data);
      this.cache = null;
    }

    if (buffered.length < chunkSize) {
      // Not enough for a full chunk yet; hold everything back.
      this.cache = buffered;
    } else if (buffered.length > chunkSize) {
      // Emit complete chunks; the tail (up to chunkSize bytes) stays cached.
      const total = buffered.length;
      let offset = 0;
      while (offset < total - chunkSize) {
        chunks.push(sliceUint8(buffered, offset, offset + chunkSize));
        offset += chunkSize;
      }
      this.cache = sliceUint8(buffered, offset);
    } else {
      // Exactly one chunk's worth: emit it as-is.
      chunks.push(buffered);
    }
    return chunks;
  }
}

215
server/node_modules/hls.js/src/utils/codecs.ts generated vendored Normal file
View File

@@ -0,0 +1,215 @@
import { getMediaSource } from './mediasource-helper';
// from http://mp4ra.org/codecs.html
// values indicate codec selection preference (lower is higher priority)
// Known ISO BMFF sample entry codes grouped by track type; used both as a
// membership test (isCodecType) and as a preference weight table.
const sampleEntryCodesISO = {
  audio: {
    a3ds: 1,
    'ac-3': 0.95,
    'ac-4': 1,
    alac: 0.9,
    alaw: 1,
    dra1: 1,
    'dts+': 1,
    'dts-': 1,
    dtsc: 1,
    dtse: 1,
    dtsh: 1,
    'ec-3': 0.9,
    enca: 1,
    fLaC: 0.9, // MP4-RA listed codec entry for FLAC
    flac: 0.9, // legacy browser codec name for FLAC
    FLAC: 0.9, // some manifests may list "FLAC" with Apple's tools
    g719: 1,
    g726: 1,
    m4ae: 1,
    mha1: 1,
    mha2: 1,
    mhm1: 1,
    mhm2: 1,
    mlpa: 1,
    mp4a: 1,
    'raw ': 1,
    Opus: 1,
    opus: 1, // browsers expect this to be lowercase despite MP4RA says 'Opus'
    samr: 1,
    sawb: 1,
    sawp: 1,
    sevc: 1,
    sqcp: 1,
    ssmv: 1,
    twos: 1,
    ulaw: 1,
  },
  video: {
    avc1: 1,
    avc2: 1,
    avc3: 1,
    avc4: 1,
    avcp: 1,
    av01: 0.8,
    drac: 1,
    dva1: 1,
    dvav: 1,
    dvh1: 0.7,
    dvhe: 0.7,
    encv: 1,
    hev1: 0.75,
    hvc1: 0.75,
    mjp2: 1,
    mp4v: 1,
    mvc1: 1,
    mvc2: 1,
    mvc3: 1,
    mvc4: 1,
    resv: 1,
    rv60: 1,
    s263: 1,
    svc1: 1,
    svc2: 1,
    'vc-1': 1,
    vp08: 1,
    vp09: 0.9,
  },
  text: {
    stpp: 1,
    wvtt: 1,
  },
} as const;
export type CodecType = 'audio' | 'video';

/** True when the first four characters of `codec` are a known sample entry code for `type`. */
export function isCodecType(codec: string, type: CodecType): boolean {
  const table = sampleEntryCodesISO[type];
  return Boolean(table) && Boolean(table[codec.slice(0, 4)]);
}
export function areCodecsMediaSourceSupported(
codecs: string,
type: CodecType,
preferManagedMediaSource = true,
): boolean {
return !codecs
.split(',')
.some(
(codec) =>
!isCodecMediaSourceSupported(codec, type, preferManagedMediaSource),
);
}
function isCodecMediaSourceSupported(
codec: string,
type: CodecType,
preferManagedMediaSource = true,
): boolean {
const MediaSource = getMediaSource(preferManagedMediaSource);
return MediaSource?.isTypeSupported(mimeTypeForCodec(codec, type)) ?? false;
}
export function mimeTypeForCodec(codec: string, type: CodecType): string {
return `${type}/mp4;codecs="${codec}"`;
}
/**
 * Selection-preference weight for a video codec (lower is preferred);
 * 2 (lowest priority) when no codec is given.
 * NOTE(review): an unknown four-character code yields undefined from the
 * table lookup — confirm callers tolerate this.
 */
export function videoCodecPreferenceValue(
  videoCodec: string | undefined,
): number {
  if (!videoCodec) {
    return 2;
  }
  return sampleEntryCodesISO.video[videoCodec.substring(0, 4)];
}

/**
 * Combined preference value for a comma-separated codec set. Video codes are
 * double-weighted relative to audio codes via a running average.
 */
export function codecsSetSelectionPreferenceValue(codecSet: string): number {
  let num = 0;
  for (const fourCC of codecSet.split(',')) {
    const videoValue = sampleEntryCodesISO.video[fourCC];
    if (videoValue) {
      num = (videoValue * 2 + num) / (num ? 3 : 2);
    } else {
      num = (sampleEntryCodesISO.audio[fourCC] + num) / (num ? 2 : 1);
    }
  }
  return num;
}
// Cache of the resolved browser-compatible spelling for case-sensitive audio codecs.
interface CodecNameCache {
  flac?: string;
  opus?: string;
}
const CODEC_COMPATIBLE_NAMES: CodecNameCache = {};
type LowerCaseCodecType = 'flac' | 'opus';

/**
 * Resolve the spelling of `flac`/`opus` that this browser's MediaSource
 * accepts, caching the first supported candidate.
 * Ideally fLaC and Opus would be probed first (spec-compliant) but some
 * browsers report fLaC as supported and then fail.
 * see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
 */
function getCodecCompatibleNameLower(
  lowerCaseCodec: LowerCaseCodecType,
  preferManagedMediaSource = true,
): string {
  const cached = CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
  if (cached) {
    return cached;
  }
  const candidates = {
    flac: ['flac', 'fLaC', 'FLAC'],
    opus: ['opus', 'Opus'],
  }[lowerCaseCodec];
  for (const candidate of candidates) {
    if (
      isCodecMediaSourceSupported(candidate, 'audio', preferManagedMediaSource)
    ) {
      CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = candidate;
      return candidate;
    }
  }
  // no candidate supported: fall back to the lowercase name
  return lowerCaseCodec;
}

const AUDIO_CODEC_REGEXP = /flac|opus/i;

/** Replace flac/opus tokens in `codec` with the browser-compatible spelling. */
export function getCodecCompatibleName(
  codec: string,
  preferManagedMediaSource = true,
): string {
  return codec.replace(AUDIO_CODEC_REGEXP, (m) =>
    getCodecCompatibleNameLower(
      m.toLowerCase() as LowerCaseCodecType,
      preferManagedMediaSource,
    ),
  );
}
/**
 * Choose between the codec parsed from the media and the one declared by the
 * level. Parsing of mp4a codec strings from media can be incomplete (missing
 * the object-type suffix), so fall back to the first level codec in that case.
 */
export function pickMostCompleteCodecName(
  parsedCodec: string,
  levelCodec: string | undefined,
): string | undefined {
  // A usable parsed codec (anything other than a bare 'mp4a') wins.
  if (parsedCodec && parsedCodec !== 'mp4a') {
    return parsedCodec;
  }
  if (!levelCodec) {
    return levelCodec;
  }
  return levelCodec.split(',')[0];
}
/**
 * Convert avc1 codec strings from the RFC-4281 decimal form (e.g.
 * `avc1.66.30`) to the RFC-6381 hex form (`avc1.42001e`) expected by
 * MediaSource.isTypeSupported. Entries with fewer than three dot-separated
 * parts (already in hex form) pass through unchanged.
 */
export function convertAVC1ToAVCOTI(codec: string) {
  return codec
    .split(',')
    .map((entry) => {
      const parts = entry.split('.');
      if (parts.length <= 2) {
        return entry;
      }
      const [fourCC, profile, level] = parts;
      // profile as two hex digits, level zero-padded via the '000' prefix
      const profileHex = parseInt(profile).toString(16);
      const levelHex = ('000' + parseInt(level).toString(16)).slice(-4);
      return `${fourCC}.${profileHex}${levelHex}`;
    })
    .join(',');
}

96
server/node_modules/hls.js/src/utils/cues.ts generated vendored Normal file
View File

@@ -0,0 +1,96 @@
import { fixLineBreaks } from './vttparser';
import type { CaptionScreen, Row } from './cea-608-parser';
import { generateCueId } from './webvtt-parser';
import { addCueToTrack } from './texttrack-utils';
const WHITESPACE_CHAR = /\s/;
/** Factory for converting a CEA-608 caption screen into VTT cues on a track. */
export interface CuesInterface {
  newCue(
    track: TextTrack | null,
    startTime: number,
    endTime: number,
    captionScreen: CaptionScreen,
  ): VTTCue[];
}
const Cues: CuesInterface = {
  newCue(
    track: TextTrack | null,
    startTime: number,
    endTime: number,
    captionScreen: CaptionScreen,
  ): VTTCue[] {
    const result: VTTCue[] = [];
    let row: Row;
    // the type data states this is VTTCue, but it can potentially be a TextTrackCue on old browsers
    let cue: VTTCue;
    let indenting: boolean;
    let indent: number;
    let text: string;
    const Cue = (self.VTTCue || self.TextTrackCue) as any;
    // One cue per non-empty caption row; leading whitespace becomes the indent.
    for (let r = 0; r < captionScreen.rows.length; r++) {
      row = captionScreen.rows[r];
      indenting = true;
      indent = 0;
      text = '';
      if (!row.isEmpty()) {
        for (let c = 0; c < row.chars.length; c++) {
          if (WHITESPACE_CHAR.test(row.chars[c].uchar) && indenting) {
            indent++;
          } else {
            text += row.chars[c].uchar;
            indenting = false;
          }
        }
        // To be used for cleaning-up orphaned roll-up captions
        row.cueStartTime = startTime;
        // Give a slight bump to the endTime if it's equal to startTime to avoid a SyntaxError in IE
        if (startTime === endTime) {
          endTime += 0.0001;
        }
        // NOTE(review): nudges the indent by one column around the 16-column
        // midpoint, presumably to bias positioning — confirm against CEA-608 PAC rules.
        if (indent >= 16) {
          indent--;
        } else {
          indent++;
        }
        const cueText = fixLineBreaks(text.trim());
        const id = generateCueId(startTime, endTime, cueText);
        // If this cue already exists in the track do not push it
        if (!track?.cues?.getCueById(id)) {
          cue = new Cue(startTime, endTime, cueText);
          cue.id = id;
          cue.line = r + 1;
          cue.align = 'left';
          // Clamp the position between 10 and 80 percent (CEA-608 PAC indent code)
          // https://dvcs.w3.org/hg/text-tracks/raw-file/default/608toVTT/608toVTT.html#positioning-in-cea-608
          // Firefox throws an exception and captions break with out of bounds 0-100 values
          cue.position = 10 + Math.min(80, Math.floor((indent * 8) / 32) * 10);
          result.push(cue);
        }
      }
    }
    if (track && result.length) {
      // Sort bottom cues in reverse order so that they render in line order when overlapping in Chrome
      result.sort((cueA, cueB) => {
        if (cueA.line === 'auto' || cueB.line === 'auto') {
          return 0;
        }
        if (cueA.line > 8 && cueB.line > 8) {
          return cueB.line - cueA.line;
        }
        return cueA.line - cueB.line;
      });
      result.forEach((cue) => addCueToTrack(track, cue));
    }
    return result;
  },
};
export default Cues;

189
server/node_modules/hls.js/src/utils/discontinuities.ts generated vendored Normal file
View File

@@ -0,0 +1,189 @@
import { logger } from './logger';
import { adjustSliding } from './level-helper';
import type { Fragment } from '../loader/fragment';
import type { LevelDetails } from '../loader/level-details';
/**
 * Returns the first fragment whose discontinuity counter (`cc`) equals the
 * requested value, or `null` when no fragment matches.
 */
export function findFirstFragWithCC(
  fragments: Fragment[],
  cc: number,
): Fragment | null {
  for (const fragment of fragments) {
    if (fragment?.cc === cc) {
      return fragment;
    }
  }
  return null;
}
/**
 * Whether level alignment should be driven by discontinuity (CC) markers:
 * requires details from the previous level, plus either a CC increase within
 * the new playlist or a last-loaded fragment belonging to an earlier
 * discontinuity sequence than the new playlist starts with.
 */
export function shouldAlignOnDiscontinuities(
  lastFrag: Fragment | null,
  switchDetails: LevelDetails | undefined,
  details: LevelDetails,
): switchDetails is LevelDetails & boolean {
  if (!switchDetails) {
    return false;
  }
  const ccIncreaseWithinLevel = details.endCC > details.startCC;
  const lastFragFromEarlierCC = !!lastFrag && lastFrag.cc < details.startCC;
  return ccIncreaseWithinLevel || lastFragFromEarlierCC;
}
// Find the first frag in the previous level which matches the CC of the first frag of the new level
export function findDiscontinuousReferenceFrag(
  prevDetails: LevelDetails,
  curDetails: LevelDetails,
) {
  const prevFrags = prevDetails.fragments;
  const curFrags = curDetails.fragments;
  if (!prevFrags.length || !curFrags.length) {
    logger.log('No fragments to align');
    return;
  }
  const reference = findFirstFragWithCC(prevFrags, curFrags[0].cc);
  // NOTE: a startPTS of exactly 0 is treated as unset here (falsy check).
  if (!reference?.startPTS) {
    logger.log('No frag in previous level to align on');
    return;
  }
  return reference;
}
/**
 * Shifts one fragment by `sliding` seconds, keeping start/startPTS/endPTS
 * mutually consistent (endPTS = new start + duration).
 */
function adjustFragmentStart(frag: Fragment, sliding: number) {
  if (!frag) {
    return;
  }
  const shiftedStart = frag.start + sliding;
  frag.start = shiftedStart;
  frag.startPTS = shiftedStart;
  frag.endPTS = shiftedStart + frag.duration;
}
/**
 * Applies a sliding offset to every fragment in the playlist (and to the
 * LL-HLS fragment hint when present), then flags the details as aligned.
 */
export function adjustSlidingStart(sliding: number, details: LevelDetails) {
  // Update segments
  details.fragments.forEach((fragment) => {
    adjustFragmentStart(fragment, sliding);
  });
  // Update LL-HLS parts at the end of the playlist
  const hint = details.fragmentHint;
  if (hint) {
    adjustFragmentStart(hint, sliding);
  }
  details.alignedSliding = true;
}
/**
 * Using the parameters of the last level, this function computes PTS' of the new fragments so that they form a
 * contiguous stream with the last fragments.
 * The PTS of a fragment lets Hls.js know where it fits into a stream - by knowing every PTS, we know which fragment to
 * download at any given time. PTS is normally computed when the fragment is demuxed, so taking this step saves us time
 * and an extra download.
 * @param lastFrag - The last fragment loaded from the previous rendition, if any
 * @param switchDetails - The details of the rendition being switched away from (alignment reference)
 * @param details - The details of the new rendition to align
 */
export function alignStream(
  lastFrag: Fragment | null,
  switchDetails: LevelDetails | undefined,
  details: LevelDetails,
) {
  if (!switchDetails) {
    return;
  }
  alignDiscontinuities(lastFrag, details, switchDetails);
  // `switchDetails` is known non-null past the early return above, so only
  // `alignedSliding` needs re-checking after each alignment attempt.
  if (!details.alignedSliding) {
    // If the PTS wasn't figured out via discontinuity sequence that means there was no CC increase within the level.
    // Aligning via Program Date Time should therefore be reliable, since PDT should be the same within the same
    // discontinuity sequence.
    alignMediaPlaylistByPDT(details, switchDetails);
  }
  if (!details.alignedSliding && !details.skippedSegments) {
    // Try to align on sn so that we pick a better start fragment.
    // Do not perform this on playlists with delta updates as this is only to align levels on switch
    // and adjustSliding only adjusts fragments after skippedSegments.
    adjustSliding(switchDetails, details);
  }
}
/**
 * Computes the PTS of a new level's fragments using the PTS of a fragment in the last level which shares the same
 * discontinuity sequence.
 * @param lastFrag - The last Fragment loaded, which shares the same discontinuity sequence
 * @param details - The details of the new level to align
 * @param switchDetails - The details of the level being switched from, used as the alignment reference
 */
function alignDiscontinuities(
  lastFrag: Fragment | null,
  details: LevelDetails,
  switchDetails: LevelDetails | undefined,
) {
  if (shouldAlignOnDiscontinuities(lastFrag, switchDetails, details)) {
    const referenceFrag = findDiscontinuousReferenceFrag(
      switchDetails,
      details,
    );
    // Only align when the reference fragment has a usable (finite) start time.
    if (referenceFrag && Number.isFinite(referenceFrag.start)) {
      logger.log(
        `Adjusting PTS using last level due to CC increase within current level ${details.url}`,
      );
      adjustSlidingStart(referenceFrag.start, details);
    }
  }
}
/**
 * Ensures appropriate time-alignment between renditions based on PDT.
 * This function assumes the timelines represented in `refDetails` are accurate, including the PDTs
 * for the last discontinuity sequence number shared by both playlists when present,
 * and uses the "wallclock"/PDT timeline as a cross-reference to `details`, adjusting the presentation
 * times/timelines of `details` accordingly.
 * Given the asynchronous nature of fetches and initial loads of live `main` and audio/subtitle tracks,
 * the primary purpose of this function is to ensure the "local timelines" of audio/subtitle tracks
 * are aligned to the main/video timeline, using PDT as the cross-reference/"anchor" that should
 * be consistent across playlists, per the HLS spec.
 * @param details - The details of the rendition you'd like to time-align (e.g. an audio rendition).
 * @param refDetails - The details of the reference rendition with start and PDT times for alignment.
 */
export function alignMediaPlaylistByPDT(
  details: LevelDetails,
  refDetails: LevelDetails,
) {
  // Both playlists must carry PDT for a cross-reference to exist.
  if (!details.hasProgramDateTime || !refDetails.hasProgramDateTime) {
    return;
  }
  const fragments = details.fragments;
  const refFragments = refDetails.fragments;
  if (!fragments.length || !refFragments.length) {
    return;
  }
  // Calculate a delta to apply to all fragments according to the delta in PDT times and start times
  // of a fragment in the reference details, and a fragment in the target details of the same discontinuity.
  // If a fragment of the same discontinuity was not found use the middle fragment of both.
  let refFrag: Fragment | null | undefined;
  let frag: Fragment | null | undefined;
  // Prefer a discontinuity sequence present in both playlists (and past each
  // playlist's first sequence, so it starts inside both windows).
  const targetCC = Math.min(refDetails.endCC, details.endCC);
  if (refDetails.startCC < targetCC && details.startCC < targetCC) {
    refFrag = findFirstFragWithCC(refFragments, targetCC);
    frag = findFirstFragWithCC(fragments, targetCC);
  }
  if (!refFrag || !frag) {
    // Fallback: middle fragment of the reference, matched by CC when possible.
    refFrag = refFragments[Math.floor(refFragments.length / 2)];
    frag =
      findFirstFragWithCC(fragments, refFrag.cc) ||
      fragments[Math.floor(fragments.length / 2)];
  }
  const refPDT = refFrag.programDateTime;
  const targetPDT = frag.programDateTime;
  if (!refPDT || !targetPDT) {
    return;
  }
  // PDT delta (ms -> s) minus the current start-time delta yields the sliding
  // offset that puts both renditions on the same timeline.
  const delta = (targetPDT - refPDT) / 1000 - (frag.start - refFrag.start);
  adjustSlidingStart(delta, details);
}

80
server/node_modules/hls.js/src/utils/error-helper.ts generated vendored Normal file
View File

@@ -0,0 +1,80 @@
import { ErrorDetails } from '../errors';
import type { LoadPolicy, LoaderConfig, RetryConfig } from '../config';
import type { ErrorData } from '../types/events';
import type { LoaderResponse } from '../types/loader';
/**
 * Whether the error corresponds to a request timeout (fragment, key, level,
 * or manifest load), as opposed to a load/response failure.
 */
export function isTimeoutError(error: ErrorData): boolean {
  return (
    error.details === ErrorDetails.FRAG_LOAD_TIMEOUT ||
    error.details === ErrorDetails.KEY_LOAD_TIMEOUT ||
    error.details === ErrorDetails.LEVEL_LOAD_TIMEOUT ||
    error.details === ErrorDetails.MANIFEST_LOAD_TIMEOUT
  );
}
/**
 * Selects the retry policy matching the error class: `timeoutRetry` for
 * timeout errors, `errorRetry` for everything else.
 */
export function getRetryConfig(
  loadPolicy: LoadPolicy,
  error: ErrorData,
): RetryConfig | null {
  if (isTimeoutError(error)) {
    return loadPolicy.default.timeoutRetry;
  }
  return loadPolicy.default.errorRetry;
}
/**
 * Delay in milliseconds to wait before retry attempt `retryCount`:
 * linear or exponential backoff per the config, capped at `maxRetryDelayMs`.
 */
export function getRetryDelay(
  retryConfig: RetryConfig,
  retryCount: number,
): number {
  const factor = retryConfig.backoff === 'linear' ? 1 : 2 ** retryCount;
  const delayMs = factor * retryConfig.retryDelayMs;
  return Math.min(delayMs, retryConfig.maxRetryDelayMs);
}
/**
 * Returns a copy of a loader config with both retry policies disabled.
 * (The exported name keeps its historical spelling for API compatibility.)
 */
export function getLoaderConfigWithoutReties(
  loaderConfig: LoaderConfig,
): LoaderConfig {
  return {
    ...loaderConfig,
    errorRetry: null,
    timeoutRetry: null,
  };
}
/**
 * Determines whether a failed or timed-out request should be retried,
 * honoring the config's custom `shouldRetry` callback when provided.
 */
export function shouldRetry(
  retryConfig: RetryConfig | null | undefined,
  retryCount: number,
  isTimeout: boolean,
  loaderResponse?: LoaderResponse | undefined,
): retryConfig is RetryConfig & boolean {
  if (!retryConfig) {
    return false;
  }
  const withinRetryLimit = retryCount < retryConfig.maxNumRetry;
  const retriableCause =
    retryForHttpStatus(loaderResponse?.code) || !!isTimeout;
  const retry = withinRetryLimit && retriableCause;
  if (retryConfig.shouldRetry) {
    return retryConfig.shouldRetry(
      retryConfig,
      retryCount,
      isTimeout,
      loaderResponse,
      retry,
    );
  }
  return retry;
}
/**
 * Whether an HTTP status is considered retriable. 4xx responses are never
 * retried; status 0 is retried only while the browser reports being offline
 * (transient connectivity loss rather than a CORS error); an undefined status
 * (decrypt/gap/parse error) is never retried.
 */
export function retryForHttpStatus(httpStatus: number | undefined) {
  if (httpStatus === 0) {
    return navigator.onLine === false;
  }
  if (!httpStatus) {
    return false;
  }
  return httpStatus < 400 || httpStatus > 499;
}

View File

@@ -0,0 +1,93 @@
/*
* EWMA Bandwidth Estimator
* - heavily inspired from shaka-player
* Tracks bandwidth samples and estimates available bandwidth.
* Based on the minimum of two exponentially-weighted moving averages with
* different half-lives.
*/
import EWMA from '../utils/ewma';
class EwmaBandWidthEstimator {
private defaultEstimate_: number;
private minWeight_: number;
private minDelayMs_: number;
private slow_: EWMA;
private fast_: EWMA;
private defaultTTFB_: number;
private ttfb_: EWMA;
constructor(
slow: number,
fast: number,
defaultEstimate: number,
defaultTTFB: number = 100,
) {
this.defaultEstimate_ = defaultEstimate;
this.minWeight_ = 0.001;
this.minDelayMs_ = 50;
this.slow_ = new EWMA(slow);
this.fast_ = new EWMA(fast);
this.defaultTTFB_ = defaultTTFB;
this.ttfb_ = new EWMA(slow);
}
update(slow: number, fast: number) {
const { slow_, fast_, ttfb_ } = this;
if (slow_.halfLife !== slow) {
this.slow_ = new EWMA(slow, slow_.getEstimate(), slow_.getTotalWeight());
}
if (fast_.halfLife !== fast) {
this.fast_ = new EWMA(fast, fast_.getEstimate(), fast_.getTotalWeight());
}
if (ttfb_.halfLife !== slow) {
this.ttfb_ = new EWMA(slow, ttfb_.getEstimate(), ttfb_.getTotalWeight());
}
}
sample(durationMs: number, numBytes: number) {
durationMs = Math.max(durationMs, this.minDelayMs_);
const numBits = 8 * numBytes;
// weight is duration in seconds
const durationS = durationMs / 1000;
// value is bandwidth in bits/s
const bandwidthInBps = numBits / durationS;
this.fast_.sample(durationS, bandwidthInBps);
this.slow_.sample(durationS, bandwidthInBps);
}
sampleTTFB(ttfb: number) {
// weight is frequency curve applied to TTFB in seconds
// (longer times have less weight with expected input under 1 second)
const seconds = ttfb / 1000;
const weight = Math.sqrt(2) * Math.exp(-Math.pow(seconds, 2) / 2);
this.ttfb_.sample(weight, Math.max(ttfb, 5));
}
canEstimate(): boolean {
return this.fast_.getTotalWeight() >= this.minWeight_;
}
getEstimate(): number {
if (this.canEstimate()) {
// console.log('slow estimate:'+ Math.round(this.slow_.getEstimate()));
// console.log('fast estimate:'+ Math.round(this.fast_.getEstimate()));
// Take the minimum of these two estimates. This should have the effect of
// adapting down quickly, but up more slowly.
return Math.min(this.fast_.getEstimate(), this.slow_.getEstimate());
} else {
return this.defaultEstimate_;
}
}
getEstimateTTFB(): number {
if (this.ttfb_.getTotalWeight() >= this.minWeight_) {
return this.ttfb_.getEstimate();
} else {
return this.defaultTTFB_;
}
}
destroy() {}
}
export default EwmaBandWidthEstimator;

43
server/node_modules/hls.js/src/utils/ewma.ts generated vendored Normal file
View File

@@ -0,0 +1,43 @@
/*
* compute an Exponential Weighted moving average
* - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
* - heavily inspired from shaka-player
*/
class EWMA {
  // About half of the estimated value comes from the last |halfLife| samples by weight.
  public readonly halfLife: number;
  // Per-unit-weight decay factor; larger alpha expires history more slowly.
  private alpha_: number;
  private estimate_: number;
  private totalWeight_: number;

  constructor(halfLife: number, estimate: number = 0, weight: number = 0) {
    this.halfLife = halfLife;
    this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0;
    this.estimate_ = estimate;
    this.totalWeight_ = weight;
  }

  /** Folds one weighted value into the running average. */
  sample(weight: number, value: number) {
    const decay = Math.pow(this.alpha_, weight);
    this.estimate_ = value * (1 - decay) + decay * this.estimate_;
    this.totalWeight_ += weight;
  }

  getTotalWeight(): number {
    return this.totalWeight_;
  }

  getEstimate(): number {
    if (!this.alpha_) {
      return this.estimate_;
    }
    // Zero-bias correction so early samples are not dragged toward the
    // implicit initial estimate.
    const zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_);
    return zeroFactor ? this.estimate_ / zeroFactor : this.estimate_;
  }
}
export default EWMA;

322
server/node_modules/hls.js/src/utils/fetch-loader.ts generated vendored Normal file
View File

@@ -0,0 +1,322 @@
import {
LoaderCallbacks,
LoaderContext,
Loader,
LoaderStats,
LoaderConfiguration,
LoaderOnProgress,
LoaderResponse,
} from '../types/loader';
import { LoadStats } from '../loader/load-stats';
import ChunkCache from '../demux/chunk-cache';
/**
 * Whether the Fetch API (with abort and streaming support) is usable in this
 * context. Also probes ReadableStream construction, since its presence alone
 * does not guarantee it can be instantiated.
 */
export function fetchSupported() {
  const hasRequiredApis =
    // @ts-ignore
    self.fetch && self.AbortController && self.ReadableStream && self.Request;
  if (!hasRequiredApis) {
    return false;
  }
  try {
    new self.ReadableStream({}); // eslint-disable-line no-new
    return true;
  } catch (e) {
    return false;
  }
}
// Capture groups for "start-end/total" byte-range values (end inclusive).
const BYTERANGE = /(\d+)-(\d+)\/(\d+)/;
/**
 * Loader implementation backed by the Fetch API, with optional progressive
 * (streaming) reads. One instance performs at most one load; `load()` throws
 * if reused.
 */
class FetchLoader implements Loader<LoaderContext> {
  // Factory producing the Request; overridable via config.fetchSetup.
  private fetchSetup: Function;
  // Timer id for whichever timeout (TTFB or total-load) is currently armed.
  private requestTimeout?: number;
  private request: Request | null = null;
  private response: Response | null = null;
  private controller: AbortController;
  public context: LoaderContext | null = null;
  private config: LoaderConfiguration | null = null;
  private callbacks: LoaderCallbacks<LoaderContext> | null = null;
  public stats: LoaderStats;
  private loader: Response | null = null;
  constructor(config /* HlsConfig */) {
    this.fetchSetup = config.fetchSetup || getRequest;
    this.controller = new self.AbortController();
    this.stats = new LoadStats();
  }
  // Drops all references and aborts any in-flight request.
  destroy(): void {
    this.loader =
      this.callbacks =
      this.context =
      this.config =
      this.request =
        null;
    this.abortInternal();
    this.response = null;
    // @ts-ignore
    this.fetchSetup = this.controller = this.stats = null;
  }
  // Aborts the underlying fetch without notifying callbacks.
  // No-op once the load has already finished.
  abortInternal(): void {
    if (this.controller && !this.stats.loading.end) {
      this.stats.aborted = true;
      this.controller.abort();
    }
  }
  // Aborts the fetch and notifies the onAbort callback, if registered.
  abort(): void {
    this.abortInternal();
    if (this.callbacks?.onAbort) {
      this.callbacks.onAbort(
        this.stats,
        this.context as LoaderContext,
        this.response,
      );
    }
  }
  /**
   * Starts the request. Arms a time-to-first-byte timeout, re-arms a
   * total-load timeout once headers arrive, and delivers data via
   * onSuccess/onProgress — or onError/onTimeout on failure.
   */
  load(
    context: LoaderContext,
    config: LoaderConfiguration,
    callbacks: LoaderCallbacks<LoaderContext>,
  ): void {
    const stats = this.stats;
    if (stats.loading.start) {
      throw new Error('Loader can only be used once.');
    }
    stats.loading.start = self.performance.now();
    const initParams = getRequestParameters(context, this.controller.signal);
    const onProgress: LoaderOnProgress<LoaderContext> | undefined =
      callbacks.onProgress;
    const isArrayBuffer = context.responseType === 'arraybuffer';
    // Property used to measure the body size ('byteLength' vs 'length').
    const LENGTH = isArrayBuffer ? 'byteLength' : 'length';
    const { maxTimeToFirstByteMs, maxLoadTimeMs } = config.loadPolicy;
    this.context = context;
    this.config = config;
    this.callbacks = callbacks;
    this.request = this.fetchSetup(context, initParams);
    self.clearTimeout(this.requestTimeout);
    // First timeout covers time-to-first-byte when configured and finite,
    // otherwise the whole load budget.
    config.timeout =
      maxTimeToFirstByteMs && Number.isFinite(maxTimeToFirstByteMs)
        ? maxTimeToFirstByteMs
        : maxLoadTimeMs;
    this.requestTimeout = self.setTimeout(() => {
      this.abortInternal();
      callbacks.onTimeout(stats, context, this.response);
    }, config.timeout);
    self
      .fetch(this.request as Request)
      .then((response: Response): Promise<string | ArrayBuffer> => {
        this.response = this.loader = response;
        const first = Math.max(self.performance.now(), stats.loading.start);
        self.clearTimeout(this.requestTimeout);
        // Headers received: swap in the remaining total-load timeout.
        config.timeout = maxLoadTimeMs;
        this.requestTimeout = self.setTimeout(
          () => {
            this.abortInternal();
            callbacks.onTimeout(stats, context, this.response);
          },
          maxLoadTimeMs - (first - stats.loading.start),
        );
        if (!response.ok) {
          const { status, statusText } = response;
          throw new FetchError(
            statusText || 'fetch, bad network response',
            status,
            response,
          );
        }
        stats.loading.first = first;
        stats.total = getContentLength(response.headers) || stats.total;
        // Stream chunks through onProgress when a finite highWaterMark is
        // configured; otherwise buffer the whole body.
        if (onProgress && Number.isFinite(config.highWaterMark)) {
          return this.loadProgressively(
            response,
            stats,
            context,
            config.highWaterMark,
            onProgress,
          );
        }
        if (isArrayBuffer) {
          return response.arrayBuffer();
        }
        if (context.responseType === 'json') {
          return response.json();
        }
        return response.text();
      })
      .then((responseData: string | ArrayBuffer) => {
        const response = this.response;
        if (!response) {
          throw new Error('loader destroyed');
        }
        self.clearTimeout(this.requestTimeout);
        stats.loading.end = Math.max(
          self.performance.now(),
          stats.loading.first,
        );
        const total = responseData[LENGTH];
        if (total) {
          stats.loaded = stats.total = total;
        }
        const loaderResponse: LoaderResponse = {
          url: response.url,
          data: responseData,
          code: response.status,
        };
        // Non-progressive loads still emit a single onProgress before success.
        if (onProgress && !Number.isFinite(config.highWaterMark)) {
          onProgress(stats, context, responseData, response);
        }
        callbacks.onSuccess(loaderResponse, stats, context, response);
      })
      .catch((error) => {
        self.clearTimeout(this.requestTimeout);
        if (stats.aborted) {
          return;
        }
        // CORS errors result in an undefined code. Set it to 0 here to align with XHR's behavior
        // when destroying, 'error' itself can be undefined
        const code: number = !error ? 0 : error.code || 0;
        const text: string = !error ? null : error.message;
        callbacks.onError(
          { code, text },
          context,
          error ? error.details : null,
          stats,
        );
      });
  }
  // Age of the cached response in seconds, from the 'age' header, if present.
  getCacheAge(): number | null {
    let result: number | null = null;
    if (this.response) {
      const ageHeader = this.response.headers.get('age');
      result = ageHeader ? parseFloat(ageHeader) : null;
    }
    return result;
  }
  getResponseHeader(name: string): string | null {
    return this.response ? this.response.headers.get(name) : null;
  }
  /**
   * Reads the response body chunk by chunk, buffering chunks smaller than
   * `highWaterMark` in a ChunkCache and flushing to `onProgress` once enough
   * data accumulates. Resolves with an empty ArrayBuffer when done — on this
   * path data is delivered exclusively through onProgress.
   */
  private loadProgressively(
    response: Response,
    stats: LoaderStats,
    context: LoaderContext,
    highWaterMark: number = 0,
    onProgress: LoaderOnProgress<LoaderContext>,
  ): Promise<ArrayBuffer> {
    const chunkCache = new ChunkCache();
    const reader = (response.body as ReadableStream).getReader();
    const pump = (): Promise<ArrayBuffer> => {
      return reader
        .read()
        .then((data) => {
          if (data.done) {
            if (chunkCache.dataLength) {
              onProgress(stats, context, chunkCache.flush(), response);
            }
            return Promise.resolve(new ArrayBuffer(0));
          }
          const chunk: Uint8Array = data.value;
          const len = chunk.length;
          stats.loaded += len;
          if (len < highWaterMark || chunkCache.dataLength) {
            // The current chunk is too small to be emitted or the cache already has data
            // Push it to the cache
            chunkCache.push(chunk);
            if (chunkCache.dataLength >= highWaterMark) {
              // flush in order to join the typed arrays
              onProgress(stats, context, chunkCache.flush(), response);
            }
          } else {
            // If there's nothing cached already, and the chunk is large enough
            // just emit the progress event
            onProgress(stats, context, chunk, response);
          }
          return pump();
        })
        .catch(() => {
          /* aborted */
          return Promise.reject();
        });
    };
    return pump();
  }
}
/**
 * Builds the fetch RequestInit for a loader context: CORS GET with
 * same-origin credentials, the caller's headers, and an optional Range header
 * derived from the context's byte range (context.rangeEnd is exclusive).
 */
function getRequestParameters(context: LoaderContext, signal): any {
  const headers = new self.Headers(Object.assign({}, context.headers));
  if (context.rangeEnd) {
    // HTTP Range end is inclusive, while context.rangeEnd is exclusive.
    const lastByte = String(context.rangeEnd - 1);
    headers.set('Range', 'bytes=' + context.rangeStart + '-' + lastByte);
  }
  return {
    method: 'GET',
    mode: 'cors',
    credentials: 'same-origin',
    signal,
    headers,
  };
}
/**
 * Returns the number of bytes described by a "start-end/total" byte-range
 * value (end inclusive), or undefined when the value does not parse.
 */
function getByteRangeLength(byteRangeHeader: string): number | undefined {
  const match = BYTERANGE.exec(byteRangeHeader);
  if (!match) {
    return undefined;
  }
  return parseInt(match[2]) - parseInt(match[1]) + 1;
}
function getContentLength(headers: Headers): number | undefined {
const contentRange = headers.get('Content-Range');
if (contentRange) {
const byteRangeLength = getByteRangeLength(contentRange);
if (Number.isFinite(byteRangeLength)) {
return byteRangeLength;
}
}
const contentLength = headers.get('Content-Length');
if (contentLength) {
return parseInt(contentLength);
}
}
/** Default request factory, used when the config provides no fetchSetup. */
function getRequest(context: LoaderContext, initParams: any): Request {
  const { url } = context;
  return new self.Request(url, initParams);
}
/**
 * Error thrown for non-ok fetch responses; carries the HTTP status code and
 * the originating Response object as `details`.
 */
class FetchError extends Error {
  constructor(
    message: string,
    public code: number,
    public details: any,
  ) {
    super(message);
  }
}
export default FetchLoader;

2
server/node_modules/hls.js/src/utils/global.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
/** Returns `undefined` if `self` is missing, e.g. in node */
export const optionalSelf = typeof self !== 'undefined' ? self : undefined;

70
server/node_modules/hls.js/src/utils/hdr.ts generated vendored Normal file
View File

@@ -0,0 +1,70 @@
import { type VideoRange, VideoRangeValues } from '../types/level';
import type { VideoSelectionOption } from '../types/media-playlist';
/**
 * @returns Whether we can detect and validate HDR capability within the window context.
 * Probes `matchMedia` with a deliberately invalid query: if the browser
 * reports the same serialized `media` text for both queries, the
 * `dynamic-range` query was not recognized and the result is not trusted.
 */
export function isHdrSupported() {
  if (typeof matchMedia !== 'function') {
    return false;
  }
  const hdrQuery = matchMedia('(dynamic-range: high)');
  const invalidQuery = matchMedia('bad query');
  const queryRecognized = hdrQuery.media !== invalidQuery.media;
  return queryRecognized && hdrQuery.matches === true;
}
/**
 * Sanitizes inputs to return the active video selection options for HDR/SDR.
 * When both inputs are null:
 *
 * `{ preferHDR: false, allowedVideoRanges: [] }`
 *
 * When `currentVideoRange` non-null, maintain the active range:
 *
 * `{ preferHDR: currentVideoRange !== 'SDR', allowedVideoRanges: [currentVideoRange] }`
 *
 * When VideoSelectionOption non-null:
 *
 * - Allow all video ranges if `allowedVideoRanges` unspecified.
 * - If `preferHDR` is non-null use the value to filter `allowedVideoRanges`.
 * - Else check window for HDR support and set `preferHDR` to the result.
 *
 * @param currentVideoRange
 * @param videoPreference
 */
export function getVideoSelectionOptions(
  currentVideoRange: VideoRange | undefined,
  videoPreference: VideoSelectionOption | undefined,
) {
  let preferHDR = false;
  let allowedVideoRanges: Array<VideoRange> = [];
  if (currentVideoRange) {
    // Maintain the currently active dynamic range.
    preferHDR = currentVideoRange !== 'SDR';
    allowedVideoRanges = [currentVideoRange];
  }
  if (videoPreference) {
    const requestedRanges =
      videoPreference.allowedVideoRanges || VideoRangeValues.slice(0);
    preferHDR =
      videoPreference.preferHDR !== undefined
        ? videoPreference.preferHDR
        : isHdrSupported();
    allowedVideoRanges = preferHDR
      ? requestedRanges.filter((range: VideoRange) => range !== 'SDR')
      : ['SDR'];
  }
  return {
    preferHDR,
    allowedVideoRanges,
  };
}

Some files were not shown because too many files have changed in this diff Show More