libavjs-webcodecs-polyfill
A WebCodecs polyfill (ponyfill, really), using libav.js
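A minimal usage sketch, assuming the bundle's exports mirror the internal names visible below (load, AudioDecoder, EncodedAudioChunk, ...) and that a libav.js build is available to the wrapper:

    await LibAVWebCodecs.load();
    const dec = new LibAVWebCodecs.AudioDecoder({
        output: frame => { /* consume the polyfill AudioData */ frame.close(); },
        error: e => console.error(e)
    });
    dec.configure({ codec: "opus", sampleRate: 48000, numberOfChannels: 2 });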
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.LibAVWebCodecs = {}));
})(this, (function (exports) { 'use strict';
/*
* This file is part of the libav.js WebCodecs Polyfill implementation. The
* interface implemented is derived from the W3C standard. No attribution is
* required when using this library.
*
* Copyright (c) 2021-2024 Yahweasel
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
* OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
let EncodedAudioChunk$1 = class EncodedAudioChunk {
constructor(init) {
/* 1. If init.transfer contains more than one reference to the same
* ArrayBuffer, then throw a DataCloneError DOMException. */
// 2. For each transferable in init.transfer:
/* 1. If [[Detached]] internal slot is true, then throw a
* DataCloneError DOMException. */
// (not worth checking in a polyfill)
/* 3. Let chunk be a new EncodedAudioChunk object, initialized as
* follows */
{
// 1. Assign init.type to [[type]].
this.type = init.type;
// 2. Assign init.timestamp to [[timestamp]].
this.timestamp = init.timestamp;
/* 3. If init.duration exists, assign it to [[duration]], or assign
* null otherwise. */
if (typeof init.duration === "number")
this.duration = init.duration;
else
this.duration = null;
// 4. Assign init.data.byteLength to [[byte length]];
this.byteLength = init.data.byteLength;
/* 5. If init.transfer contains an ArrayBuffer referenced by
* init.data the User Agent MAY choose to: */
let transfer = false;
if (init.transfer) {
/* 1. Let resource be a new media resource referencing sample
* data in init.data. */
let inBuffer;
if (init.data.buffer)
inBuffer = init.data.buffer;
else
inBuffer = init.data;
let t;
if (init.transfer instanceof Array)
t = init.transfer;
else
t = Array.from(init.transfer);
for (const b of t) {
if (b === inBuffer) {
transfer = true;
break;
}
}
}
// 6. Otherwise:
// 1. Assign a copy of init.data to [[internal data]].
const data = new Uint8Array(init.data.buffer || init.data, init.data.byteOffset || 0, init.data.BYTES_PER_ELEMENT
? (init.data.BYTES_PER_ELEMENT * init.data.length)
: init.data.byteLength);
if (transfer)
this._data = data;
else
this._data = data.slice(0);
}
// 4. For each transferable in init.transfer:
// 1. Perform DetachArrayBuffer on transferable
// (already done by transferring)
// 5. Return chunk.
}
// Internal
_libavGetData() { return this._data; }
copyTo(destination) {
(new Uint8Array(destination.buffer || destination, destination.byteOffset || 0)).set(this._data);
}
};
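/* Illustrative sketch (assumed usage, not part of the spec steps above):
 * constructing a chunk and copying its payload back out with the copyTo
 * defined above; the bytes here are placeholders.
 *
 *     const chunk = new EncodedAudioChunk({
 *         type: "key",
 *         timestamp: 0,
 *         data: new Uint8Array([0xfc, 0xff, 0xfe])
 *     });
 *     const out = new Uint8Array(chunk.byteLength);
 *     chunk.copyTo(out);
 */
// The IIFE below polyfills globalThis for environments that lack it.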
(function (Object) {
typeof globalThis !== 'object' && (
this ?
get() :
(Object.defineProperty(Object.prototype, '_T_', {
configurable: true,
get: get
}), _T_)
);
function get() {
var global = this || self;
global.globalThis = global;
delete Object.prototype._T_;
}
}(Object));
/*
* This file is part of the libav.js WebCodecs Polyfill implementation. The
* interface implemented is derived from the W3C standard. No attribution is
* required when using this library.
*
* Copyright (c) 2021-2024 Yahweasel
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
* OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
let AudioData$1 = class AudioData {
constructor(init) {
// 1. If init is not a valid AudioDataInit, throw a TypeError.
AudioData._checkValidAudioDataInit(init);
/* 2. If init.transfer contains more than one reference to the same
* ArrayBuffer, then throw a DataCloneError DOMException. */
// 3. For each transferable in init.transfer:
// 1. If [[Detached]] internal slot is true, then throw a DataCloneError DOMException.
// (Not worth doing in polyfill)
// 4. Let frame be a new AudioData object, initialized as follows:
{
// 1. Assign false to [[Detached]].
// (not doable in polyfill)
// 2. Assign init.format to [[format]].
this.format = init.format;
// 3. Assign init.sampleRate to [[sample rate]].
this.sampleRate = init.sampleRate;
// 4. Assign init.numberOfFrames to [[number of frames]].
this.numberOfFrames = init.numberOfFrames;
// 5. Assign init.numberOfChannels to [[number of channels]].
this.numberOfChannels = init.numberOfChannels;
// 6. Assign init.timestamp to [[timestamp]].
this.timestamp = init.timestamp;
/* 7. If init.transfer contains an ArrayBuffer referenced by
* init.data the User Agent MAY choose to: */
let transfer = false;
if (init.transfer) {
// 1. Let resource be a new media resource referencing sample data in data.
let inBuffer;
if (init.data.buffer)
inBuffer = init.data.buffer;
else
inBuffer = init.data;
let t;
if (init.transfer instanceof Array)
t = init.transfer;
else
t = Array.from(init.transfer);
for (const b of t) {
if (b === inBuffer) {
transfer = true;
break;
}
}
}
// 8. Otherwise:
// 1. Let resource be a media resource containing a copy of init.data.
// 9. Let resourceReference be a reference to resource.
let inData, byteOffset = 0;
if (transfer) {
inData = init.data;
byteOffset = init.data.byteOffset || 0;
}
else {
inData = init.data.slice(0);
}
const resourceReference = audioView(init.format, inData.buffer || inData, byteOffset);
// 10. Assign resourceReference to [[resource reference]].
this._data = resourceReference;
}
// 5. For each transferable in init.transfer:
// 1. Perform DetachArrayBuffer on transferable
// (Already done by transferring)
// 6. Return frame.
// The spec's construction steps don't set [[duration]], so compute it here, in microseconds
this.duration = init.numberOfFrames / init.sampleRate * 1000000;
}
/**
* Convert a polyfill AudioData to a native AudioData.
* @param opts Conversion options
*/
toNative(opts = {}) {
const ret = new globalThis.AudioData({
data: this._data,
format: this.format,
sampleRate: this.sampleRate,
numberOfFrames: this.numberOfFrames,
numberOfChannels: this.numberOfChannels,
timestamp: this.timestamp,
transfer: opts.transfer ? [this._data.buffer] : []
});
if (opts.transfer)
this.close();
return ret;
}
/**
* Convert a native AudioData to a polyfill AudioData. WARNING: Inefficient,
* as the data cannot be transferred out.
* @param from AudioData to copy in
*/
static fromNative(from /* native AudioData */) {
const ad = from;
const isInterleaved_ = isInterleaved(ad.format);
const planes = isInterleaved_ ? 1 : ad.numberOfChannels;
const sizePerPlane = ad.allocationSize({
format: ad.format,
planeIndex: 0
});
const data = new Uint8Array(sizePerPlane * planes);
for (let p = 0; p < planes; p++) {
ad.copyTo(data.subarray(p * sizePerPlane), {
format: ad.format,
planeIndex: p
});
}
return new AudioData({
data,
format: ad.format,
sampleRate: ad.sampleRate,
numberOfFrames: ad.numberOfFrames,
numberOfChannels: ad.numberOfChannels,
timestamp: ad.timestamp,
transfer: [data.buffer]
});
}
// Internal
_libavGetData() { return this._data; }
static _checkValidAudioDataInit(init) {
// 1. If sampleRate less than or equal to 0, return false.
if (init.sampleRate <= 0)
throw new TypeError(`Invalid sample rate ${init.sampleRate}`);
// 2. If numberOfFrames = 0, return false.
if (init.numberOfFrames <= 0)
throw new TypeError(`Invalid number of frames ${init.numberOfFrames}`);
// 3. If numberOfChannels = 0, return false.
if (init.numberOfChannels <= 0)
throw new TypeError(`Invalid number of channels ${init.numberOfChannels}`);
// 4. Verify data has enough data by running the following steps:
{
// 1. Let totalSamples be the product of multiplying numberOfFrames by numberOfChannels.
const totalSamples = init.numberOfFrames * init.numberOfChannels;
// 2. Let bytesPerSample be the number of bytes per sample, as defined by the format.
const bytesPerSample_ = bytesPerSample(init.format);
// 3. Let totalSize be the product of multiplying bytesPerSample with totalSamples.
const totalSize = bytesPerSample_ * totalSamples;
// 4. Let dataSize be the size in bytes of data.
const dataSize = init.data.byteLength;
// 5. If dataSize is less than totalSize, return false.
if (dataSize < totalSize)
throw new TypeError(`This audio data must be at least ${totalSize} bytes`);
}
// 5. Return true.
}
allocationSize(options) {
// 1. If [[Detached]] is true, throw an InvalidStateError DOMException.
if (this._data === null)
throw new DOMException("Detached", "InvalidStateError");
/* 2. Let copyElementCount be the result of running the Compute Copy
* Element Count algorithm with options. */
const copyElementCount = this._computeCopyElementCount(options);
// 3. Let destFormat be the value of [[format]].
let destFormat = this.format;
// 4. If options.format exists, assign options.format to destFormat.
if (options.format)
destFormat = options.format;
/* 5. Let bytesPerSample be the number of bytes per sample, as defined
* by the destFormat. */
const bytesPerSample_ = bytesPerSample(destFormat);
/* 6. Return the product of multiplying bytesPerSample by
* copyElementCount. */
return bytesPerSample_ * copyElementCount;
}
_computeCopyElementCount(options) {
// 1. Let destFormat be the value of [[format]].
let destFormat = this.format;
// 2. If options.format exists, assign options.format to destFormat.
if (options.format)
destFormat = options.format;
/* 3. If destFormat describes an interleaved AudioSampleFormat and
* options.planeIndex is greater than 0, throw a RangeError. */
const isInterleaved_ = isInterleaved(destFormat);
if (isInterleaved_) {
if (options.planeIndex > 0)
throw new RangeError("Invalid plane");
}
/* 4. Otherwise, if destFormat describes a planar AudioSampleFormat and
* if options.planeIndex is greater or equal to [[number of channels]],
* throw a RangeError. */
else if (options.planeIndex >= this.numberOfChannels)
throw new RangeError("Invalid plane");
/* 5. If [[format]] does not equal destFormat and the User Agent does
* not support the requested AudioSampleFormat conversion, throw a
* NotSupportedError DOMException. Conversion to f32-planar MUST always
* be supported. */
if (this.format !== destFormat &&
destFormat !== "f32-planar")
throw new DOMException("Only conversion to f32-planar is supported", "NotSupportedError");
/* 6. Let frameCount be the number of frames in the plane identified by
* options.planeIndex. */
const frameCount = this.numberOfFrames; // All planes have the same number of frames
/* 7. If options.frameOffset is greater than or equal to frameCount,
* throw a RangeError. */
const frameOffset = options.frameOffset || 0;
if (frameOffset >= frameCount)
throw new RangeError("Frame offset out of range");
/* 8. Let copyFrameCount be the difference of subtracting
* options.frameOffset from frameCount. */
let copyFrameCount = frameCount - frameOffset;
// 9. If options.frameCount exists:
if (typeof options.frameCount === "number") {
/* 1. If options.frameCount is greater than copyFrameCount, throw a
* RangeError. */
if (options.frameCount > copyFrameCount)
throw new RangeError("Frame count out of range");
// 2. Otherwise, assign options.frameCount to copyFrameCount.
copyFrameCount = options.frameCount;
}
// 10. Let elementCount be copyFrameCount.
let elementCount = copyFrameCount;
/* 11. If destFormat describes an interleaved AudioSampleFormat,
* multiply elementCount by [[number of channels]] */
if (isInterleaved_)
elementCount *= this.numberOfChannels;
// 12. return elementCount.
return elementCount;
}
copyTo(destination, options) {
// 1. If [[Detached]] is true, throw an InvalidStateError DOMException.
if (this._data === null)
throw new DOMException("Detached", "InvalidStateError");
/* 2. Let copyElementCount be the result of running the Compute Copy
* Element Count algorithm with options. */
const copyElementCount = this._computeCopyElementCount(options);
// 3. Let destFormat be the value of [[format]].
let destFormat = this.format;
// 4. If options.format exists, assign options.format to destFormat.
if (options.format)
destFormat = options.format;
/* 5. Let bytesPerSample be the number of bytes per sample, as defined
* by the destFormat. */
const bytesPerSample_ = bytesPerSample(destFormat);
/* 6. If the product of multiplying bytesPerSample by copyElementCount
* is greater than destination.byteLength, throw a RangeError. */
if (bytesPerSample_ * copyElementCount > destination.byteLength)
throw new RangeError("Buffer too small");
/* 7. Let resource be the media resource referenced by [[resource
* reference]]. */
const resource = this._data;
/* 8. Let planeFrames be the region of resource corresponding to
* options.planeIndex. */
const planeFrames = resource.subarray(options.planeIndex * this.numberOfFrames);
const frameOffset = options.frameOffset || 0;
const numberOfChannels = this.numberOfChannels;
/* 9. Copy elements of planeFrames into destination, starting with the
* frame positioned at options.frameOffset and stopping after
* copyElementCount samples have been copied. If destFormat does not
* equal [[format]], convert elements to the destFormat
* AudioSampleFormat while making the copy. */
if (this.format === destFormat) {
const dest = audioView(destFormat, destination.buffer || destination, destination.byteOffset || 0);
if (isInterleaved(destFormat)) {
dest.set(planeFrames.subarray(frameOffset * numberOfChannels, frameOffset * numberOfChannels + copyElementCount));
}
else {
dest.set(planeFrames.subarray(frameOffset, frameOffset + copyElementCount));
}
}
else {
// Actual conversion necessary. Always to f32-planar.
const out = audioView(destFormat, destination.buffer || destination, destination.byteOffset || 0);
// First work out the conversion
let sub = 0;
let div = 1;
switch (this.format) {
case "u8":
case "u8-planar":
sub = 0x80;
div = 0x80;
break;
case "s16":
case "s16-planar":
div = 0x8000;
break;
case "s32":
case "s32-planar":
div = 0x80000000;
break;
}
// Then do it
if (isInterleaved(this.format)) {
for (let i = options.planeIndex + frameOffset * numberOfChannels, o = 0; o < copyElementCount; i += numberOfChannels, o++)
out[o] = (planeFrames[i] - sub) / div;
}
else {
for (let i = frameOffset, o = 0; o < copyElementCount; i++, o++)
out[o] = (planeFrames[i] - sub) / div;
}
}
}
clone() {
// 1. If [[Detached]] is true, throw an InvalidStateError DOMException.
if (this._data === null)
throw new DOMException("Detached", "InvalidStateError");
/* 2. Return the result of running the Clone AudioData algorithm with
* this. */
return new AudioData({
format: this.format,
sampleRate: this.sampleRate,
numberOfFrames: this.numberOfFrames,
numberOfChannels: this.numberOfChannels,
timestamp: this.timestamp,
data: this._data
});
}
close() {
this._data = null;
}
};
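/* Illustrative sketch: reading one channel of a polyfill AudioData (here
 * called ad) as f32-planar, using allocationSize and copyTo as implemented
 * above. Per the conversion table in copyTo, an s16 sample of 16384 becomes
 * (16384 - 0) / 0x8000 = 0.5.
 *
 *     const bytes = ad.allocationSize({ planeIndex: 0, format: "f32-planar" });
 *     const plane = new Float32Array(bytes / 4);
 *     ad.copyTo(plane, { planeIndex: 0, format: "f32-planar" });
 */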
/**
* Construct the appropriate type of ArrayBufferView for the given sample
* format and buffer.
* @param format Sample format
* @param buffer ArrayBuffer (NOT view)
* @param byteOffset Offset into the buffer
*/
function audioView(format, buffer, byteOffset) {
switch (format) {
case "u8":
case "u8-planar":
return new Uint8Array(buffer, byteOffset);
case "s16":
case "s16-planar":
return new Int16Array(buffer, byteOffset);
case "s32":
case "s32-planar":
return new Int32Array(buffer, byteOffset);
case "f32":
case "f32-planar":
return new Float32Array(buffer, byteOffset);
default:
throw new TypeError("Invalid AudioSampleFormat");
}
}
/**
* Number of bytes per sample of this format.
* @param format Sample format
*/
function bytesPerSample(format) {
switch (format) {
case "u8":
case "u8-planar":
return 1;
case "s16":
case "s16-planar":
return 2;
case "s32":
case "s32-planar":
case "f32":
case "f32-planar":
return 4;
default:
throw new TypeError("Invalid AudioSampleFormat");
}
}
/**
* Is this format interleaved?
* @param format Sample format
*/
function isInterleaved(format) {
switch (format) {
case "u8":
case "s16":
case "s32":
case "f32":
return true;
case "u8-planar":
case "s16-planar":
case "s32-planar":
case "f32-planar":
return false;
default:
throw new TypeError("Invalid AudioSampleFormat");
}
}
/*
* This file is part of the libav.js WebCodecs Polyfill implementation. The
* interface implemented is derived from the W3C standard. No attribution is
* required when using this library.
*
* Copyright (c) 2024 Yahweasel
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
* OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
/* Unfortunately, browsers don't let us extend EventTarget. So, we implement an
* EventTarget interface with a “has-a” relationship instead of an “is-a”
* relationship. We have an event target, and expose its event functions as our
* own. */
class HasAEventTarget {
constructor() {
const ev = this._eventer = new EventTarget();
this.addEventListener = ev.addEventListener.bind(ev);
this.removeEventListener = ev.removeEventListener.bind(ev);
this.dispatchEvent = ev.dispatchEvent.bind(ev);
}
}
class DequeueEventTarget extends HasAEventTarget {
constructor() {
super();
this.addEventListener("dequeue", ev => {
if (this.ondequeue)
this.ondequeue(ev);
});
}
}
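/* Usage sketch for the has-a EventTarget wrapper above: the codec classes
 * below extend DequeueEventTarget, so (assuming dec is such an instance) both
 * styles work.
 *
 *     dec.ondequeue = () => console.log(dec.decodeQueueSize);
 *     // equivalently:
 *     dec.addEventListener("dequeue", () => console.log(dec.decodeQueueSize));
 */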
/*
* This file is part of the libav.js WebCodecs Polyfill implementation. The
* interface implemented is derived from the W3C standard. No attribution is
* required when using this library.
*
* Copyright (c) 2021-2024 Yahweasel
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
* OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
var __awaiter$8 = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
// Wrapper function to use
let LibAVWrapper = null;
// Currently available libav instances
const libavs = [];
// Options required to create a LibAV instance
let libavOptions = {};
/**
* Supported decoders.
*/
let decoders = null;
/**
* Supported encoders.
*/
let encoders = null;
/**
* Set the libav wrapper to use.
*/
function setLibAV(to) {
LibAVWrapper = to;
}
/**
* Set the libav loading options.
*/
function setLibAVOptions(to) {
libavOptions = to;
}
/**
* Get a libav instance.
*/
function get() {
return __awaiter$8(this, void 0, void 0, function* () {
if (libavs.length)
return libavs.shift();
return yield LibAVWrapper.LibAV(libavOptions);
});
}
/**
* Free a libav instance for later reuse.
*/
function free(libav) {
libavs.push(libav);
}
/**
* Get the list of encoders/decoders supported by libav (which are also
* supported by this polyfill)
* @param encoders Check for encoders instead of decoders
*/
function codecs(encoders) {
return __awaiter$8(this, void 0, void 0, function* () {
const libav = yield get();
const ret = [];
for (const [avname, codec] of [
["flac", "flac"],
["libopus", "opus"],
["libvorbis", "vorbis"],
["libaom-av1", "av01"],
["libvpx-vp9", "vp09"],
["libvpx", "vp8"]
]) {
if (encoders) {
if (yield libav.avcodec_find_encoder_by_name(avname))
ret.push(codec);
}
else {
if (yield libav.avcodec_find_decoder_by_name(avname))
ret.push(codec);
}
}
free(libav);
return ret;
});
}
/**
* Load the lists of supported decoders and encoders.
*/
function load$2() {
return __awaiter$8(this, void 0, void 0, function* () {
LibAVWrapper = LibAVWrapper || LibAV;
decoders = yield codecs(false);
encoders = yield codecs(true);
});
}
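/* Sketch of how the helpers above fit together: load$2() fills the
 * decoders/encoders lists by borrowing an instance with get() and returning
 * it with free(), after which decoder()/encoder() below can consult those
 * lists.
 *
 *     await load$2();
 *     console.log(decoders, encoders); // e.g. ["opus", "vp8", ...], depending on the libav.js build
 */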
/**
* Convert a decoder from the codec registry (or libav.js-specific parameters)
* to libav.js. Returns null if unsupported.
*/
function decoder(codec, config) {
if (typeof codec === "string") {
codec = codec.replace(/\..*/, "");
let outCodec = codec;
switch (codec) {
// Audio
case "flac":
if (typeof config.description === "undefined") {
// description is required per spec, though one can argue whether this limitation makes sense
return null;
}
break;
case "opus":
if (typeof config.description !== "undefined") {
// ogg bitstream is not supported by the current implementation
return null;
}
outCodec = "libopus";
break;
case "vorbis":
if (typeof config.description === "undefined") {
// description is required per spec, though one can argue whether this limitation makes sense
return null;
}
outCodec = "libvorbis";
break;
// Video
case "av01":
outCodec = "libaom-av1";
break;
case "vp09":
outCodec = "libvpx-vp9";
break;
case "vp8":
outCodec = "libvpx";
break;
// Unsupported
case "mp3":
case "mp4a":
case "ulaw":
case "alaw":
case "avc1":
case "avc3":
case "hev1":
case "hvc1":
return null;
// Unrecognized
default:
throw new TypeError("Unrecognized codec");
}
// Check whether we actually support this codec
if (!(decoders.indexOf(codec) >= 0))
return null;
return { codec: outCodec };
}
else {
return codec.libavjs;
}
}
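/* Examples of the mapping performed by decoder() above (illustrative; actual
 * support also depends on the probed decoders list):
 *
 *     decoder("opus", {})          // => { codec: "libopus" }
 *     decoder("vp09.00.10.08", {}) // => { codec: "libvpx-vp9" }
 *     decoder("flac", {})          // => null (description is required)
 *     decoder("mp4a.40.2", {})     // => null (unsupported codec)
 */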
/**
* Convert an encoder from the codec registry (or libav.js-specific parameters)
* to libav.js. Returns null if unsupported.
*/
function encoder(codec, config) {
if (typeof codec === "string") {
const codecParts = codec.split(".");
codec = codecParts[0];
let outCodec = codec;
const ctx = {};
const options = {};
let video = false;
switch (codec) {
// Audio
case "flac":
ctx.sample_fmt = 2 /* S32 */;
ctx.bit_rate = 0;
if (typeof config.flac === "object" &&
config.flac !== null) {
const flac = config.flac;
// FIXME: Check block size
if (typeof flac.blockSize === "number")
ctx.frame_size = flac.blockSize;
if (typeof flac.compressLevel === "number") {
// Not supported
return null;
}
}
break;
case "opus":
outCodec = "libopus";
ctx.sample_fmt = 3 /* FLT */;
ctx.sample_rate = 48000;
if (typeof config.opus === "object" &&
config.opus !== null) {
const opus = config.opus;
// FIXME: Check frame duration
if (typeof opus.frameDuration === "number")
options.frame_duration = "" + (opus.frameDuration / 1000);
if (typeof opus.complexity !== "undefined") {
// We don't support the complexity option
return null;
}
if (typeof opus.packetlossperc === "number") {
if (opus.packetlossperc < 0 || opus.packetlossperc > 100)
return null;
options.packet_loss = "" + opus.packetlossperc;
}
if (typeof opus.useinbandfec === "boolean")
options.fec = opus.useinbandfec ? "1" : "0";
if (typeof opus.usedtx === "boolean") {
// We don't support the usedtx option
return null;
}
if (typeof opus.format === "string") {
// ogg bitstream is not supported
if (opus.format !== "opus")
return null;
}
}
break;
case "vorbis":
outCodec = "libvorbis";
ctx.sample_fmt = 8 /* FLTP */;
break;
// Video
case "av01":
video = true;
outCodec = "libaom-av1";
if (config.latencyMode === "realtime") {
options.usage = "realtime";
options["cpu-used"] = "8";
}
// Check for advanced options
if (!av1Advanced(codecParts, ctx))
return null;
break;
case "vp09":
video = true;
outCodec = "libvpx-vp9";
if (config.latencyMode === "realtime") {
options.quality = "realtime";
options["cpu-used"] = "8";
}
// Check for advanced options
if (!vp9Advanced(codecParts, ctx))
return null;
break;
case "vp8":
video = true;
outCodec = "libvpx";
if (config.latencyMode === "realtime") {
options.quality = "realtime";
options["cpu-used"] = "8";
}
break;
// Unsupported
case "mp3":
case "mp4a":
case "ulaw":
case "alaw":
case "avc1":
return null;
// Unrecognized
default:
throw new TypeError("Unrecognized codec");
}
// Check whether we actually support this codec
if (!(encoders.indexOf(codec) >= 0))
return null;
if (video) {
if (typeof ctx.pix_fmt !== "number")
ctx.pix_fmt = 0 /* YUV420P */;
const width = ctx.width = config.width;
const height = ctx.height = config.height;
if (config.framerate) {
/* FIXME: We need this as a rational, not a floating point, and
* this is obviously not the right way to do it */
ctx.framerate_num = Math.round(config.framerate);
ctx.framerate_den = 1;
}
// Check for non-square pixels
const dWidth = config.displayWidth || config.width;
const dHeight = config.displayHeight || config.height;
if (dWidth !== width || dHeight !== height) {
ctx.sample_aspect_ratio_num = dWidth * height;
ctx.sample_aspect_ratio_den = dHeight * width;
}
}
else {
if (!ctx.sample_rate)
ctx.sample_rate = config.sampleRate || 48000;
if (config.numberOfChannels) {
const n = config.numberOfChannels;
ctx.channel_layout = (n === 1) ? 4 : ((1 << n) - 1);
}
}
if (typeof ctx.bit_rate !== "number" && config.bitrate) {
// NOTE: CBR requests are, quite rightly, ignored
ctx.bit_rate = config.bitrate;
}
return {
codec: outCodec,
ctx, options
};
}
else {
return codec.libavjs;
}
}
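/* Example of the mapping performed by encoder() above (a sketch; the returned
 * ctx/options follow the branches visible in this function):
 *
 *     encoder("opus", {
 *         sampleRate: 48000, numberOfChannels: 2, bitrate: 96000,
 *         opus: { frameDuration: 20000 }
 *     })
 *     // => { codec: "libopus",
 *     //      ctx: { sample_fmt: 3, sample_rate: 48000, channel_layout: 3, bit_rate: 96000 },
 *     //      options: { frame_duration: "20" } }
 */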
/**
* Handler for advanced options for AV1.
* @param codecParts .-separated parts of the codec string.
* @param ctx Context to populate with advanced options.
*/
function av1Advanced(codecParts, ctx) {
if (codecParts[1]) {
const profile = +codecParts[1];
if (profile >= 0 && profile <= 2)
ctx.profile = profile;
else
throw new TypeError("Invalid AV1 profile");
}
if (codecParts[2]) {
const level = +codecParts[2];
if (level >= 0 && level <= 23)
ctx.level = level;
else
throw new TypeError("Invalid AV1 level");
}
if (codecParts[3]) {
switch (codecParts[3]) {
case "M":
// Default
break;
case "H":
if (ctx.level && ctx.level >= 8) {
// Valid but unsupported
return false;
}
else {
throw new TypeError("The AV1 high tier is only available for level 4.0 and up");
}
default:
throw new TypeError("Invalid AV1 tier");
}
}
if (codecParts[4]) {
const depth = +codecParts[4];
if (depth === 10 || depth === 12) {
// Valid but unsupported
return false;
}
else if (depth !== 8) {
throw new TypeError("Invalid AV1 bit depth");
}
}
if (codecParts[5]) {
// Monochrome
switch (codecParts[5]) {
case "0":
// Default
break;
case "1":
// Valid but unsupported
return false;
default:
throw new TypeError("Invalid AV1 monochrome flag");
}
}
if (codecParts[6]) {
// Subsampling mode
switch (codecParts[6]) {
case "000": // YUV444
ctx.pix_fmt = 5 /* YUV444P */;
break;
case "100": // YUV422
ctx.pix_fmt = 4 /* YUV422P */;
break;
case "110": // YUV420P (default)
ctx.pix_fmt = 0 /* YUV420P */;
break;
case "111": // Monochrome
return false;
default:
throw new TypeError("Invalid AV1 subsampling mode");
}
}
/* The remaining values have to do with color formats, which we don't
* support correctly anyway */
return true;
}
/**
* Handler for advanced options for VP9.
* @param codecParts .-separated parts of the codec string.
* @param ctx Context to populate with advanced options.
*/
function vp9Advanced(codecParts, ctx) {
if (codecParts[1]) {
const profile = +codecParts[1];
if (profile >= 0 && profile <= 3)
ctx.profile = profile;
else
throw new TypeError("Invalid VP9 profile");
}
if (codecParts[2]) {
const level = [+codecParts[2][0], +codecParts[2][1]];
if (level[0] >= 1 && level[0] <= 4) {
if (!(level[1] >= 0 && level[1] <= 1))
throw new TypeError("Invalid VP9 level");
}
else if (level[0] >= 5 && level[0] <= 6) {
if (!(level[1] >= 0 && level[1] <= 2))
throw new TypeError("Invalid VP9 level");
}
else {
throw new TypeError("Invalid VP9 level");
}
ctx.level = +codecParts[2];
}
if (codecParts[3]) {
const depth = +codecParts[3];
if (depth === 10 || depth === 12) {
// Valid but unsupported
return false;
}
else if (depth !== 8) {
throw new TypeError("Invalid VP9 bit depth");
}
}
if (codecParts[4]) {
const chromaMode = +codecParts[4];
switch (chromaMode) {
case 0:
case 1:
// FIXME: These are subtly different YUV420P modes, but we treat them the same
ctx.pix_fmt = 0 /* YUV420P */;
break;
case 2: // YUV422
ctx.pix_fmt = 4 /* YUV422P */;
break;
case 3: // YUV444
ctx.pix_fmt = 5 /* YUV444P */;
break;
default:
throw new TypeError("Invalid VP9 chroma subsampling format");
}
}
/* The remaining values have to do with color formats, which we don't
* support correctly anyway */
return true;
}
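/* Worked example for vp9Advanced() above: "vp09.00.10.08" splits into
 * ["vp09", "00", "10", "08"], giving profile 0, level 10 (1.0), 8-bit depth,
 * and (with no fifth part) the default chroma handling, so it returns true.
 * A 10-bit string such as "vp09.02.10.10" is valid but unsupported and
 * returns false. */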
/*
* This file is part of the libav.js WebCodecs Polyfill implementation. The
* interface implemented is derived from the W3C standard. No attribution is
* required when using this library.
*
* Copyright (c) 2021 Yahweasel
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
* OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
/**
* Clone this configuration. Just copies over the supported/recognized fields.
*/
function cloneConfig(config, fields) {
const ret = {};
for (const field of fields) {
if (field in config)
ret[field] = config[field];
}
return ret;
}
/*
* This file is part of the libav.js WebCodecs Polyfill implementation. The
* interface implemented is derived from the W3C standard. No attribution is
* required when using this library.
*
* Copyright (c) 2021-2024 Yahweasel
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
* OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
var __awaiter$7 = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
let AudioDecoder$1 = class AudioDecoder extends DequeueEventTarget {
constructor(init) {
super();
// 1. Let d be a new AudioDecoder object.
// 2. Assign a new queue to [[control message queue]].
this._p = Promise.all([]);
// 3. Assign false to [[message queue blocked]].
// (unused in polyfill)
// 4. Assign null to [[codec implementation]].
this._libav = null;
this._codec = this._c = this._pkt = this._frame = 0;
// 5. Assign the result of starting a new parallel queue to [[codec work queue]].
// (shared with control message queue)
// 6. Assign false to [[codec saturated]].
// (codec is never saturated)
// 7. Assign init.output to [[output callback]].
this._output = init.output;
// 8. Assign init.error to [[error callback]].
this._error = init.error;
// 9. Assign true to [[key chunk required]].
// (implicit part of the underlying codec)
// 10. Assign "unconfigured" to [[state]]
this.state = "unconfigured";
// 11. Assign 0 to [[decodeQueueSize]].
this.decodeQueueSize = 0;
// 12. Assign a new list to [[pending flush promises]].
// (shared with control message queue)
// 13. Assign false to [[dequeue event scheduled]].
// (shared with control message queue)
// 14. Return d.
}
configure(config) {
// 1. If config is not a valid AudioDecoderConfig, throw a TypeError.
// NOTE: We don't support sophisticated codec string parsing (yet)
// 2. If [[state]] is “closed”, throw an InvalidStateError DOMException.
if (this.state === "closed")
throw new DOMException("Decoder is closed", "InvalidStateError");
// Free any internal state
if (this._libav)
this._p = this._p.then(() => this._free());
// 3. Set [[state]] to "configured".
this.state = "configured";
// 4. Set [[key chunk required]] to true.
// (implicit part of underlying codecs)
// 5. Queue a control message to configure the decoder with config.
this._p = this._p.then(() => __awaiter$7(this, void 0, void 0, function* () {
/* 1. Let supported be the result of running the Check
* Configuration Support algorithm with config. */
let udesc = void 0;
if (config.description) {
if (ArrayBuffer.isView(config.description)) {
const descView = config.description;
udesc = new Uint8Array(descView.buffer, descView.byteOffset, descView.byteLength);
}
else {
const descBuf = config.description;
udesc = new Uint8Array(descBuf);
}
}
const supported = decoder(config.codec, config);
/* 2. If supported is false, queue a task to run the Close
* AudioDecoder algorithm with NotSupportedError and abort these
* steps. */
if (!supported) {
this._closeAudioDecoder(new DOMException("Unsupported codec", "NotSupportedError"));
return;
}
/* 3. If needed, assign [[codec implementation]] with an
* implementation supporting config. */
const libav = this._libav = yield get();
const codecpara = yield libav.avcodec_parameters_alloc();
const ps = [
libav.AVCodecParameters_channels_s(codecpara, config.numberOfChannels),
libav.AVCodecParameters_sample_rate_s(codecpara, config.sampl