(self["webpackChunkvideos"] = self["webpackChunkvideos"] || []).push([[537],{
/***/ 3638:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
/**
* Inlined from https://github.com/Jam3/audiobuffer-to-wav/commit/2272eb09bd46a05e50a6d684d908aa6f13c58f63#diff-e727e4bdf3657fd1d798edcd6b099d6e092f8573cba266154583a746bba0f346
*/
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.audioBufferToWav = audioBufferToWav;
function interleave(inputL, inputR) {
const length = inputL.length + inputR.length;
const result = new Float32Array(length);
let index = 0;
let inputIndex = 0;
while (index < length) {
result[index++] = inputL[inputIndex];
result[index++] = inputR[inputIndex];
inputIndex++;
}
return result;
}
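// Illustrative sketch (hypothetical values): interleave() merges two mono channels
// into a single buffer of alternating left/right samples.
//   interleave(new Float32Array([1, 2]), new Float32Array([3, 4]))
//   // => Float32Array [1, 3, 2, 4]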
function writeFloat32(output, offset, input) {
for (let i = 0; i < input.length; i++, offset += 4) {
output.setFloat32(offset, input[i], true);
}
}
function floatTo16BitPCM(output, offset, input) {
for (let i = 0; i < input.length; i++, offset += 2) {
const s = Math.max(-1, Math.min(1, input[i]));
output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
}
}
function writeString(view, offset, string) {
for (let i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
function encodeWAV({ samples, format, sampleRate, numChannels, bitDepth, }) {
const bytesPerSample = bitDepth / 8;
const blockAlign = numChannels * bytesPerSample;
const buffer = new ArrayBuffer(44 + samples.length * bytesPerSample);
const view = new DataView(buffer);
/* RIFF identifier */
writeString(view, 0, 'RIFF');
/* RIFF chunk length */
view.setUint32(4, 36 + samples.length * bytesPerSample, true);
/* RIFF type */
writeString(view, 8, 'WAVE');
/* format chunk identifier */
writeString(view, 12, 'fmt ');
/* format chunk length */
view.setUint32(16, 16, true);
/* sample format (raw) */
view.setUint16(20, format, true);
/* channel count */
view.setUint16(22, numChannels, true);
/* sample rate */
view.setUint32(24, sampleRate, true);
/* byte rate (sample rate * block align) */
view.setUint32(28, sampleRate * blockAlign, true);
/* block align (channel count * bytes per sample) */
view.setUint16(32, blockAlign, true);
/* bits per sample */
view.setUint16(34, bitDepth, true);
/* data chunk identifier */
writeString(view, 36, 'data');
/* data chunk length */
view.setUint32(40, samples.length * bytesPerSample, true);
if (format === 1) {
// Raw PCM
floatTo16BitPCM(view, 44, samples);
}
else {
writeFloat32(view, 44, samples);
}
return buffer;
}
function audioBufferToWav(buffer, opt) {
const numChannels = buffer.numberOfChannels;
const { sampleRate } = buffer;
const format = opt.float32 ? 3 : 1;
const bitDepth = format === 3 ? 32 : 16;
let result;
if (numChannels === 2) {
result = interleave(buffer.getChannelData(0), buffer.getChannelData(1));
}
else {
result = buffer.getChannelData(0);
}
return encodeWAV({
samples: result,
format,
sampleRate,
numChannels,
bitDepth,
});
}
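// Illustrative usage sketch (hypothetical values; assumes a browser AudioContext):
//   const ctx = new AudioContext();
//   const buffer = ctx.createBuffer(2, ctx.sampleRate, ctx.sampleRate); // 1s of stereo silence
//   const wav = audioBufferToWav(buffer, { float32: false });           // 16-bit PCM
//   // wav is an ArrayBuffer: 44 header bytes plus 2 bytes per interleaved sample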
/***/ }),
/***/ 7996:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.audioBufferToDataUrl = void 0;
const audio_buffer_to_wav_1 = __webpack_require__(3638);
/*
* @description Takes an AudioBuffer instance and converts it to a Base 64 Data URL so it can be passed to an <audio> tag.
* @see [Documentation](https://remotion.dev/docs/audio-buffer-to-data-url)
*/
const audioBufferToDataUrl = (buffer) => {
const wavAsArrayBuffer = (0, audio_buffer_to_wav_1.audioBufferToWav)(buffer, {
float32: true,
});
let binary = '';
const bytes = new Uint8Array(wavAsArrayBuffer);
const len = bytes.byteLength;
for (let i = 0; i < len; i++) {
binary += String.fromCharCode(bytes[i]);
}
return 'data:audio/wav;base64,' + window.btoa(binary);
};
exports.audioBufferToDataUrl = audioBufferToDataUrl;
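// Illustrative usage sketch (hypothetical values; assumes a browser AudioContext):
//   const ctx = new AudioContext();
//   const buffer = ctx.createBuffer(1, ctx.sampleRate, ctx.sampleRate); // 1s, mono
//   const url = audioBufferToDataUrl(buffer); // "data:audio/wav;base64,..."
//   // The returned string can be used as the src of an <audio> element.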
/***/ }),
/***/ 1546:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.combineFloat32Arrays = void 0;
const combineFloat32Arrays = (arrays) => {
if (arrays.length === 0) {
return new Float32Array([]);
}
if (arrays.length === 1) {
return arrays[0];
}
let totalLength = 0;
for (const array of arrays) {
totalLength += array.length;
}
const result = new Float32Array(totalLength);
let offset = 0;
for (const array of arrays) {
result.set(array, offset);
offset += array.length;
}
return result;
};
exports.combineFloat32Arrays = combineFloat32Arrays;
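// Illustrative sketch (hypothetical values):
//   combineFloat32Arrays([new Float32Array([1, 2]), new Float32Array([3])])
//   // => Float32Array [1, 2, 3]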
/***/ }),
/***/ 9144:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createSmoothSvgPath = void 0;
const line = (pointA, pointB) => {
const lengthX = pointB.x - pointA.x;
const lengthY = pointB.y - pointA.y;
return {
length: Math.sqrt(lengthX ** 2 + lengthY ** 2),
angle: Math.atan2(lengthY, lengthX),
};
};
const controlPoint = ({ current, previous, next, reverse, }) => {
const p = previous || current;
const n = next || current;
// The smoothing ratio
const smoothing = 0.2;
// Properties of the opposed-line
const o = line(p, n);
const angle = o.angle + (reverse ? Math.PI : 0);
const length = o.length * smoothing;
const x = current.x + Math.cos(angle) * length;
const y = current.y + Math.sin(angle) * length;
return { x, y };
};
const createSmoothSvgPath = ({ points }) => {
return points.reduce((acc, current, i, a) => {
if (i === 0) {
return `M ${current.x},${current.y}`;
}
const { x, y } = current;
const previous = a[i - 1];
const twoPrevious = a[i - 2];
const next = a[i + 1];
const { x: cp1x, y: cp1y } = controlPoint({
current: previous,
previous: twoPrevious,
next: current,
reverse: false,
});
const { x: cp2x, y: cp2y } = controlPoint({
current,
previous,
next,
reverse: true,
});
return `${acc} C ${cp1x},${cp1y} ${cp2x},${cp2y} ${x},${y}`;
}, '');
};
exports.createSmoothSvgPath = createSmoothSvgPath;
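// Illustrative sketch (hypothetical points): the result starts with an "M" command and
// appends one cubic Bézier ("C") segment per remaining point.
//   createSmoothSvgPath({ points: [{ x: 0, y: 0 }, { x: 50, y: 100 }, { x: 100, y: 0 }] })
//   // => "M 0,0 C ... C ..." (usable as the d attribute of an SVG <path>)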
/***/ }),
/***/ 5633:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.fetchWithCorsCatch = void 0;
const fetchWithCorsCatch = async (src, init) => {
try {
const response = await fetch(src, {
mode: 'cors',
referrerPolicy: 'no-referrer-when-downgrade',
...init,
});
return response;
}
catch (err) {
const error = err;
if (
// Chrome
error.message.includes('Failed to fetch') ||
// Safari
error.message.includes('Load failed') ||
// Firefox
error.message.includes('NetworkError when attempting to fetch resource')) {
throw new TypeError(`Failed to read from ${src}: ${error.message}. Does the resource support CORS?`);
}
throw err;
}
};
exports.fetchWithCorsCatch = fetchWithCorsCatch;
/***/ }),
/***/ 1603:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
// Adapted from node-fft project by Joshua Wong and Ben Bryan
// https://github.com/vail-systems/node-fft
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.complexMagnitude = exports.complexMultiply = exports.complexSubtract = exports.complexAdd = void 0;
const complexAdd = function (a, b) {
return [a[0] + b[0], a[1] + b[1]];
};
exports.complexAdd = complexAdd;
const complexSubtract = function (a, b) {
return [a[0] - b[0], a[1] - b[1]];
};
exports.complexSubtract = complexSubtract;
const complexMultiply = function (a, b) {
return [a[0] * b[0] - a[1] * b[1], a[0] * b[1] + a[1] * b[0]];
};
exports.complexMultiply = complexMultiply;
const complexMagnitude = function (c) {
return Math.sqrt(c[0] * c[0] + c[1] * c[1]);
};
exports.complexMagnitude = complexMagnitude;
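// Illustrative sketch (hypothetical values), with complex numbers as [real, imaginary] pairs:
//   complexMultiply([1, 1], [1, -1]) // => [2, 0]  ((1 + i)(1 - i) = 2)
//   complexMagnitude([3, 4])         // => 5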
/***/ }),
/***/ 5372:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
// Adapted from node-fft project by Joshua Wong and Ben Bryan
// https://github.com/vail-systems/node-fft
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.exponent = void 0;
const mapExponent = {};
const exponent = function (k, N) {
const x = -2 * Math.PI * (k / N);
mapExponent[N] = mapExponent[N] || {};
mapExponent[N][k] = mapExponent[N][k] || [Math.cos(x), Math.sin(x)]; // [Real, Imaginary]
return mapExponent[N][k];
};
exports.exponent = exponent;
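// Illustrative sketch (hypothetical values): returns the memoized twiddle factor
// e^(-2πik/N) as a [real, imaginary] pair.
//   exponent(1, 4) // => [cos(-π/2), sin(-π/2)] ≈ [0, -1]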
/***/ }),
/***/ 4750:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
// Adapted from node-fft project by Joshua Wong and Ben Bryan
// https://github.com/vail-systems/node-fft
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.fftAccurate = void 0;
const complex_1 = __webpack_require__(1603);
const exponent_1 = __webpack_require__(5372);
const fftAccurate = function (vector) {
const X = [];
const N = vector.length;
// Base case is X = x + 0i since our input is assumed to be real only.
if (N === 1) {
if (Array.isArray(vector[0])) {
// If input vector contains complex numbers
return [[vector[0][0], vector[0][1]]];
}
return [[vector[0], 0]];
}
// Recurse: all even samples
const X_evens = (0, exports.fftAccurate)(vector.filter((_, ix) => ix % 2 === 0));
// Recurse: all odd samples
const X_odds = (0, exports.fftAccurate)(vector.filter((__, ix) => ix % 2 === 1));
// Now, perform N/2 operations!
for (let k = 0; k < N / 2; k++) {
// t is a complex number!
const t = X_evens[k];
const e = (0, complex_1.complexMultiply)((0, exponent_1.exponent)(k, N), X_odds[k]);
X[k] = (0, complex_1.complexAdd)(t, e);
X[k + N / 2] = (0, complex_1.complexSubtract)(t, e);
}
return X;
};
exports.fftAccurate = fftAccurate;
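// Illustrative sketch (hypothetical input): a unit impulse has a flat spectrum.
//   fftAccurate([1, 0, 0, 0]) // => [[1, 0], [1, 0], [1, 0], [1, 0]]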
/***/ }),
/***/ 5226:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
// https://pastebin.com/raw/D42RbPe5
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.fftFast = void 0;
// Function to reverse bits in an integer
function reverseBits(num, numBits) {
let result = 0;
for (let i = 0; i < numBits; i++) {
result = (result << 1) | ((num >> i) & 1);
}
return result;
}
// Hamming window function
function hammingWindow(N) {
const win = new Array(N);
for (let i = 0; i < N; i++) {
win[i] = 0.54 - 0.46 * Math.cos((2 * Math.PI * i) / (N - 1));
}
return win;
}
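// Illustrative values (hypothetical size): the window tapers from 0.54 - 0.46 = 0.08
// at both ends to 1.0 in the middle.
//   hammingWindow(5) // ≈ [0.08, 0.54, 1.0, 0.54, 0.08]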
// Function to calculate the bit-reversed permutation indices
function bitReversePermutation(N) {
const bitReversed = new Array(N);
for (let i = 0; i < N; i++) {
bitReversed[i] = reverseBits(i, Math.log2(N));
}
return bitReversed;
}
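// Illustrative sketch (hypothetical size):
//   bitReversePermutation(8) // => [0, 4, 2, 6, 1, 5, 3, 7]
//   // e.g. index 1 = 0b001 reversed over log2(8) = 3 bits is 0b100 = 4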
const fftFast = function (vector) {
const N = vector.length;
const X = new Array(N);
if (N <= 1) {
for (let i = 0; i < vector.length; i++) {
const value = vector[i];
X[i] = [value, 0];
}
return X;
}
// Apply a windowing function to the input data
const window = hammingWindow(N); // You can choose a different window function if needed
for (let i = 0; i < N; i++) {
X[i] = [vector[i] * window[i], 0];
}
// Bit-Reversal Permutation
const bitReversed = bitReversePermutation(N);
for (let i = 0; i < N; i++) {
X[i] = [vector[bitReversed[i]], 0];
}
// Cooley-Tukey FFT
for (let s = 1; s <= Math.log2(N); s++) {
const m = 1 << s; // Number of elements in each subarray
const mHalf = m / 2; // Half the number of elements in each subarray
const angleIncrement = (2 * Math.PI) / m;
for (let k = 0; k < N; k += m) {
let omegaReal = 1.0;
let omegaImag = 0.0;
for (let j = 0; j < mHalf; j++) {
const tReal = omegaReal * X[k + j + mHalf][0] - omegaImag * X[k + j + mHalf][1];
const tImag = omegaReal * X[k + j + mHalf][1] + omegaImag * X[k + j + mHalf][0];
const uReal = X[k + j][0];
const uImag = X[k + j][1];
X[k + j] = [uReal + tReal, uImag + tImag];
X[k + j + mHalf] = [uReal - tReal, uImag - tImag];
// Twiddle factor update
const tempReal = omegaReal * Math.cos(angleIncrement) -
omegaImag * Math.sin(angleIncrement);
omegaImag =
omegaReal * Math.sin(angleIncrement) +
omegaImag * Math.cos(angleIncrement);
omegaReal = tempReal;
}
}
}
return X;
};
exports.fftFast = fftFast;
/***/ }),
/***/ 9894:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
// Adapted from node-fft project by Joshua Wong and Ben Bryan
// https://github.com/vail-systems/node-fft
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getVisualization = void 0;
const fft_accurate_1 = __webpack_require__(4750);
const fft_fast_1 = __webpack_require__(5226);
const mag_1 = __webpack_require__(8764);
const smoothing_1 = __webpack_require__(7557);
const to_int_16_1 = __webpack_require__(1868);
const getVisualization = ({ sampleSize, data, sampleRate, frame, fps, maxInt, optimizeFor, dataOffsetInSeconds, }) => {
const isPowerOfTwo = sampleSize > 0 && (sampleSize & (sampleSize - 1)) === 0;
if (!isPowerOfTwo) {
throw new TypeError(`The argument "bars" must be a power of two. For example: 64, 128. Got instead: ${sampleSize}`);
}
if (!fps) {
throw new TypeError('The argument "fps" was not provided');
}
if (data.length < sampleSize) {
throw new TypeError('Audio data is not big enough to provide ' + sampleSize + ' bars.');
}
const start = Math.floor((frame / fps - dataOffsetInSeconds) * sampleRate);
const actualStart = Math.max(0, start - sampleSize / 2);
const ints = new Int16Array({
length: sampleSize,
});
ints.set(data.subarray(actualStart, actualStart + sampleSize).map((x) => (0, to_int_16_1.toInt16)(x)));
const alg = optimizeFor === 'accuracy' ? fft_accurate_1.fftAccurate : fft_fast_1.fftFast;
const phasors = alg(ints);
const magnitudes = (0, mag_1.fftMag)(phasors);
return (0, smoothing_1.smoothen)(magnitudes).map((m) => m / (sampleSize / 2) / maxInt);
};
exports.getVisualization = getVisualization;
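// Illustrative usage sketch (hypothetical values; channelWaveform is assumed to be a
// Float32Array of samples in the range [-1, 1]):
//   getVisualization({
//     sampleSize: 128,            // must be a power of two
//     data: channelWaveform,
//     sampleRate: 48000,
//     frame: 30,
//     fps: 30,
//     maxInt: 32767,              // e.g. from getMaxPossibleMagnitude()
//     optimizeFor: 'accuracy',    // any other value selects the fast FFT
//     dataOffsetInSeconds: 0,
//   });
//   // => array of sampleSize / 2 normalized magnitudes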
/***/ }),
/***/ 8764:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
// Adapted from node-fft project by Joshua Wong and Ben Bryan
// https://github.com/vail-systems/node-fft
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.fftMag = void 0;
const complex_1 = __webpack_require__(1603);
const fftMag = function (fftBins) {
const ret = fftBins.map((f) => (0, complex_1.complexMagnitude)(f));
return ret.slice(0, ret.length / 2);
};
exports.fftMag = fftMag;
/***/ }),
/***/ 2268:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
// Adapted from node-fft project by Joshua Wong and Ben Bryan
// https://github.com/vail-systems/node-fft
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getMaxPossibleMagnitude = void 0;
const to_int_16_1 = __webpack_require__(1868);
const getMax = (array) => {
let max = 0;
for (let i = 0; i < array.length; i++) {
const val = array[i];
if (val > max) {
max = val;
}
}
return max;
};
const cache = {};
const getMaxPossibleMagnitude = (metadata) => {
if (cache[metadata.resultId]) {
return cache[metadata.resultId];
}
const result = (0, to_int_16_1.toInt16)(getMax(metadata.channelWaveforms[0]));
cache[metadata.resultId] = result;
return result;
};
exports.getMaxPossibleMagnitude = getMaxPossibleMagnitude;
/***/ }),
/***/ 7557:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
// Adapted from node-fft project by Joshua Wong and Ben Bryan
// https://github.com/vail-systems/node-fft
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.smoothen = void 0;
const smoothingPasses = 3;
const smoothingPoints = 3;
const smoothen = function (array) {
let lastArray = array;
const newArr = [];
for (let pass = 0; pass < smoothingPasses; pass++) {
const sidePoints = Math.floor(smoothingPoints / 2); // our window is centered so this is both nL and nR
const cn = 1 / (2 * sidePoints + 1); // constant
for (let i = 0; i < sidePoints; i++) {
newArr[i] = lastArray[i];
newArr[lastArray.length - i - 1] = lastArray[lastArray.length - i - 1];
}
for (let i = sidePoints; i < lastArray.length - sidePoints; i++) {
let sum = 0;
for (let n = -sidePoints; n <= sidePoints; n++) {
sum += cn * lastArray[i + n];
}
newArr[i] = sum;
}
lastArray = newArr;
}
return newArr;
};
exports.smoothen = smoothen;
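// Illustrative behaviour (hypothetical input): three passes of a centered 3-point
// moving average; the first and last values are copied through unchanged.
//   smoothen([0, 8, 0, 8, 0]) // => same length, endpoints still 0, interior flattened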
/***/ }),
/***/ 1868:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toInt16 = void 0;
const toInt16 = (x) => (x > 0 ? x * 0x7fff : x * 0x8000);
exports.toInt16 = toInt16;
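// Illustrative sketch: maps normalized samples in [-1, 1] onto the signed 16-bit range.
//   toInt16(1)  // => 32767 (0x7fff)
//   toInt16(-1) // => -32768 (-0x8000)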
/***/ }),
/***/ 5576:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getAudioData = void 0;
const fetch_with_cors_catch_1 = __webpack_require__(5633);
const is_remote_asset_1 = __webpack_require__(5448);
const p_limit_1 = __webpack_require__(1082);
const metadataCache = {};
const limit = (0, p_limit_1.pLimit)(3);
const fn = async (src, options) => {
var _a;
if (metadataCache[src]) {
return metadataCache[src];
}
if (typeof document === 'undefined') {
throw new Error('getAudioData() is only available in the browser.');
}
const audioContext = new AudioContext({
sampleRate: (_a = options === null || options === void 0 ? void 0 : options.sampleRate) !== null && _a !== void 0 ? _a : 48000,
});
const response = await (0, fetch_with_cors_catch_1.fetchWithCorsCatch)(src);
if (!response.ok) {
throw new Error(`Failed to fetch audio data from ${src}: ${response.status} ${response.statusText}`);
}
const arrayBuffer = await response.arrayBuffer();
const wave = await audioContext.decodeAudioData(arrayBuffer);
const channelWaveforms = new Array(wave.numberOfChannels)
.fill(true)
.map((_, channel) => {
return wave.getChannelData(channel);
});
const metadata = {
channelWaveforms,
sampleRate: wave.sampleRate,
durationInSeconds: wave.duration,
numberOfChannels: wave.numberOfChannels,
resultId: String(Math.random()),
isRemote: (0, is_remote_asset_1.isRemoteAsset)(src),
};
metadataCache[src] = metadata;
return metadata;
};
/*
* @description Takes an audio or video src, loads it and returns data and metadata for the specified source.
* @see [Documentation](https://remotion.dev/docs/get-audio-data)
*/
const getAudioData = (src, options) => {
return limit(fn, src, options);
};
exports.getAudioData = getAudioData;
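// Illustrative usage sketch (hypothetical URL), inside an async function:
//   const metadata = await getAudioData('https://example.com/audio.mp3');
//   // metadata.channelWaveforms[0] => Float32Array of the first channel
//   // metadata.sampleRate, metadata.durationInSeconds, metadata.numberOfChannels, ...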
/***/ }),
/***/ 7970:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getAudioDuration = exports.getAudioDurationInSeconds = void 0;
/* eslint-disable @typescript-eslint/no-use-before-define */
const media_tag_error_handling_1 = __webpack_require__(4970);
const p_limit_1 = __webpack_require__(1082);
const limit = (0, p_limit_1.pLimit)(3);
const metadataCache = {};
const fn = (src) => {
if (metadataCache[src]) {
return Promise.resolve(metadataCache[src]);
}
if (typeof document === 'undefined') {
throw new Error('getAudioDuration() is only available in the browser.');
}
const audio = document.createElement('audio');
audio.src = src;
return new Promise((resolve, reject) => {
const onError = () => {
(0, media_tag_error_handling_1.onMediaError)({
error: audio.error,
src,
cleanup,
reject,
api: 'getAudioDurationInSeconds()',
});
};
const onLoadedMetadata = () => {
metadataCache[src] = audio.duration;
resolve(audio.duration);
cleanup();
};
const cleanup = () => {
audio.removeEventListener('loadedmetadata', onLoadedMetadata);
audio.removeEventListener('error', onError);
audio.remove();
};
audio.addEventListener('loadedmetadata', onLoadedMetadata, { once: true });
audio.addEventListener('error', onError, { once: true });
});
};
/**
* @description Gets the duration in seconds of an audio source by creating an invisible `