Commit

Merge pull request #196 from bilibili/feat/optz-perf
Feat/optz perf
hughfenghen authored Jul 23, 2024
2 parents e2f6f20 + 3cb8c87 commit 284fad9
Showing 7 changed files with 217 additions and 89 deletions.
5 changes: 5 additions & 0 deletions .changeset/two-rules-eat.md
@@ -0,0 +1,5 @@
---
'@webav/av-cliper': patch
---

perf: improve performance for MP4Clip
74 changes: 74 additions & 0 deletions packages/av-cliper/demo/performance.demo.ts
@@ -0,0 +1,74 @@
import { ImgClip, MP4Clip } from '../src/clips';
import { Combinator } from '../src/combinator';
import { Log } from '../src/log';
import { OffscreenSprite } from '../src/sprite/offscreen-sprite';
import { renderTxt2ImgBitmap } from '../src/dom-utils';
import { file, write } from 'opfs-tools';

const progressEl = document.querySelector('#progress')!;
const startTimeEl = document.querySelector('#startTime')!;
const costEl = document.querySelector('#cost')!;

document.querySelector('#frag-10min')?.addEventListener('click', () => {
(async () => {
const resPath = '/video/pri-bunny_1080p_avc-frag.mp4';
// const resPath = '/video/pri-cut-5.mp4';

const otFile = file(resPath);

if (!(await otFile.exists())) {
await write(otFile, (await fetch(resPath)).body!);
}

let t = performance.now();
const spr1 = new OffscreenSprite(new MP4Clip(otFile));
await spr1.ready;
console.log('111111111', performance.now() - t);
const width = 1920;
const height = 1080;
// spr1.rect.y = (height - spr1.rect.h) / 2;
// spr1.rect.w = 1920;

const spr2 = new OffscreenSprite(
new ImgClip(
await renderTxt2ImgBitmap(
'示例文字',
`font-size:40px; color: white; text-shadow: 2px 2px 6px red;`,
),
),
);
await spr2.ready;
spr2.rect.x = (width - spr2.rect.w) / 2;
spr2.rect.y = (height - spr2.rect.h) / 2;

spr1.time.duration = 100e6;
console.log('resolution:', { width, height });
const com = new Combinator({
width,
height,
videoCodec: 'avc1.4d4028',
bgColor: 'black',
bitrate: 3e6,
// audio: false,
metaDataTags: { hello: 'world' },
});

await com.addSprite(spr1, { main: true });
await com.addSprite(spr2);

startTimeEl.textContent = new Date().toLocaleTimeString();
let startTs = performance.now();
com.on('OutputProgress', (v) => {
progressEl.textContent = Math.round(v * 100) + '%';
if (v === 1) {
costEl.textContent = String(~~(performance.now() - startTs));
}
});

write(
file(`/perf-test/${new Date().toLocaleTimeString()}.mp4`),
com.output(),
);
// com.output().pipeTo(await createFileWriter());
})().catch(Log.error);
});
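
Note: the demo above benchmarks the full Combinator pipeline end to end. A narrower micro-benchmark of just the MP4Clip decode path that this PR optimizes could look like the sketch below. It assumes the IClip interface used elsewhere in this package (ready resolving to { width, height, duration } with duration in microseconds, tick(time) resolving to { video?, audio?, state }, and destroy()); treat those shapes as assumptions and check packages/av-cliper/src/clips/iclip.ts before relying on them.

// Hypothetical micro-benchmark sketch (not part of this commit).
// Assumes the IClip ready/tick/destroy API described above.
import { MP4Clip } from '../src/clips';
import { file } from 'opfs-tools';

async function benchTick(resPath: string) {
  const clip = new MP4Clip(file(resPath));
  const { duration } = await clip.ready; // duration in microseconds (assumed)
  const start = performance.now();
  let frames = 0;
  // Step through the clip at roughly 30 fps.
  for (let t = 0; t < duration; t += 33_000) {
    const { video, state } = await clip.tick(t);
    if (video instanceof VideoFrame) {
      frames += 1;
      video.close(); // release decoder-backed memory promptly
    }
    if (state === 'done') break;
  }
  console.log(
    `decoded ${frames} frames in ${Math.round(performance.now() - start)}ms`,
  );
  clip.destroy();
}

// Usage: benchTick('/video/pri-bunny_1080p_avc-frag.mp4').catch(console.error);
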
47 changes: 47 additions & 0 deletions packages/av-cliper/demo/performance.html
@@ -0,0 +1,47 @@
<!DOCTYPE html>
<html lang="en">

<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="shortcut icon" href="data:image/x-icon;," type="image/x-icon">

<title>AVCliper Concat Media</title>
<link href="https://hughfenghen.github.io/font/noto-sans.css" rel="stylesheet">

<style>
#app {
width: 900px;
height: 500px;
margin: 50px 50px;
overflow: hidden;
position: relative;
}

canvas {
width: 900px;
height: 500px;
}
</style>
</head>

<body>
<!-- <canvas id="canvas" width="1280" height="720"></canvas> -->
<!-- <br> -->
<button id="frag-10min">frag-10min</button>
<p />
<div>
<span>startTime:</span><span id="startTime"></span> |
<span>cost:</span><span id="cost">---</span>
</div>
<div id="progress"></div>
<hr>
<script type="module" src="performance.demo.ts"></script>
<script src="https://cdn.jsdelivr.net/npm/opfs-tools-explorer"></script>
<script>
OTExplorer.init();
</script>

</body>

</html>
149 changes: 87 additions & 62 deletions packages/av-cliper/src/clips/mp4-clip.ts
@@ -2,12 +2,11 @@ import { MP4Info, MP4Sample } from '@webav/mp4box.js';
import {
audioResample,
autoReadStream,
concatPCMFragments,
extractPCM4AudioData,
sleep,
} from '../av-utils';
import { Log } from '../log';
import { extractFileConfig, sample2ChunkOpts } from '../mp4-utils/mp4box-utils';
import { extractFileConfig } from '../mp4-utils/mp4box-utils';
import { SampleTransform } from '../mp4-utils/sample-transform';
import { DEFAULT_AUDIO_CONF, IClip } from './iclip';
import { file, tmpfile, write } from 'opfs-tools';
@@ -687,10 +686,10 @@
if (samples[0]?.is_sync !== true) {
Log.warn('First sample not key frame');
} else {
const chunks = await Promise.all(
samples.map((s) =>
sample2Chunk(s, EncodedVideoChunk, this.localFileReader),
),
const chunks = await samples2Chunks(
samples,
EncodedVideoChunk,
this.localFileReader,
);
// Wait for the previous asynchronous operation to complete, at which point the task may have already been terminated
if (aborter.abort) return null;
@@ -806,29 +805,26 @@
#ts = 0;
#decCusorIdx = 0;
#decoding = false;
#pcmData: [Float32Array, Float32Array] = [
new Float32Array(0), // left chan
new Float32Array(0), // right chan
];
#pcmData: {
frameCnt: number;
data: [Float32Array, Float32Array][];
} = {
frameCnt: 0,
data: [],
};
#parseFrame = async (
deltaTime: number,
dec: ReturnType<typeof createAudioChunksDecoder> | null = null,
aborter: { abort: boolean },
): Promise<Float32Array[]> => {
if (dec == null || aborter.abort || dec.state === 'closed') return [];

const frameCnt = Math.ceil(deltaTime * (this.#sampleRate / 1e6));
if (frameCnt === 0) return [];
const emitFrameCnt = Math.ceil(deltaTime * (this.#sampleRate / 1e6));
if (emitFrameCnt === 0) return [];

// enough buffered PCM to satisfy the request
if (this.#pcmData[0].length > frameCnt) {
const audio = [
this.#pcmData[0].slice(0, frameCnt),
this.#pcmData[1].slice(0, frameCnt),
];
this.#pcmData[0] = this.#pcmData[0].slice(frameCnt);
this.#pcmData[1] = this.#pcmData[1].slice(frameCnt);
return audio;
if (this.#pcmData.frameCnt > emitFrameCnt) {
return emitAudioFrames(this.#pcmData, emitFrameCnt);
}

if (this.#decoding) {
@@ -840,21 +836,20 @@
} else {
// start a decoding task
const samples = [];
for (let i = this.#decCusorIdx; i < this.samples.length; i++) {
this.#decCusorIdx = i;
let i = this.#decCusorIdx;
while (i < this.samples.length) {
const s = this.samples[i];
const next = this.samples[i + 1];
i += 1;
if (s.deleted) continue;
if (samples.length >= 10) break;
samples.push(s);
if (next == null || s.offset + s.size !== next.offset) break;
}
this.#decCusorIdx = i;

this.#decoding = true;
dec.decode(
await Promise.all(
samples.map((s) =>
sample2Chunk(s, EncodedAudioChunk, this.localFileReader),
),
),
await samples2Chunks(samples, EncodedAudioChunk, this.localFileReader),
(pcmArr, done) => {
if (pcmArr.length === 0) return;
// volume adjustment
@@ -865,10 +860,8 @@
// pad mono to stereo
if (pcmArr.length === 1) pcmArr = [pcmArr[0], pcmArr[0]];

this.#pcmData = concatPCMFragments([this.#pcmData, pcmArr]) as [
Float32Array,
Float32Array,
];
this.#pcmData.data.push(pcmArr as [Float32Array, Float32Array]);
this.#pcmData.frameCnt += pcmArr[0].length;
if (done) this.#decoding = false;
},
);
@@ -879,10 +872,10 @@
reset = () => {
this.#ts = 0;
this.#decCusorIdx = 0;
this.#pcmData = [
new Float32Array(0), // left chan
new Float32Array(0), // right chan
];
this.#pcmData = {
frameCnt: 0,
data: [],
};
this.#dec?.close();
this.#decoding = false;
this.#dec = createAudioChunksDecoder(
@@ -898,16 +891,16 @@
decQSize: this.#dec?.decodeQueueSize,
decCusorIdx: this.#decCusorIdx,
sampleLen: this.samples.length,
pcmLen: this.#pcmData[0]?.length,
pcmLen: this.#pcmData.frameCnt,
});

destroy = () => {
this.#dec = null;
this.#curAborter.abort = true;
this.#pcmData = [
new Float32Array(0), // left chan
new Float32Array(0), // right chan
];
this.#pcmData = {
frameCnt: 0,
data: [],
};
this.localFileReader.close();
};
}
@@ -1016,24 +1009,56 @@ function createPromiseQueue<T extends any>(onResult: (data: T) => void) {
};
}

type Constructor<T> = {
new (...args: any[]): T;
};
function emitAudioFrames(
pcmData: { frameCnt: number; data: [Float32Array, Float32Array][] },
emitCnt: number,
) {
const audio = [new Float32Array(emitCnt), new Float32Array(emitCnt)];
let offset = 0;
let i = 0;
for (; i < pcmData.data.length; ) {
const [chan0, chan1] = pcmData.data[i];
if (offset + chan0.length > emitCnt) {
const gapCnt = emitCnt - offset;
audio[0].set(chan0.subarray(0, gapCnt), offset);
audio[1].set(chan1.subarray(0, gapCnt), offset);
pcmData.data[i][0] = chan0.subarray(gapCnt, chan0.length);
pcmData.data[i][1] = chan1.subarray(gapCnt, chan1.length);
break;
} else {
audio[0].set(chan0, offset);
audio[1].set(chan1, offset);
offset += chan0.length;
i++;
}
}
pcmData.data = pcmData.data.slice(i);
pcmData.frameCnt -= emitCnt;
return audio;
}

async function sample2Chunk<T extends EncodedAudioChunk | EncodedVideoChunk>(
s: ExtMP4Sample,
clazz: Constructor<T>,
async function samples2Chunks<T extends EncodedAudioChunk | EncodedVideoChunk>(
samples: ExtMP4Sample[],
Clazz: { new (...args: any[]): T },
reader: Awaited<ReturnType<OPFSToolFile['createReader']>>,
): Promise<T> {
// todo: perf
const data = await reader.read(s.size, { at: s.offset });
return new clazz(
// todo: perf
sample2ChunkOpts({
...s,
data,
): Promise<T[]> {
const first = samples[0];
const last = samples.at(-1);
if (last == null) return [];
const data = new Uint8Array(
await reader.read(last.offset + last.size - first.offset, {
at: first.offset,
}),
);
return samples.map((s) => {
const offset = s.offset - first.offset;
return new Clazz({
type: s.is_sync ? 'key' : 'delta',
timestamp: s.cts,
duration: s.duration,
data: data.subarray(offset, offset + s.size),
});
});
}

function createVF2BlobConvtr(
@@ -1213,13 +1238,13 @@ async function thumbnailByKeyFrame(
dec.close();
});

const chunks = await Promise.all(
samples
.filter(
(s) =>
!s.deleted && s.is_sync && s.cts >= time.start && s.cts <= time.end,
)
.map((s) => sample2Chunk(s, EncodedVideoChunk, fileReader)),
const chunks = await samples2Chunks(
samples.filter(
(s) =>
!s.deleted && s.is_sync && s.cts >= time.start && s.cts <= time.end,
),
EncodedVideoChunk,
fileReader,
);
if (chunks.length === 0 || abortSingl.aborted) return;
