Here's my code to capture video stream and encode to webm (Using https://github.com/GoogleChromeLabs/webm-wasm):
// Captures frames from the video element `player` through the canvas context
// `mCtx` and streams raw RGBA frames to the webm-wasm worker for encoding.
// Relies on outer-scope bindings: `worker`, `nextEvent`, `w`, `h`, `mCtx`, `player`.
async function captureWebm() {
  console.log("Started webm capture");

  // The first message tells webm-wasm where to fetch its wasm binary;
  // wait for the worker's ready reply before configuring the encoder.
  worker.postMessage("./webm-wasm.wasm");
  await nextEvent(worker, "message");
  console.log("webm worker loaded");

  // Encoder configuration: 1/30 timebase (30 fps), realtime mode so the
  // worker emits encoded chunks continuously instead of buffering one file.
  worker.postMessage({
    width: w,
    height: h,
    timebaseNum: 1,
    timebaseDen: 30,
    bitrate: 1500, // NOTE(review): webm-wasm treats this as kilobits/s — confirm
    realtime: true,
  });

  // Per-frame loop: draw the current video frame, grab its pixels, and hand
  // the underlying buffer to the worker. Plain function (not async): nothing
  // is awaited and requestAnimationFrame ignores the return value anyway.
  const encodeWebm = () => {
    mCtx.drawImage(player, 0, 0, w, h);
    const imageData = mCtx.getImageData(0, 0, w, h);
    const buffer = imageData.data.buffer;
    // Listing `buffer` as a transferable makes this zero-copy; the buffer is
    // detached afterwards, so `imageData` must not be reused past this point.
    worker.postMessage(buffer, [buffer]);
    requestAnimationFrame(encodeWebm);
  };
  requestAnimationFrame(encodeWebm);
}
And here's the listener:
// Encoded webm chunks received from the worker, drained every 500 ms by the
// sender interval below.
let queue = [];

worker.onmessage = (ev) => {
  // webm-wasm signals end-of-stream by posting a null payload.
  if (!ev.data) {
    console.log("End of stream");
    return; // don't fall through to the chunk-handling branch
  }
  // In realtime mode each encoded chunk arrives as a transferred ArrayBuffer.
  // `instanceof ArrayBuffer` already guarantees `byteLength` exists, so the
  // original extra `byteLength !== undefined` check was redundant.
  if (ev.data instanceof ArrayBuffer) {
    queue.push(ev.data);
  }
};
And finally build the video:
// Every 500 ms: assemble the chunks collected so far into a standalone webm
// file and ship it over the socket. Relies on outer-scope `queue`, `socket`,
// and `buildWebmVideoFromArrayOfBuffer` (which reads `queue`).
setInterval(() => {
  // Nothing arrived since the last tick — skip building/sending an empty file.
  if (queue.length === 0) {
    return;
  }
  const webm = buildWebmVideoFromArrayOfBuffer();
  // Safe to reset before send: JS is single-threaded, so no chunk can be
  // pushed between the build above and this reassignment.
  queue = [];
  socket.send(webm);
}, 500);
Without using MediaSource, how can I build a webm video from this array of buffers? My goal is to build a webm video every 500 ms.
See Question & Answers below for more detail:
os 与恶龙缠斗过久,自身亦成为恶龙;凝视深渊过久,深渊将回以凝视…