Question
I want to record a video from an HTML <canvas> element at a specific frame rate. I am using a CanvasCaptureMediaStream obtained with canvas.captureStream(fps), and I also have access to the video track via const track = stream.getVideoTracks()[0], so I can call track.requestFrame() to write a frame to the output video buffer via MediaRecorder.
I want to precisely capture one frame at a time and then change the canvas content. Changing the canvas content can take some time (images need to be loaded, etc.), so I cannot capture the canvas in real time. Some changes on the canvas would happen over 500ms of real time, so this also needs to be adjusted to rendering one frame at a time.
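For reference, a minimal sketch of the setup described above (the canvas id, frame rate and mime type are illustrative placeholders, not taken from the actual project):

const canvas = document.getElementById('canvas');
// real-time capture: the track produces frames at up to 30 fps on the wall clock
const stream = canvas.captureStream(30);
const track = stream.getVideoTracks()[0];
const rec = new MediaRecorder(stream, { mimeType: 'video/webm' });
const chunks = [];
rec.ondataavailable = (e) => chunks.push(e.data);
rec.start();
// after drawing something, push the current canvas content into the stream
track.requestFrame();
// drawing that takes longer than one frame interval cannot be compensated here,
// because the recorder keeps advancing in real time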
Answer 1:
The MediaRecorder API is meant to record live streams; doing editing is not what it was designed for, and it doesn't do it very well, to be honest...
The MediaRecorder itself has no concept of frame rate; this is normally defined by the MediaStreamTrack. However, the CanvasCaptureMediaStreamTrack doesn't really make it clear what its frame rate is.
We can pass a parameter to HTMLCanvasElement.captureStream(), but this only sets the maximum number of frames we want per second; it's not really an fps parameter.
Also, even if we stop drawing on the canvas, the recorder will still keep extending the duration of the recorded video in real time (though I think only a single long frame is technically recorded in this case).
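A rough sketch of that behavior, with illustrative timings only (drawSomething() is a hypothetical drawing helper):

const chunks = [];
const stream = canvas.captureStream(30);
const rec = new MediaRecorder(stream);
rec.ondataavailable = (e) => chunks.push(e.data);
rec.start();
drawSomething();                    // hypothetical: draw once, then never again
setTimeout(() => rec.stop(), 5000); // stop after 5s of wall-clock time
// the resulting recording is still ~5 seconds long,
// even though only a single frame was ever drawn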
So... we're going to have to hack around it...
One thing we can do with the MediaRecorder is to pause() and resume() it.
It then sounds quite easy to pause before doing the long drawing operation and to resume right after it's done? Yes... but it's not that easy either...
Once again, the frame rate is dictated by the MediaStreamTrack, but a MediaStreamTrack cannot be paused.
Well, actually there is one way to pause a special kind of MediaStreamTrack, and luckily I'm talking about CanvasCaptureMediaStreamTracks.
When we call captureStream() with a parameter of 0, we basically get manual control over when new frames are added to the stream.
So here we can synchronize both our MediaRecorder and our MediaStreamTrack to whatever frame rate we want.
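As a tiny sketch of that manual control (drawNextFrame() is a hypothetical placeholder for whatever slow drawing needs to happen):

const stream = canvas.captureStream(0); // 0: no frame is ever pushed automatically
const track = stream.getVideoTracks()[0];
async function captureOneFrame() {
  await drawNextFrame(); // hypothetical slow drawing, takes as long as it needs
  track.requestFrame();  // only now is the current canvas content added to the stream
}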
The basic workflow is
await the_long_drawing_task;
resumeTheRecorder();
writeTheFrameToStream(); // track.requestFrame();
await wait( time_per_frame );
pauseTheRecorder();
Doing so, the recorder is awake only for the per-frame duration we decided, and a single frame is passed to the MediaStream during that time, effectively mocking constant-FPS drawing as far as the MediaRecorder is concerned.
But as always, hacks in this still-experimental area come with a lot of browser weirdness, and the following demo actually only works in current Chrome...
For whatever reason, Firefox will always generate files with twice the number of frames requested, and it will also occasionally prepend a long first frame...
But anyway, here is the verbose code.
const audioTimer = new AudioTimer();
setTimeout(() => {
  if( audioTimer.requiresActivation ) {
    console.log( 'please click anywhere' );
  }
});
(async () => {
  const FPS = 30;
  const duration = 10; // seconds
  await audioTimer.schedule(0);
  const ctx = canvas.getContext('2d');
  ctx.fillStyle = 'gray';
  let x = 0;
  let frame = 0;
  const chunks = [];
  // Firefox has a very weird behavior where the first frame will last from the time
  // the stream has been generated, no matter when the recorder is created.
  // In other words,
  //   draw();
  //   await wait( 10000 );
  //   rec.start();
  // will produce a 10s still first frame in the recording...
  // Add to that the fact that it takes ~1000ms for the recorder to start
  // at document load (for unknown reasons...), and that it won't start before
  // something is drawn on the canvas, so we have to add a nasty workaround
  // where we wait a few ms before initializing anything.
  await longDraw(); // draw first frame or the recorder can't start
  await wait( 1500 );
  // only now we can get the stream
  const stream = window.stream = canvas.captureStream( 0 );
  const rec = new MediaRecorder( stream, { mimeType: 'video/webm; codecs="vp8"' } );
  const video_track = stream.getVideoTracks()[0];
  rec.ondataavailable = (e) => chunks.push( e.data );
  rec.onstop = (e) => {
    vid.src = URL.createObjectURL( new Blob( chunks ) );
    // workaround https://crbug.com/642012
    vid.onloadedmetadata = (e) => vid.currentTime = 1e100;
    download( vid.src, 'movie.webm' );
  };
  rec.onstart = async (e) => {
    await pause();
    mainLoop();
  };
  rec.start();
  // Promise based pausing of the recorder
  function pause() {
    return new Promise( (res) => {
      rec.addEventListener( 'pause', res, { once: true } );
      rec.pause();
    } );
  }
  // Promise based awakening of the recorder
  function resume() {
    return new Promise( (res) => {
      rec.addEventListener( 'resume', res, { once: true } );
      rec.resume();
    } );
  }
  async function mainLoop() {
    while( frame++ < FPS * duration ) { // one frame at a time
      // do the long drawing
      await longDraw();
      // so we are sure our drawings have been passed
      await afterNextFrame();
      // start our timer now
      const timer = audioTimer.schedule( 1000 / FPS );
      // wake up the recorder
      await resume();
      requestFrame(); // write the frame
      await timer; // wait until our frame-time elapsed
      await pause(); // sleep recorder
    }
    // once all done
    rec.stop();
  }
  // Firefox still uses a non-standard CanvasCaptureMediaStream
  // instead of CanvasCaptureMediaStreamTrack
  function requestFrame() {
    if( video_track.requestFrame ) { // standards
      video_track.requestFrame();
    }
    else { // Firefox
      stream.requestFrame();
    }
  }
  // Fake long drawing operations that make real-time recording impossible
  function longDraw() {
    return wait( Math.random() * 250 )
      .then( draw );
  }
  function draw() {
    x = (x + 1) % canvas.width;
    ctx.clearRect( 0, 0, canvas.width, canvas.height );
    ctx.fillRect( x, 0, 50, 50 );
  }
})();
////////////
// Helpers
////////////
// Promise based timeout
function wait( time ) {
  return new Promise( (res) => setTimeout( res, time ) );
}
// Promise wrapper around experimental requestPostAnimationFrame
function afterNextFrame() {
  return new Promise( (res) => requestPostAnimationFrame( res ) );
}
function download( url, filename = "file.ext" ) {
a = document.createElement( 'a' );
a.textContent = a.download = filename;
a.href = url;
document.body.append( a );
return a;
}
<canvas id="canvas"></canvas>
<video id="vid" controls></video>
<script>
// Some optional(?) goodies
// A Web-Audio based timer, abusing AudioScheduledSourceNode
// Allows for quite correct scheduling even in blurred pages
class AudioTimer {
  constructor() {
    const AudioCtx = window.AudioContext || window.webkitAudioContext;
    const context = this.context = (
      AudioTimer.shared_context || (AudioTimer.shared_context = new AudioCtx())
    );
    const silence = this.silence = context.createGain();
    silence.gain.value = 0;
    silence.connect( context.destination );
    setTimeout( (_) => {
      if( this.requiresActivation ) {
        document.addEventListener( 'click', (e) => context.resume(), { once: true } );
      }
    }, 0);
  }
  async schedule( time ) {
    const context = this.context;
    await context.resume(); // in case we need user activation
    return new Promise( (res) => {
      const node = context.createOscillator();
      node.connect( this.silence );
      node.onended = (e) => res();
      node.start(0);
      node.stop( context.currentTime + (time / 1000) );
    });
  }
  get requiresActivation() {
    return this.context.state === "suspended";
  }
}
// implements a sub-optimal monkey-patch for requestPostAnimationFrame
// see https://stackoverflow.com/a/57549862/3702797 for details
if( !window.requestPostAnimationFrame ) {
  window.requestPostAnimationFrame = function monkey( fn ) {
    const channel = new MessageChannel();
    channel.port2.onmessage = e => fn( e.data );
    requestAnimationFrame( (t) => channel.port1.postMessage( t ) );
  };
}
</script>
Source: https://stackoverflow.com/questions/58907270/record-at-constant-fps-with-canvascapturemediastream-even-on-slow-computers