I want to make a web program that will capture an image via the user's webcam.
I am using the getUserMedia Web API. Here is my code, but it does not work as expected.
There have been updates to the getUserMedia API that now expose takePhoto and grabFrame. The grabFrame method is less exciting because it does what we have been doing all along and just returns the next video frame from the stream, but takePhoto interrupts the stream to use the camera's "highest available photographic camera resolution" to capture a compressed image blob. More details here: Google Developers.
// Shared state for the capture demo: the active MediaStream and the
// ImageCapture wrapper around its video track. Both are assigned by
// getMediaStream() once the getUserMedia promise resolves.
let stream;
let imageCapture;
/**
 * Requests access to the user's camera, stores the resulting stream in
 * the module-level `stream`, and builds an ImageCapture from its first
 * video track (stored in `imageCapture` for later takePhoto() calls).
 * Failures are routed to the shared `error` handler.
 */
function getMediaStream()
{
  const constraints = { video: true };
  window.navigator.mediaDevices
    .getUserMedia(constraints)
    .then((mediaStream) => {
      stream = mediaStream;
      const [videoTrack] = mediaStream.getVideoTracks();
      imageCapture = new ImageCapture(videoTrack);
      console.log(imageCapture);
    })
    .catch(error);
}
/**
 * Shared rejection handler for the promise chains above: logs the
 * failure to the console and nothing else.
 * @param {*} err - the rejection reason (typically a DOMException).
 */
function error(err)
{
  console.error('error:', err);
}
/**
 * Captures a still photo from the active ImageCapture session and
 * displays it in an <img> element.
 *
 * Fixes over the original:
 * - `const img = img || ...` redeclared the parameter `img`, which is a
 *   SyntaxError ("Identifier 'img' has already been declared") and
 *   prevented the whole script from parsing; the fallback now uses a
 *   distinct local name.
 * - `URL.revokeObjectURL(url)` was called synchronously right after
 *   assigning `src`, which can invalidate the blob URL before the
 *   browser has fetched it; the URL is now revoked in `onload`.
 *
 * @param {HTMLImageElement} [img] - target element; defaults to the
 *   first <img> in the document.
 */
function takePhoto(img)
{
  const target = img || document.querySelector('img');
  imageCapture.takePhoto()
    .then(blob => {
      const url = window.URL.createObjectURL(blob);
      // Release the blob URL only once the image data has been loaded.
      target.onload = () => window.URL.revokeObjectURL(url);
      target.src = url;
    })
    .catch(error);
}
/* just call */
getMediaStream();
/* and when you want to capture an image */
// NOTE(review): as written, this runs synchronously, before the
// getUserMedia promise inside getMediaStream() has resolved, so
// `imageCapture` is still undefined here and takePhoto() will throw.
// In real use, invoke takePhoto() later (e.g. from a button click
// handler) once the stream has been acquired.
takePhoto();