I'm developing a Chrome extension which does the following:

1. get access to the stream obtained from chrome.tabCapture.capture (when capturing video; let's ignore audio capture for now since it's not relevant to the issue I'm facing)
2. pass this tabStream to URL.createObjectURL(tabStream)
3. use the resulting URL as the src for a DOM video element: videoEl.src = URL.createObjectURL(tabStream)
4. invoke videoEl.play() and, when the canplay event fires, pass the videoEl as an argument to the canvas context's drawImage method
5. since the video frames are now rendered into the canvas element, one can perform lots of useful operations on those frames (cropping, watermarking etc.); a rough sketch of these steps follows right after this list
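Put together, the flow looks roughly like this (just a sketch of the idea, not my actual code; videoEl, canvasEl and onFrame are placeholder names):

// Rough sketch of steps 1-5 inside the background page.
chrome.tabCapture.capture({ video: true, audio: false }, function(tabStream) {
  var videoEl = document.createElement('video');
  var canvasEl = document.createElement('canvas');
  canvasEl.width = 400;
  canvasEl.height = 400;
  var ctx = canvasEl.getContext('2d');

  videoEl.src = URL.createObjectURL(tabStream); // steps 2 + 3
  videoEl.play();                               // step 4
  videoEl.addEventListener('canplay', function() {
    (function onFrame() {
      if (videoEl.paused || videoEl.ended) return;
      ctx.drawImage(videoEl, 0, 0, canvasEl.width, canvasEl.height); // step 5
      setTimeout(onFrame, 50); // ~20 fps
    })();
  });
});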
Up to this point everything works perfectly. But the following two final steps do not work:
1. create a stream out of the canvas element using canvasStream = canvasEl.captureStream(20)
2. pass this stream to MediaRecorder (recorder = new MediaRecorder(canvasStream)) and start recording: recorder.start() (see the sketch right after this list)
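In isolation those two steps look roughly like this (again just a sketch; canvasEl is the element from the previous sketch and the handlers mirror the dummy code further down):

// Sketch of the two failing steps, continuing from the canvasEl above.
var canvasStream = canvasEl.captureStream(20); // 20 fps requested
var recorder = new MediaRecorder(canvasStream);
var chunks = [];
recorder.ondataavailable = function(e) {
  if (e.data && e.data.size) chunks.push(e.data); // never fires when run in the background page
};
recorder.start();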
Essentially, if this approach is used outside of a Chrome extension's background page (like here: https://jsfiddle.net/williamwallacebrave/2rgv7pgj/7/), everything works perfectly. But when it is used inside the extension's background page, I can clearly see that video frames are sent and rendered into the canvas element, yet somehow either canvasEl.captureStream() is not pushing the data or the recorder is not able to pick it up. If the same approach is used inside a content script it again works perfectly, but in content scripts I'm not able to get access to the tabCapture stream.
This is my manifest file:
{
  "name": "super app",
  "manifest_version": 2,
  "description": "...",
  "version": "0.0.1",
  "content_security_policy": "script-src 'self' 'unsafe-eval'; object-src 'self'",
  "page_action": {
    "default_title": "app",
    "default_icon": "static/images/logo.png"
  },
  "icons": {
    "128": "static/images/logo.png"
  },
  "background": {
    "page": "background.html"
  },
  "content_scripts": [
    {
      "matches": ["<all_urls>"],
      "exclude_matches": ["http://localhost:3000/*"],
      "css": [
        "static/css/style.css"
      ],
      "js": [
        "vendor/system.js",
        "vendor/jquery.min.js",
        "content/config.js",
        "content/index.js"
      ]
    }
  ],
  "web_accessible_resources": [
    "background/*",
    "vendor/*",
    "content/*",
    "common/*.js",
    "capturer.html",
    "static/*",
    "*"
  ],
  "externally_connectable": {
    "matches": [
      "http://localhost:3000/*"
    ]
  },
  "permissions": [
    "tabs",
    "activeTab",
    "<all_urls>",
    "clipboardRead",
    "clipboardWrite",
    "tabCapture",
    "notifications",
    "tts"
  ]
}
Here is the dummy code which works perfectly fine when run as a content script but doesn't work when run in the background page:
// SOURCE: http://stackoverflow.com/questions/39302814/mediastream-capture-canvas-and-audio-simultaneously#39302994
var cStream,
aStream,
recorder,
chunks = [],
canvasEl = document.createElement('canvas');
canvasEl.width = 400;
canvasEl.height = 400;
document.body.appendChild(canvasEl);
/*
create and run external video
*/
var videoEl = document.createElement('video');
videoEl.crossOrigin = 'anonymous';
videoEl.src = 'https://dl.dropboxusercontent.com/s/bch2j17v6ny4ako/movie720p.mp4';
videoEl.play();
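// Route the video element's audio through Web Audio so we also get a
// MediaStream audio track (aStream) that can be added to the canvas stream later.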
videoEl.addEventListener('play', function(){
var audioCtx = new AudioContext();
var dest = audioCtx.createMediaStreamDestination();
aStream = dest.stream;
var sourceNode = audioCtx.createMediaElementSource(this);
console.log('connected audio');
sourceNode.connect(dest);
// output to our headphones as well
sourceNode.connect(audioCtx.destination);
var canvasCtx = canvasEl.getContext('2d');
console.log('play video in canvas');
draw(this, canvasCtx);
startRecording();
setTimeout(() => {
stopRecording();
}, 10000)
}, false);
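// Once recording stops, turn the collected chunks into a playable <video>.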
function exportStream(e) {
console.log('exportStream', chunks.length);
if (chunks.length) {
var blob = new Blob(chunks),
videoURL = URL.createObjectURL(blob),
resultVideoEl = document.createElement('video');
resultVideoEl.controls = true;
resultVideoEl.src = videoURL;
resultVideoEl.onended = function() {
URL.revokeObjectURL(videoURL);
}
document.body.insertBefore(resultVideoEl, canvasEl);
} else {
document.body.insertBefore(
document.createTextNode('no data saved'), canvasEl);
}
}
function saveChunks(e) {
console.log('save chunks', e.data.size);
e.data.size && chunks.push(e.data);
}
function stopRecording() {
console.log('STOP RECORDING');
videoEl.pause();
recorder.stop();
}
function startRecording() {
console.log('START RECORDING');
cStream = canvasEl.captureStream(30);
cStream.addTrack(aStream.getAudioTracks()[0]);
recorder = new MediaRecorder(cStream);
recorder.start();
// =============================================
// THIS PART IS NOT FIRED WHEN RUN IN BACKGROUND
// and final chunks is always an empty array.
// =============================================
recorder.ondataavailable = saveChunks;
recorder.onstop = exportStream;
}
function draw(v,ctx) {
if(videoEl.paused || videoEl.ended) return false;
// here I'm cropping the video frames and taking only 400 by 400
// square shifted by 100, 100 vector
ctx.drawImage(v, 100, 100, 400, 400, 0, 0, 400,400);
setTimeout(draw,20,v,ctx);
}
Also please note that captureStream and MediaRecorder are relatively new APIs, so you need Chrome 51+ in order to run this example.
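A quick runtime check for that (just a sketch) would be:

// Sketch: warn early if the APIs are missing (pre-Chrome 51).
if (typeof HTMLCanvasElement.prototype.captureStream !== 'function' ||
    typeof MediaRecorder === 'undefined') {
  console.warn('canvas.captureStream / MediaRecorder not available; Chrome 51+ is required');
}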