1

I'm trying to follow this tutorial: https://www.tutorialspoint.com/electron/electron_audio_and_video_capturing.htm

The first part of the tutorial worked fine... I can stream av from my pc camera and mic... into my electron app. But now I'm trying to do is stream audio and video from a specific application running on my windows desktop via the desktopCapturer object.

Problem

I'm not getting any errors. But the electron app's video html tag is not showing the stream from myappthatstreamsAV.

Code

I changed my index.html code to look like this: (just changed stuff inside the tag)

<!DOCTYPE html>
<html>
   <head>
      <meta charset = "UTF-8">
      <title>Audio and Video</title>
   </head>
   
   <body>
      <video autoplay></video>
      <script type = "text/javascript">
        var desktopCapturer = require('electron').desktopCapturer;
        desktopCapturer.getSources({types: ['window', 'screen']}, (error, sources) => {
        if (error) throw error
         desktopCapturer.getSources({types: ['window', 'screen']}, (error, sources) => {
            if (error) throw error
            for (let i = 0; i < sources.length; ++i) {
               if (sources[i].name === 'myappthatstreamsAV') {
                  navigator.webkitGetUserMedia({
                     audio: true,
                     video: {
                        mandatory: {
                           chromeMediaSource: 'desktop',
                           chromeMediaSourceId: sources[i].id,
                           minWidth: 1280,
                           maxWidth: 1280,
                           minHeight: 720,
                           maxHeight: 720
                        }
                     }
                  }, handleStream, handleError)
                  return
               }
            }
         })

         function handleStream (stream) {
            document.querySelector('video').src = URL.createObjectURL(stream)
         }

         function handleError (e) {
            console.log(e)
         }
      </script>
   </body>
</html>

and the index.js looks like this:

const {app, BrowserWindow} = require('electron')
const url = require('url')
const path = require('path')

let win

// Set the path where recordings will be saved
app.setPath("userData", __dirname + "/saved_recordings")

function createWindow() {
   win = new BrowserWindow({width: 800, height: 600,
      webPreferences: {
         nodeIntegration: true
     }
   })
   win.loadURL(url.format({
      pathname: path.join(__dirname, 'index.html'),
      protocol: 'file:',
      slashes: true
   }))
}

app.on('ready', createWindow)

What I've tried so far:

I added some debug statements like this:

  <script type = "text/javascript">
     var desktopCapturer = require('electron').desktopCapturer;
     console.log("1")
     console.log(desktopCapturer)
     desktopCapturer.getSources({types: ['window', 'screen']}, (error, sources) => {
        console.log("2")
        if (error) throw error
        for (let i = 0; i < sources.length; ++i) {
           console.log((sources[i].name));
           console.log("3")

and basically, it executes only the first two console.logs:

     console.log("1")
     console.log(desktopCapturer)

It never gets to 2 or 3.

dot
  • 14,928
  • 41
  • 110
  • 218
  • 1
    Does this answer your question? [Electron require() is not defined](https://stackoverflow.com/questions/44391448/electron-require-is-not-defined) – snwflk Sep 16 '20 at 20:07
  • @snwflk yes. that allows me to define the variable. But now the code isn't working. I don't have any stream that appears in my electron app. No errors. Will update questions – dot Sep 16 '20 at 20:15

1 Answers1

0

Changed my code to look like this:

     var desktopCapturer = require('electron').desktopCapturer;
     console.log("are you here?")
     console.log(desktopCapturer)

     desktopCapturer.getSources({ types: ['window', 'screen'] }).then(async sources => {
        for (const source of sources) {
           if (source.name === 'mystreamApp') {
              try {
              const stream = await navigator.mediaDevices.getUserMedia({
                 audio: true,
                 video: {
                    mandatory: {
                    chromeMediaSource: 'desktop',
                    chromeMediaSourceId: source.id,
                    minWidth: 1280,
                    maxWidth: 1280,
                    minHeight: 720,
                    maxHeight: 720
                    }
                 }
              })
              handleStream(stream)
              } catch (e) {
              handleError(e)
              }
              return
           }
        }
        })

        function handleStream (stream) {
        const video = document.querySelector('video')
        video.srcObject = stream
        video.onloadedmetadata = (e) => video.play()
        }

        function handleError (e) {
        console.log(e)
        }

and now I see the video stream. Audio is still not working. But i'll open another question for that.

dot
  • 14,928
  • 41
  • 110
  • 218