I am trying to develop a system where two clients can video chat with each other from their browsers through a server. The first client sends its video stream to the server, and the server forwards it to the second client. The server also saves the client's stream as a video file.
I used this WebRTC example: https://github.com/webrtc/samples/blob/master/src/content/getusermedia/source/js/main.js
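On the client I already have the camera stream working, roughly like the snippet below taken from that sample (I enabled audio in the constraints myself; the original sample only requests video):

// Roughly the capture code from the linked sample; audio: true added by me
var constraints = { audio: true, video: true };

navigator.mediaDevices.getUserMedia(constraints)
    .then(function(stream) {
        window.stream = stream; // the MediaStream I want to send to the server
        document.querySelector('video').srcObject = stream;
    })
    .catch(function(error) {
        console.log('getUserMedia error:', error.name);
    });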
Server side:

var express = require('express');
var app = express();
var http = require('http').Server(app);
var io = require('socket.io')(http);

// Serve the static files from C:/source/
app.use(express.static('C:/source/'));

app.get('/', function(req, res) {
    res.sendFile('C:/source/index.html');
});

io.on('connection', function(socket) {
    console.log('user connected.');

    socket.on('disconnect', function() {
        console.log('user disconnected.');
    });

    socket.on('chat message', function(msg) {
        ? // this is where the incoming stream data should be handled
    });
});

http.listen(3000, function() {
    console.log('listening on *:3000');
});
Client side:

var socket = io();

// Pseudocode: while the camera is streaming, keep emitting... but emitting what?
while (streaming) {
    socket.emit(?);
}
I can't figure out from which source I should emit the client's video + audio stream to the server. Once I manage to upload the stream, I will be able to handle it on the server side.
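My current guess is that I need MediaRecorder to turn the getUserMedia MediaStream into chunks that I can emit over the socket, something like the sketch below, but I'm not sure whether this is the right source to emit from (the 'chat message' event name, the mimeType, and the 1000 ms timeslice are just my assumptions):

var socket = io();

// Guess: record the MediaStream from getUserMedia and emit each chunk
var mediaRecorder = new MediaRecorder(window.stream, { mimeType: 'video/webm; codecs=vp8,opus' });

mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        // event.data is a Blob; socket.io should transmit it as binary
        socket.emit('chat message', event.data);
    }
};

// Fire dataavailable roughly every second instead of only on stop()
mediaRecorder.start(1000);

Is this the right direction, or is there a different source I should be emitting the stream from?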