
I'm new to front-end development and I'm trying to make a Zoom clone using Blazor. Right now I can open the camera, get the stream, and send it over SignalR, but I can't find a way to play the video on the clients. I don't know much JS, so I took the code from these questions on this very site:

Get a stream of bytes from navigator.mediaDevices.getUserMedia()?

Convert blob to base64

How to receive continuous chunk of video as a blob array and set to video tag dynamically in Websocket

The JS code:

let stream = null;
let recorder = null;
let videoData = null;
let videoTeste = null;

let chunks = [];

let wholeVideo = [];
let mediaRecorder;

async function onStart(options) {
    let video = document.getElementById(options.videoID);
    if (navigator.mediaDevices.getUserMedia) {
        try {
            stream = await navigator.mediaDevices.getUserMedia({ video: true });
            video.srcObject = stream;
            video.play();
            recorder = new MediaRecorder(stream);
            recorder.ondataavailable = event => {
                videoData = event.data;
                chunks.push(videoData);
                sendData();
            };
            recorder.start(100);
        }
        catch (err) {
            console.log("An error occurred: " + err);
        }

    }
}

async function sendData() {
    const superBuffer = new Blob(chunks, {
        type: 'video/mp4'
    });

    //let base64data = window.URL.createObjectURL(superBuffer);
    let base64data = await blobToBase64(superBuffer);

    if (videoTeste) {
        chunks = [];
        videoTeste.invokeMethodAsync("SendVideoData", base64data);
    }
}

async function blobToBase64(blob) {
    return new Promise((resolve, _) => {
        const reader = new FileReader();
        reader.onloadend = () => resolve(reader.result);
        reader.readAsDataURL(blob);
    });
}

async function playVideo(source) {
    try {
        let video = document.getElementById("videoplayer");

        video.srcObject = null;
        let currentTime = video.currentTime;

        let file = await fetch(source).then(r => r.blob());

        video.src = URL.createObjectURL(file);
        video.currentTime = currentTime;
     
        video.play();
    }
    catch (err) {
        console.log("An error occurred: " + err);
    }
}


window.OnClassWebCam = {
    start: async (options) => {
        await onStart(options);
    },
    videoPlayer: async (source) => {
        await playVideo(source);
    },
    dotNetHelper: async (dotNetHelper) => {
        videoTeste = dotNetHelper;
    }
};
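
One thing I have seen suggested for this kind of continuous playback, instead of replacing video.src for every received blob, is to keep a single MediaSource open and append each chunk to a SourceBuffer. This is only my rough sketch of that idea, not working code: the "videoplayer" id comes from my component below, the MIME type is a guess (it has to match what MediaRecorder actually produces), and the function names are just placeholders.

let playbackSource = null;  // MediaSource feeding the "videoplayer" element
let playbackBuffer = null;  // SourceBuffer the received chunks are appended to
let pendingChunks = [];     // chunks waiting while the buffer is busy

function initPlayback() {
    const video = document.getElementById("videoplayer");
    playbackSource = new MediaSource();
    video.src = URL.createObjectURL(playbackSource);

    playbackSource.addEventListener("sourceopen", () => {
        // Guessed MIME type: it must match the recorder's real output,
        // e.g. new MediaRecorder(stream, { mimeType: 'video/webm;codecs=vp8' }).
        playbackBuffer = playbackSource.addSourceBuffer('video/webm; codecs="vp8"');
        playbackBuffer.addEventListener("updateend", appendNextChunk);
        appendNextChunk();
    });
}

// Would be called with the base64 data URL received from the hub,
// roughly where playVideo is called today.
function onChunkReceived(dataUrl) {
    pendingChunks.push(dataUrlToBytes(dataUrl));
    appendNextChunk();
}

function appendNextChunk() {
    if (playbackBuffer && !playbackBuffer.updating && pendingChunks.length > 0) {
        playbackBuffer.appendBuffer(pendingChunks.shift());
    }
}

// Decode the data URL back into raw bytes for appendBuffer().
function dataUrlToBytes(dataUrl) {
    const base64 = dataUrl.split(",")[1];
    const binary = atob(base64);
    const bytes = new Uint8Array(binary.length);
    for (let i = 0; i < binary.length; i++) {
        bytes[i] = binary.charCodeAt(i);
    }
    return bytes;
}

From what I understand this only works if the chunks arrive and are appended in the order they were recorded and all come from the same MediaRecorder session, since only the first chunk carries the WebM initialization segment.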

The C# front-end code:

using Microsoft.AspNetCore.Components;
using Microsoft.AspNetCore.SignalR.Client;
using Microsoft.Extensions.Logging;
using Microsoft.JSInterop;
using System.Text.Json;


namespace OnClassBlazor.Pages
{
    public class VideoTesteBase : ComponentBase, IAsyncDisposable
    {
        [Inject]
        protected IJSRuntime JSRuntime { get; set; }

        private HubConnection? hubConnection;

        protected string DataAtual = DateTime.Now.ToString();
        protected string SourceVideo = string.Empty;

        public async Task Start()
        {
            await JSRuntime.InvokeVoidAsync("OnClassWebCam.start", options);

        }

        protected override async Task OnInitializedAsync()
        {
            var dotNetReference = DotNetObjectReference.Create(this);
            await JSRuntime.InvokeVoidAsync("OnClassWebCam.dotNetHelper", dotNetReference);

            hubConnection = new HubConnectionBuilder()
                .WithUrl(@"http://localhost:5000/videohub")
                .ConfigureLogging(o => { 
                    o.SetMinimumLevel(LogLevel.Trace);
                })
                .Build();

            hubConnection.On<string>("ReceiveStream", (source) =>
            {
                JSRuntime.InvokeVoidAsync("OnClassWebCam.videoPlayer", source);
            });

            await hubConnection.StartAsync();
        }

        [JSInvokable]
        public async Task SendVideoData(string stream)
        {
            Console.WriteLine($"stream size {stream.Length}");
            if (IsConnected)
            {               
                await hubConnection.SendAsync("UploadStreamBytes", stream);
            }
        }

        public bool IsConnected =>
            hubConnection?.State == HubConnectionState.Connected;

        public async ValueTask DisposeAsync()
        {
            if (hubConnection is not null)
            {
                await hubConnection.DisposeAsync();
            }
        }

        protected WebCamOptions options = new WebCamOptions()
        {
            CanvasID = "canvas",
            VideoID = "video"
        };

        protected override void OnInitialized()
        {

        }
        
    }

    public class WebCamOptions
    {
        public int Width { get; set; } = 960;
        public int Height { get; set; } = 540;
        public string VideoID { get; set; }
        public string CanvasID { get; set; }
        public string Filter { get; set; } = null;
    }
}

The C# Hub code:

using Microsoft.AspNetCore.SignalR;
using System.Text.Json;
using System.Threading.Channels;

namespace OnClass.API.Hubs
{
    public class VideoHub : Hub
    {
        public async Task SendStream(object stream)
        {
            await Clients.All.SendAsync("ReceiveMessage", stream);
        }

        public async Task UploadStreamBytes(string stream)
        {
            Console.WriteLine($"UploadStreamBytes size: {stream.Length}");
            await Clients.All.SendAsync("ReceiveStream", stream);

        }
    }
}

The component code:

@page "/videochat"
@inherits VideoTesteBase


<h3>VideoTeste</h3>

<div id="container">
    <video id="@options.VideoID"
    autoplay="true" muted="muted" 
    width="@options.Width"
    height="@options.Height">
    </video>

    <button id="start" @onclick="Start" disabled="@(!IsConnected)">Start Video</button>
</div>
<div id="videodastream">
    <video id="videoplayer"
    autoplay="true" muted="muted" 
    width="100"
    height="100">
    </video>

    <button id="aqui" >Video</button>
</div>

The error in the client:

Comment from Yogi (Apr 01 '22 at 00:19): The browser developer panel might help. You could set breakpoints in your JavaScript playVideo() function to confirm that it is working. You could also check for errors and warnings in the console pane, and use the network pane to monitor your WebSocket traffic. If you have not used these tools before, see [MDN - What are browser developer tools?](https://developer.mozilla.org/en-US/docs/Learn/Common_questions/What_are_browser_developer_tools) or the same page in [Português](https://developer.mozilla.org/pt-BR/docs/Learn/Common_questions/What_are_browser_developer_tools).
