
It is stated in this post, Using new Unity VideoPlayer and VideoClip API to play video, that one can "retrieve texture for each frame if needed".

What's the proper way to get the current frame as a Texture2D, please?

EDIT:

After the answer below, I did this, but it's not working:

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Video;

public class AverageColorFromTexture : MonoBehaviour {

    public VideoClip videoToPlay;
    public Light lSource;

    private Color targetColor;
    private VideoPlayer videoPlayer;
    private VideoSource videoSource;
    private Renderer rend;
    private Texture tex;
    private AudioSource audioSource;

    void Start()
    {
        Application.runInBackground = true;
        StartCoroutine(playVideo());
    }

    IEnumerator playVideo()
    {

        rend = GetComponent<Renderer>();

        videoPlayer = gameObject.AddComponent<VideoPlayer>();
        audioSource = gameObject.AddComponent<AudioSource>();

        //Disable Play on Awake for both Video and Audio
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        //Set video To Play then prepare Audio to prevent Buffering
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        //Wait until video is prepared
        while (!videoPlayer.isPrepared)
        {
            Debug.Log("Preparing Video");
            yield return null;
        }
        Debug.Log("Done Preparing Video");

        //Assign the Texture from Video to Material texture
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        //Enable new frame Event
        videoPlayer.sendFrameReadyEvents = true;

        //Subscribe to the new frame Event
        videoPlayer.frameReady += OnNewFrame;

        //Play Video
        videoPlayer.Play();

        //Play Sound
        audioSource.Play();

        Debug.Log("Playing Video");
        while (videoPlayer.isPlaying)
        {
            Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
            yield return null;
        }
        Debug.Log("Done Playing Video");
    }

    void OnNewFrame(VideoPlayer source, long frameIdx)
    {
        Texture2D videoFrame = (Texture2D)source.texture;

        targetColor = CalculateAverageColorFromTexture(videoFrame);
        lSource.color = targetColor ;
    }


    Color32 CalculateAverageColorFromTexture(Texture2D tex)
    {
        Color32[] texColors = tex.GetPixels32();
        int total = texColors.Length;
        float r = 0;
        float g = 0;
        float b = 0;

        for(int i = 0; i < total; i++)
        {
            r += texColors[i].r;
            g += texColors[i].g;
            b += texColors[i].b;
        }
        return new Color32((byte)(r / total) , (byte)(g / total) , (byte)(b / total) , 0);
    }
}

1 Answer


You can do that properly in three steps:

  1. Enable the new frame event by setting VideoPlayer.sendFrameReadyEvents to true.

  2. Subscribe to the VideoPlayer.frameReady event.

  3. The function you assigned to the VideoPlayer.frameReady event will be called when a new frame is available. Access that frame from the VideoPlayer passed into the parameter by casting its VideoPlayer.texture to Texture2D.

That's it.


In code:

Before videoPlayer.Play(), add these:

// Enable new frame Event
videoPlayer.sendFrameReadyEvents = true;

// Subscribe to the new frame Event
videoPlayer.frameReady += OnNewFrame;

This is your OnNewFrame function signature.

void OnNewFrame(VideoPlayer source, long frameIdx)
{
    Texture2D videoFrame = (Texture2D)source.texture;
    // Do anything with the videoFrame Texture.
}

Note that enabling this event is costly. Make sure you actually need every frame before doing this.
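If you only need frames for part of the playback, here is a minimal sketch (not from the original answer, assuming the same videoPlayer and OnNewFrame names used in this answer) of turning the event back off so you stop paying that cost:

//Hypothetical cleanup: stop the per-frame callbacks once frames are no longer needed
videoPlayer.frameReady -= OnNewFrame;
videoPlayer.sendFrameReadyEvents = false;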

EDIT:

Both Texture2D videoFrame = (Texture2D)source.texture; and Texture2D videoFrame = source.texture as Texture2D; failed.

I put Debug.Log(source.texture); inside the OnNewFrame function and got:

TempBuffer 294 320x240 (UnityEngine.RenderTexture)

So it looks like the VideoPlayer.texture property is actually returning a RenderTexture, not a Texture2D, which is why the cast fails.

We have to convert the RenderTexture to Texture2D.

//Declare the Texture2D field, then initialize it in the Start function
Texture2D videoFrame;

void Start()
{
    videoFrame = new Texture2D(2, 2);
    ...
}

void OnNewFrame(VideoPlayer source, long frameIdx)
{
    RenderTexture renderTexture = source.texture as RenderTexture;

    if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
    {
        videoFrame.Resize(renderTexture.width, renderTexture.height);
    }
    RenderTexture.active = renderTexture;
    videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
    videoFrame.Apply();
    RenderTexture.active = null;

    targetColor = CalculateAverageColorFromTexture(videoFrame);
    lSource.color = targetColor;
}

Here is the complete code from your question with that fix applied:

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Video;

public class AverageColorFromTexture : MonoBehaviour
{
    public VideoClip videoToPlay;
    public Light lSource;

    private Color targetColor;
    private VideoPlayer videoPlayer;
    private VideoSource videoSource;
    private Renderer rend;
    private Texture tex;
    private AudioSource audioSource;

    void Start()
    {
        videoFrame = new Texture2D(2, 2);
        Application.runInBackground = true;
        StartCoroutine(playVideo());
    }

    IEnumerator playVideo()
    {
        rend = GetComponent<Renderer>();

        videoPlayer = gameObject.AddComponent<VideoPlayer>();
        audioSource = gameObject.AddComponent<AudioSource>();

        //Disable Play on Awake for both Video and Audio
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        //Set video To Play then prepare Audio to prevent Buffering
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        //Wait until video is prepared
        while (!videoPlayer.isPrepared)
        {
            Debug.Log("Preparing Video");
            yield return null;
        }
        Debug.Log("Done Preparing Video");

        //Assign the Texture from Video to Material texture
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        //Enable new frame Event
        videoPlayer.sendFrameReadyEvents = true;

        //Subscribe to the new frame Event
        videoPlayer.frameReady += OnNewFrame;

        //Play Video
        videoPlayer.Play();

        //Play Sound
        audioSource.Play();

        Debug.Log("Playing Video");
        while (videoPlayer.isPlaying)
        {
            Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
            yield return null;
        }
        Debug.Log("Done Playing Video");
    }

    //Initialize in the Start function
    Texture2D videoFrame;

    void OnNewFrame(VideoPlayer source, long frameIdx)
    {
        RenderTexture renderTexture = source.texture as RenderTexture;


        if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
        {
            videoFrame.Resize(renderTexture.width, renderTexture.height);
        }
        RenderTexture.active = renderTexture;
        videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
        videoFrame.Apply();
        RenderTexture.active = null;

        targetColor = CalculateAverageColorFromTexture(videoFrame);
        lSource.color = targetColor;
    }

    Color32 CalculateAverageColorFromTexture(Texture2D tex)
    {
        Color32[] texColors = tex.GetPixels32();
        int total = texColors.Length;
        float r = 0;
        float g = 0;
        float b = 0;

        for (int i = 0; i < total; i++)
        {
            r += texColors[i].r;
            g += texColors[i].g;
            b += texColors[i].b;
        }
        return new Color32((byte)(r / total), (byte)(g / total), (byte)(b / total), 0);
    }
}
  • Thanks very much for your reply and for taking time to code your answer! ...but I get a 'Cannot convert type UnityEngine.Video.VideoSource to UnityEngine.Texture2D' error when casting the VideoPlayer texture! – Jayme Mar 12 '17 at 13:10
  • My answer said twice that you convert the `VideoPlayer.texture` **not** `VideoPlayer` to `Texture2D`. Please copy the function from my answer directly. – Programmer Mar 12 '17 at 13:17
  • Thanks for the reply! I've done exactly as you said...but still getting a casting error on the `Texture2D videoFrame = (Texture2D)source.texture;` inside the OnNewFrame function! Would you please help me point what I'm doing wrong? Thanks – Jayme Mar 12 '17 at 18:28
  • Check my EDITED answer. I didn't try my solution. I just did and found the problem. – Programmer Mar 12 '17 at 20:11
  • Thanks @Programmer it solved my issue https://stackoverflow.com/questions/47974403/convert-videoplayer-texture-as-texture2d-in-unity3?noredirect=1#comment82920712_47974403 – Muhammad Faizan Khan Dec 26 '17 at 09:33
  • But i am still unable to understand that why rendertexture.active has used and resize function. – Muhammad Faizan Khan Dec 26 '17 at 09:34
  • `videoPlayer.texture` returns a `RenderTexture`, not a `Texture` or `Texture2D`. To convert the `RenderTexture` to a `Texture2D`, you have to assign the `RenderTexture` to `RenderTexture.active`, which makes it the active texture for this frame. You can then use `ReadPixels` to capture it, since `ReadPixels` captures a block of pixels from the currently **active** `RenderTexture`. The `Resize` function is used so that I do not have to create a new `Texture2D` every time `OnNewFrame` is called, which is expensive. I simply resize it if the frame changes size, which happens sometimes. – Programmer Dec 26 '17 at 13:36
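For readers landing on this comment, here is a minimal standalone sketch of that same RenderTexture-to-Texture2D pattern (the helper name ToTexture2D is illustrative, not part of the original answer; it assumes the target Texture2D was created beforehand, e.g. with new Texture2D(2, 2)):

//Illustrative helper: copies the current contents of a RenderTexture
//into a reusable, CPU-readable Texture2D
Texture2D ToTexture2D(RenderTexture source, Texture2D target)
{
    //Only resize the target when the frame size changes
    if (target.width != source.width || target.height != source.height)
    {
        target.Resize(source.width, source.height);
    }

    //ReadPixels reads from the currently active RenderTexture,
    //so temporarily make the source the active one
    RenderTexture previous = RenderTexture.active;
    RenderTexture.active = source;
    target.ReadPixels(new Rect(0, 0, source.width, source.height), 0, 0);
    target.Apply();
    RenderTexture.active = previous;

    return target;
}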