Get current frame Texture from VideoPlayer


It is stated in this post, "Using new Unity VideoPlayer and VideoClip API to play video", that one can "retrieve texture for each frame if needed".

What's the proper way to do this?

1 Answer

    You can do that properly in three steps:

    1. Enable the new frame event by setting VideoPlayer.sendFrameReadyEvents to true.

    2. Subscribe to the VideoPlayer.frameReady event.

    3. The function you assigned to the VideoPlayer.frameReady event will be called when a new frame is available. Access the frame from the VideoPlayer passed in as a parameter by casting VideoPlayer.texture to Texture2D.

    That's it.


    In code:

    Before calling videoPlayer.Play(), add these lines:

    //Enable new frame Event
    videoPlayer.sendFrameReadyEvents = true;
    
    //Subscribe to the new frame Event
    videoPlayer.frameReady += OnNewFrame;
    

    This is your OnNewFrame function signature.

    void OnNewFrame(VideoPlayer source, long frameIdx)
    {
        Texture2D videoFrame = (Texture2D)source.texture;
        //Do anything with the videoFrame Texture.
    }
    

    It's worth noting that it's costly to enable that event. Make sure that you need each frame before doing this.
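
    For example, if you only need per-frame access for a limited time, you can turn the event off again once you are done (a minimal sketch using the same videoPlayer and OnNewFrame names as above):

    //Stop receiving frame events when they are no longer needed
    videoPlayer.frameReady -= OnNewFrame;
    videoPlayer.sendFrameReadyEvents = false;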

    EDIT:

    Both Texture2D videoFrame = (Texture2D)source.texture; and Texture2D videoFrame = source.texture as Texture2D; failed.

    I put Debug.Log(source.texture); inside the OnNewFrame function and got:

    TempBuffer 294 320x240 (UnityEngine.RenderTexture)

    So it looks like the VideoPlayer.texture property is returning a RenderTexture, not a Texture2D, which is why both casts fail.

    We have to convert the RenderTexture to Texture2D.

    void Start()
    {
        videoFrame = new Texture2D(2, 2);
        ...
    }
    
    //Initialize in the Start function
    Texture2D videoFrame;
    
    void OnNewFrame(VideoPlayer source, long frameIdx)
    {
        RenderTexture renderTexture = source.texture as RenderTexture;
    
        if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
        {
            videoFrame.Resize(renderTexture.width, renderTexture.height);
        }
        RenderTexture.active = renderTexture;
        videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
        videoFrame.Apply();
        RenderTexture.active = null;
    
        targetColor = CalculateAverageColorFromTexture(videoFrame);
        lSource.color = targetColor;
    }
    

    The complete code from your question, with the changes above added:

    using System.Collections;
    using UnityEngine;
    using UnityEngine.Video;

    public class AverageColorFromTexture : MonoBehaviour
    {
        public VideoClip videoToPlay;
        public Light lSource;
    
        private Color targetColor;
        private VideoPlayer videoPlayer;
        private VideoSource videoSource;
        private Renderer rend;
        private Texture tex;
        private AudioSource audioSource;
    
        void Start()
        {
            videoFrame = new Texture2D(2, 2);
            Application.runInBackground = true;
            StartCoroutine(playVideo());
        }
    
        IEnumerator playVideo()
        {
            rend = GetComponent<Renderer>();
    
            videoPlayer = gameObject.AddComponent<VideoPlayer>();
            audioSource = gameObject.AddComponent<AudioSource>();
    
            //Disable Play on Awake for both Video and Audio
            videoPlayer.playOnAwake = false;
            audioSource.playOnAwake = false;
    
            videoPlayer.source = VideoSource.VideoClip;
            videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
            videoPlayer.EnableAudioTrack(0, true);
            videoPlayer.SetTargetAudioSource(0, audioSource);
    
            //Set video To Play then prepare Audio to prevent Buffering
            videoPlayer.clip = videoToPlay;
            videoPlayer.Prepare();
    
            //Wait until video is prepared
            while (!videoPlayer.isPrepared)
            {
                Debug.Log("Preparing Video");
                yield return null;
            }
            Debug.Log("Done Preparing Video");
    
            //Assign the Texture from Video to Material texture
            tex = videoPlayer.texture;
            rend.material.mainTexture = tex;
    
            //Enable new frame Event
            videoPlayer.sendFrameReadyEvents = true;
    
            //Subscribe to the new frame Event
            videoPlayer.frameReady += OnNewFrame;
    
            //Play Video
            videoPlayer.Play();
    
            //Play Sound
            audioSource.Play();
    
            Debug.Log("Playing Video");
            while (videoPlayer.isPlaying)
            {
                Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
                yield return null;
            }
            Debug.Log("Done Playing Video");
        }
    
        //Initialize in the Start function
        Texture2D videoFrame;
    
        void OnNewFrame(VideoPlayer source, long frameIdx)
        {
            RenderTexture renderTexture = source.texture as RenderTexture;
    
    
            if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
            {
                videoFrame.Resize(renderTexture.width, renderTexture.height);
            }
            RenderTexture.active = renderTexture;
            videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
            videoFrame.Apply();
            RenderTexture.active = null;
    
            targetColor = CalculateAverageColorFromTexture(videoFrame);
            lSource.color = targetColor;
        }
    
        Color32 CalculateAverageColorFromTexture(Texture2D tex)
        {
            Color32[] texColors = tex.GetPixels32();
            int total = texColors.Length;
            float r = 0;
            float g = 0;
            float b = 0;
    
            for (int i = 0; i < total; i++)
            {
                r += texColors[i].r;
                g += texColors[i].g;
                b += texColors[i].b;
            }
            return new Color32((byte)(r / total), (byte)(g / total), (byte)(b / total), 0);
        }
    }
    
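    To try the script, attach it to a GameObject that has a Renderer (the video frame is drawn on its material), then assign a VideoClip to videoToPlay and a Light to lSource in the Inspector.

    Note that ReadPixels copies each frame back from the GPU synchronously, which can get expensive for large videos. If that becomes a bottleneck, asynchronous readback is one alternative. This is not part of the original answer, only a sketch assuming Unity 2018.2+ and a platform where SystemInfo.supportsAsyncGPUReadback is true; the class name AsyncFrameReader is just a placeholder:

    using Unity.Collections;
    using UnityEngine;
    using UnityEngine.Rendering;
    using UnityEngine.Video;

    public class AsyncFrameReader : MonoBehaviour
    {
        //Drop-in replacement for the OnNewFrame handler above.
        void OnNewFrame(VideoPlayer source, long frameIdx)
        {
            RenderTexture renderTexture = source.texture as RenderTexture;

            //Request the pixels without blocking the main thread.
            //The callback runs a few frames later when the data is ready.
            AsyncGPUReadback.Request(renderTexture, 0, TextureFormat.RGBA32, request =>
            {
                if (request.hasError)
                    return;

                //The returned data is only valid inside this callback.
                NativeArray<Color32> pixels = request.GetData<Color32>();
                //Average the pixels here instead of using ReadPixels/Apply.
            });
        }
    }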