Split-Screen Blending with a Single Camera

Posted by tiancaiwrk

I built a split-screen blending feature that uses a single camera. The requirement is roughly this: from the same viewpoint, the camera renders different RenderTextures through different CullingMasks, and those images are then composited together on screen. The core problem is simply how to render different images with one camera. Here is the code:

using System.Collections;
using System.Collections.Generic;
using UnityEngine;

[ExecuteInEditMode]
public class BlendRenderTexture : MonoBehaviour
{
    public enum BlendDirection
    {
        Horizontal = 0,
        Vertical = 1
    }

    [SerializeField]
    [Range(0, 1)]
    public float blend = 0.5f;
    [SerializeField]
    public BlendDirection blendDirection = BlendDirection.Horizontal;
    [SerializeField]
    public LayerMask tagLayerMask;
    [SerializeField]
    public RenderingTools.BlendRenderTextureTarget blendRenderTextureTarget;

    // Set while the manually requested render is in flight, so that
    // OnRenderImage copies the result straight through instead of blending
    private bool m_grabbing = false;

    private void OnEnable()
    {
        if(blendRenderTextureTarget == null)
        {
            blendRenderTextureTarget = new RenderingTools.BlendRenderTextureTarget("Unlit/BlendRenderTexture");
        }
    }
    private void OnDisable()
    {
        if(blendRenderTextureTarget != null)
        {
            blendRenderTextureTarget.Release();
        }
    }

    void OnRenderImage(RenderTexture src, RenderTexture dest)
    {
        if(m_grabbing)
        {
            // This call comes from the manual Camera.Render() in RenderTargetTexture():
            // just copy the masked render into the camera's current target texture
            m_grabbing = false;
            Graphics.Blit(src, dest);
            return;
        }
        if(blendRenderTextureTarget.renderTexture && blendRenderTextureTarget.material)
        {
            // Normal pass: blend the regular camera image with the masked render
            blendRenderTextureTarget.material.SetTexture("_Left", src);
            blendRenderTextureTarget.material.SetTexture("_Right", blendRenderTextureTarget.renderTexture);
            blendRenderTextureTarget.material.SetFloat("_Blend", Mathf.Clamp01(blend));
            blendRenderTextureTarget.material.SetInt("_Direction", (int)blendDirection);
            Graphics.Blit(src, dest, blendRenderTextureTarget.material);
        }
        else
        {
            Graphics.Blit(src, dest);
        }
    }

    private void LateUpdate()
    {
        // Request the masked render once per frame, before the camera renders
        RenderTargetTexture();
    }
    
    public void RenderTargetTexture()
    {
        var material = blendRenderTextureTarget.GetMaterial();
        // Material converts implicitly to bool, so this also records whether
        // the extra render pass should be grabbed in OnRenderImage
        m_grabbing = material;
        if(m_grabbing)
        {
            // Temporarily point the camera at the masked layers and at our
            // temporary RenderTexture, render, then restore its original state.
            // The script is assumed to sit on the main camera, so this manual
            // Render() calls back into OnRenderImage above.
            var lastMask = Camera.main.cullingMask;
            var lastTex = Camera.main.targetTexture;
            Camera.main.cullingMask = tagLayerMask;
            Camera.main.targetTexture = blendRenderTextureTarget.GetRenderTexture();
            Camera.main.Render();
            Camera.main.cullingMask = lastMask;
            Camera.main.targetTexture = lastTex;
        }
    }
}

The target render is requested in LateUpdate. With m_grabbing set, the resulting OnRenderImage call simply writes the masked render out to our temporary RenderTexture. When OnRenderImage runs again during the normal render, the two images are blended through the material.
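
As a rough usage sketch (the setup script, the "Overlay" layer name, and the field values here are my own illustrative assumptions, not part of the original post), the component is added to the main camera and pointed at the layer that should fill the other half of the split; the blend shader also has to be reachable by Shader.Find at runtime:

using UnityEngine;

// Hypothetical setup helper: attaches BlendRenderTexture to the main camera and
// points it at an assumed "Overlay" layer that should fill the second half of
// the split screen.
public class BlendSetupExample : MonoBehaviour
{
    void Start()
    {
        var cameraObject = Camera.main.gameObject;
        var blender = cameraObject.GetComponent<BlendRenderTexture>();
        if(blender == null)
        {
            blender = cameraObject.AddComponent<BlendRenderTexture>();
        }

        blender.tagLayerMask = LayerMask.GetMask("Overlay");   // assumed layer name
        blender.blendDirection = BlendRenderTexture.BlendDirection.Horizontal;
        blender.blend = 0.5f;                                  // split down the middle
    }
}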

BlendRenderTextureTarget is just a wrapper around the resources:

using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public static class RenderingTools
{
    [System.Serializable]
    public class BlendRenderTextureTarget
    {
        [SerializeField]
        public Material material = null;
        [SerializeField]
        public string shaderName = string.Empty;

        public RenderTexture renderTexture { get; set; }

        public BlendRenderTextureTarget(string shaderName)
        {
            this.shaderName = shaderName;
        }

        public Material GetMaterial()
        {
            // Lazily create the blend material from the configured shader
            if(material == null)
            {
                var shader = Shader.Find(shaderName);
                if(shader)
                {
                    material = new Material(shader);
                    material.hideFlags = HideFlags.DontSave;
                }
            }
            return material;
        }

        public RenderTexture GetRenderTexture()
        {
            // Drop the texture if the screen size changed since it was allocated
            if(renderTexture)
            {
                if(renderTexture.width != Screen.width || renderTexture.height != Screen.height)
                {
                    RenderTexture.ReleaseTemporary(renderTexture);
                    renderTexture = null;
                }
            }
            if(renderTexture == null)
            {
                renderTexture = RenderTexture.GetTemporary(Screen.width, Screen.height, 24, RenderTextureFormat.ARGB32);
                renderTexture.hideFlags = HideFlags.DontSave;
            }
            return renderTexture;
        }

        public void Release()
        {
            if(renderTexture)
            {
                RenderTexture.ReleaseTemporary(renderTexture);
                renderTexture = null;
            }
            if(material)
            {
                // The material was created at runtime with DontSave, so destroy it
                // explicitly instead of only dropping the reference
                Object.DestroyImmediate(material);
                material = null;
            }
        }
    }
}
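
To make the wrapper's lifecycle explicit, here is a minimal standalone sketch (purely illustrative, not part of the original component) of how the lazily created material and temporary RenderTexture are obtained and released:

using UnityEngine;

// Illustrative only: exercises BlendRenderTextureTarget on its own to show the
// create / resize / release lifecycle used by BlendRenderTexture above.
public class BlendTargetLifecycleExample : MonoBehaviour
{
    private RenderingTools.BlendRenderTextureTarget m_target;

    private void OnEnable()
    {
        // The shader has to be findable by Shader.Find at runtime
        m_target = new RenderingTools.BlendRenderTextureTarget("Unlit/BlendRenderTexture");
    }

    private void Update()
    {
        // GetRenderTexture() reallocates automatically when the screen size changes
        var rt = m_target.GetRenderTexture();
        var mat = m_target.GetMaterial();
        // ... render into rt and blit with mat, as BlendRenderTexture does ...
    }

    private void OnDisable()
    {
        // Returns the temporary RenderTexture and destroys the runtime material
        m_target.Release();
    }
}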

 

The shader used for the blending is also very simple:

Shader "Unlit/BlendRenderTexture"
{
    Properties
    {
        _MainTex("Texture", 2D) = "white" {}
        _Left("Left", 2D) = "white" {}
        _Right("Right", 2D) = "white" {}
        _Blend("Blend", Range(0,1)) = 0.5
        [Enum(Horizontal,0,Vertical,1)] _Direction("Blend Direction", Float) = 0
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            // make fog work
            #pragma multi_compile_fog
            
            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                UNITY_FOG_COORDS(1)
                float4 vertex : SV_POSITION;
            };

            sampler2D _MainTex;
            float4 _MainTex_ST;

            sampler2D _Left;
            sampler2D _Right;
            float _Blend;
            float _Direction;
            
            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.uv = TRANSFORM_TEX(v.uv, _MainTex);
                UNITY_TRANSFER_FOG(o,o.vertex);
                return o;
            }
            
            fixed4 frag (v2f i) : SV_Target
            { 
                fixed4 col = 1;
                // sample the texture
                bool blend_left = (_Direction == 0) ? (i.uv.x <= _Blend) : (i.uv.y <= _Blend);
                if (blend_left)
                {
                    col = tex2D(_Left, i.uv);
                }
                else 
                {
                    col = tex2D(_Right, i.uv);
                }                
                // apply fog
                UNITY_APPLY_FOG(i.fogCoord, col);

                return col;
            }
            ENDCG
        }
    }
}
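
For completeness, here is a small illustrative driver (again my own addition, not from the original post) that sweeps the split line across the screen by animating the public blend field:

using UnityEngine;

// Hypothetical helper: moves the divider back and forth by animating the
// blend value exposed by BlendRenderTexture.
[RequireComponent(typeof(BlendRenderTexture))]
public class BlendSweepExample : MonoBehaviour
{
    public float speed = 0.5f;   // sweeps per second, an assumed default

    private BlendRenderTexture m_blender;

    private void Awake()
    {
        m_blender = GetComponent<BlendRenderTexture>();
    }

    private void Update()
    {
        // PingPong moves the divider from 0 to 1 and back again
        m_blender.blend = Mathf.PingPong(Time.time * speed, 1f);
    }
}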

  
