Contents
- Effect 1
- CSharp
- Shader
- Effect 2
- CSharp
- Effect 3
- CSharp
- Shader
- Project
- References
I won't repeat the theory behind the Gaussian blur algorithm here; look it up if you need a refresher.
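As a quick reference (not part of the project code), the 5-tap weights hard-coded in the shaders below, 0.4026 / 0.2442 / 0.0545, are simply a normalized 1D Gaussian sampled at offsets 0, ±1, ±2 with sigma = 1. The following C# sketch (the GaussianKernel helper is purely illustrative) reproduces them:
using System;
// Illustrative helper only: computes a normalized 1D Gaussian kernel.
// Compute(2, 1f) returns { 0.4026, 0.2442, 0.0545 }, the weights used by the shaders below.
public static class GaussianKernel
{
    public static float[] Compute(int radius, float sigma)
    {
        var weights = new float[radius + 1]; // weights[0] = center tap, weights[i] = tap at offset ±i
        float sum = 0f;
        for (int i = 0; i <= radius; i++)
        {
            weights[i] = (float)Math.Exp(-(i * i) / (2f * sigma * sigma));
            sum += (i == 0) ? weights[i] : 2f * weights[i]; // off-center taps are sampled twice (+i and -i)
        }
        for (int i = 0; i <= radius; i++) weights[i] /= sum; // normalize so the full kernel sums to 1
        return weights;
    }
}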
Effect 1
CSharp
using UnityEngine;
public class GaussianBlurPP : MonoBehaviour
{
private static int _BlurSize_hash = Shader.PropertyToID("_BlurSize");
[Range(0, 4)]
public int iterations = 3;
[Range(0.2f, 3.0f)]
public float blurSpread = .6f;
[Range(1, 8)]
public int downSample = 2;
public Material mat;
private void OnRenderImage(RenderTexture source, RenderTexture destination)
{
if (mat == null)
{
Graphics.Blit(source, destination);
return;
}
var rw = Screen.width / downSample;
var rh = Screen.height / downSample;
var rt0 = RenderTexture.GetTemporary(rw, rh, 0);
rt0.filterMode = FilterMode.Bilinear;
// First copy source into rt0
Graphics.Blit(source, rt0);
for (int i = 0; i < iterations; i++)
{
mat.SetFloat(_BlurSize_hash, 1 + i * blurSpread);
var rt1 = RenderTexture.GetTemporary(rw, rh, 0);
rt1.filterMode = FilterMode.Bilinear;
// horizontal blur
Graphics.Blit(rt0, rt1, mat, 0);
// After Graphics.Blit(source, destination) has run,
// if source is not re-acquired with GetTemporary, the game crashes (the Unity Editor crashes too)
RenderTexture.ReleaseTemporary(rt0);
rt0 = rt1;
rt1 = RenderTexture.GetTemporary(rw, rh, 0);
rt1.filterMode = FilterMode.Bilinear;
// vertical blur
Graphics.Blit(rt0, rt1, mat, 1);
RenderTexture.ReleaseTemporary(rt0);
rt0 = rt1;
}
// Copy rt0 into destination
Graphics.Blit(rt0, destination);
RenderTexture.ReleaseTemporary(rt0);
}
}
All those RenderTexture.GetTemporary/ReleaseTemporary calls are dizzying to read, but it is still safest to go through that API, since Unity may change its internal pooling logic at some point.
After reading the article on rendering to RenderTextures listed in the References, however, I found the code can also be written as follows (that said, I still recommend the GetTemporary/ReleaseTemporary style):
using UnityEngine;
public class GaussianBlurPP : MonoBehaviour
{
public enum ReuseRT
{
NoDiscardContent,
DiscardContent,
}
private static int _BlurSize_hash = Shader.PropertyToID("_BlurSize");
[Range(0, 4)]
public int iterations = 3;
[Range(0.2f, 3.0f)]
public float blurSpread = .6f;
[Range(1, 8)]
public int downSample = 2;
public Material mat;
public ReuseRT reuseRT;
private void OnRenderImage(RenderTexture source, RenderTexture destination)
{
if (reuseRT == ReuseRT.NoDiscardContent) InnerOnRenderImage1(source, destination);
else InnerOnRenderImage2(source, destination);
}
private void InnerOnRenderImage1(RenderTexture source, RenderTexture destination)
{
if (mat == null)
{
Graphics.Blit(source, destination);
return;
}
var rw = Screen.width / downSample;
var rh = Screen.height / downSample;
var rt0 = RenderTexture.GetTemporary(rw, rh, 0);
rt0.filterMode = FilterMode.Bilinear;
// First copy source into rt0
Graphics.Blit(source, rt0);
for (int i = 0; i < iterations; i++)
{
mat.SetFloat(_BlurSize_hash, 1 + i * blurSpread);
var rt1 = RenderTexture.GetTemporary(rw, rh, 0);
rt1.filterMode = FilterMode.Bilinear;
//mat.SetVector(_Offset_hash, new Vector4(1, 0, 2, 0));
// horizontal blur
//Graphics.Blit(rt0, rt1, mat);
Graphics.Blit(rt0, rt1, mat, 0);
// After Graphics.Blit(source, destination) has run,
// if source is not re-acquired with GetTemporary, the game crashes (the Unity Editor crashes too)
RenderTexture.ReleaseTemporary(rt0);
rt0 = rt1;
rt1 = RenderTexture.GetTemporary(rw, rh, 0);
rt1.filterMode = FilterMode.Bilinear;
// vertical blur
//mat.SetVector(_Offset_hash, new Vector4(0, 1, 0, 2));
//Graphics.Blit(rt0, rt1, mat);
Graphics.Blit(rt0, rt1, mat, 1);
RenderTexture.ReleaseTemporary(rt0);
rt0 = rt1;
}
// Copy rt0 into destination
Graphics.Blit(rt0, destination);
RenderTexture.ReleaseTemporary(rt0);
}
private void InnerOnRenderImage2(RenderTexture source, RenderTexture destination)
{
if (mat == null)
{
Graphics.Blit(source, destination);
return;
}
var rw = Screen.width / downSample;
var rh = Screen.height / downSample;
var rt0 = RenderTexture.GetTemporary(rw, rh, 0);
var rt1 = RenderTexture.GetTemporary(rw, rh, 0);
rt0.filterMode = FilterMode.Bilinear;
rt1.filterMode = FilterMode.Bilinear;
// First copy source into rt0
Graphics.Blit(source, rt0);
for (int i = 0; i < iterations; i++)
{
mat.SetFloat(_BlurSize_hash, 1 + i * blurSpread);
//mat.SetVector(_Offset_hash, new Vector4(1, 0, 2, 0));
// horizontal blur
//Graphics.Blit(rt0, rt1, mat);
Graphics.Blit(rt0, rt1, mat, 0);
// After the blit above, the previous contents of both RTs are no longer needed
rt0.DiscardContents();
rt1.DiscardContents();
// vertical blur
//mat.SetVector(_Offset_hash, new Vector4(0, 1, 0, 2));
//Graphics.Blit(rt0, rt1, mat);
Graphics.Blit(rt1, rt0, mat, 1);
rt0.DiscardContents();
rt1.DiscardContents();
}
// Copy rt0 into destination
Graphics.Blit(rt0, destination);
RenderTexture.ReleaseTemporary(rt0);
RenderTexture.ReleaseTemporary(rt1);
}
}
I added a ReuseRT enum. The default, NoDiscardContent, keeps the original approach; selecting DiscardContent switches to the version that avoids the repeated GetTemporary/ReleaseTemporary calls, which keeps the code simpler and clearer.
Shader
// jave.lin 2020.03.18 - Gaussian blur
Shader "Custom/GaussianBlur" {
Properties {
_MainTex ("Texture", 2D) = "white" {}
_BlurSize ("BlurSize", Float) = 1
}
CGINCLUDE
#include "UnityCG.cginc"
struct appdata {
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f {
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
float4 uv01 : TEXCOORD1;
float4 uv23 : TEXCOORD2;
};
sampler2D _MainTex;
float4 _MainTex_TexelSize;
float _BlurSize;
// Using two separate vertex functions (one per blur direction) performs a little better
v2f vert_h (appdata v) {
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
float2 ts = _MainTex_TexelSize.xy;
float2 offset1 = float2(1, 0);
float2 offset2 = float2(2, 0);
o.uv01.xy = v.uv + offset1 * ts * _BlurSize; // +1 texel
o.uv01.zw = v.uv + offset1 * -ts * _BlurSize; // -1 texel
o.uv23.xy = v.uv + offset2 * ts * _BlurSize; // +2 texels
o.uv23.zw = v.uv + offset2 * -ts * _BlurSize; // -2 texels
return o;
}
v2f vert_v (appdata v) {
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
float2 ts = _MainTex_TexelSize.xy;
float2 offset1 = float2(0, 1);
float2 offset2 = float2(0, 2);
o.uv01.xy = v.uv + offset1 * ts * _BlurSize; // +1 texel
o.uv01.zw = v.uv + offset1 * -ts * _BlurSize; // -1 texel
o.uv23.xy = v.uv + offset2 * ts * _BlurSize; // +2 texels
o.uv23.zw = v.uv + offset2 * -ts * _BlurSize; // -2 texels
return o;
}
fixed4 frag (v2f i) : SV_Target {
fixed4 sum = tex2D(_MainTex, i.uv) * 0.4026;
sum += tex2D(_MainTex, i.uv01.xy) * 0.2442; // +1 texel tap
sum += tex2D(_MainTex, i.uv01.zw) * 0.2442; // -1 texel tap
sum += tex2D(_MainTex, i.uv23.xy) * 0.0545; // +2 texel tap
sum += tex2D(_MainTex, i.uv23.zw) * 0.0545; // -2 texel tap
return sum;
}
ENDCG
SubShader {
Cull Off ZWrite Off ZTest Always
Pass {
CGPROGRAM
#pragma vertex vert_h
#pragma fragment frag
ENDCG
}
Pass {
CGPROGRAM
#pragma vertex vert_v
#pragma fragment frag
ENDCG
}
}
}
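The C# scripts above assume mat already holds a material built from this shader. As a usage sketch (the GaussianBlurSetup component below is an illustration, not part of the original project), one way to create and assign that material at runtime:
using UnityEngine;
// Hypothetical setup helper (not in the original project): builds the blur material
// from the "Custom/GaussianBlur" shader above and hands it to GaussianBlurPP.
[RequireComponent(typeof(Camera))]
public class GaussianBlurSetup : MonoBehaviour
{
    private void Awake()
    {
        var shader = Shader.Find("Custom/GaussianBlur");
        if (shader == null || !shader.isSupported) return; // bail out if the shader is missing or unsupported
        var blur = GetComponent<GaussianBlurPP>();
        if (blur != null && blur.mat == null)
        {
            blur.mat = new Material(shader) { hideFlags = HideFlags.HideAndDontSave };
        }
    }
}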
Effect 2
This version uses a CommandBuffer.
The result is clearly wrong:
CSharp
That is because the CommandBuffer is used incorrectly.
Look at the code below:
using UnityEngine;
using UnityEngine.Rendering;
/// <summary>
/// jave.lin 2020.03.18
/// Reimplemented using a CommandBuffer
/// </summary>
public class GaussianBlurPP1 : MonoBehaviour
{
private static int _BlurSize_hash = Shader.PropertyToID("_BlurSize");
private static int _RT0_hash = Shader.PropertyToID("_RT0");
private static int _RT1_hash = Shader.PropertyToID("_RT1");
[Range(0, 4)]
public int iterations = 3;
[Range(0.2f, 3.0f)]
public float blurSpread = .6f;
[Range(1, 8)]
public int downSample = 2;
public Material mat;
private Camera cam;
private CommandBuffer cb;
private RenderTexture rt;
private void Start()
{
cam = GetComponent<Camera>();
cb = new CommandBuffer();
cb.name = "Before Image Effect - Gaussian Blur";
cam.AddCommandBuffer(CameraEvent.BeforeImageEffects, cb);
}
private void Update()
{
if (mat == null) return;
if (cb != null) cb.Clear();
var rw = Screen.width / downSample;
var rh = Screen.height / downSample;
if (rt == null || rt.width != rw || rt.height != rh)
{
if (rt != null) RenderTexture.ReleaseTemporary(rt); // release the old temporary RT before replacing it
rt = RenderTexture.GetTemporary(rw, rh, 0);
rt.filterMode = FilterMode.Bilinear;
}
cb.GetTemporaryRT(_RT0_hash, rw, rh, 0, FilterMode.Bilinear);
cb.GetTemporaryRT(_RT1_hash, rw, rh, 0, FilterMode.Bilinear);
// First copy the current color buffer into _RT0
cb.Blit(BuiltinRenderTextureType.CurrentActive, _RT0_hash);
for (int i = 0; i < iterations; i++)
{
// Note: every cb API call records a command into the buffer,
// but mat is a reference, so no matter how many times _BlurSize is set here,
// cb.Blit does not capture the material's parameter values at record time;
// the material's data is only read when the CameraEvent triggers the buffer's execution, so the shader sees only the last value written to _BlurSize.
// That is why the result differs from GaussianBlurPP.cs.
// It follows that a CommandBuffer is only suitable when the state it depends on does not change while the commands are being recorded.
mat.SetFloat(_BlurSize_hash, 1 + i * blurSpread);
// horizontal blur
cb.Blit(_RT0_hash, _RT1_hash, mat, 0);
// vertical blur
cb.Blit(_RT1_hash, _RT0_hash, mat, 1);
}
// Copy _RT0 into rt
cb.Blit(_RT0_hash, rt);
cb.ReleaseTemporaryRT(_RT1_hash);
cb.ReleaseTemporaryRT(_RT0_hash);
}
private void OnDestroy()
{
if (cb != null)
{
cam.RemoveCommandBuffer(CameraEvent.BeforeImageEffects, cb);
cb.Dispose();
cb = null;
}
if (rt != null)
{
RenderTexture.ReleaseTemporary(rt);
rt = null;
}
}
private void OnRenderImage(RenderTexture source, RenderTexture destination)
{
if (mat == null) Graphics.Blit(source, destination);
else
{
Graphics.Blit(rt, destination, mat, 2);
}
}
}
Pay close attention to the comment inside the for loop:
// Note: every cb API call records a command into the buffer,
// but mat is a reference, so no matter how many times _BlurSize is set here,
// cb.Blit does not capture the material's parameter values at record time;
// the material's data is only read when the CameraEvent triggers the buffer's execution, so the shader sees only the last value written to _BlurSize.
// That is why the result differs from GaussianBlurPP.cs.
// It follows that a CommandBuffer is only suitable when the state it depends on does not change while the commands are being recorded.
How should we understand this? Open the Frame Debugger and compare Effect 1 with Effect 2.
Below is the draw sequence of the correct Effect 1: _BlurSize changes with every for iteration, because the blits are issued immediately and use the parameter state at the moment each draw is made.
Below is the incorrect CommandBuffer version: _BlurSize stays at 10 the whole time. As the comment above explains, a CommandBuffer is executed later; exactly when is decided by the CameraEvent you pass to Camera.AddCommandBuffer(CameraEvent, CommandBuffer). In this example it runs at CameraEvent.BeforeImageEffects.
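As an aside, if you did want the per-iteration _BlurSize to survive the deferred execution, one possible workaround (a sketch only, not what this article's project does; iterMats is an illustrative local) is to record each blit against its own Material instance, so every recorded command references a material whose _BlurSize is never overwritten later:
// Sketch only: replaces the for loop in GaussianBlurPP1.Update with per-iteration material clones.
Material[] iterMats = new Material[iterations];
for (int i = 0; i < iterations; i++)
{
    iterMats[i] = new Material(mat); // clone the shared blur material for this iteration
    iterMats[i].SetFloat(_BlurSize_hash, 1 + i * blurSpread);
    // horizontal blur
    cb.Blit(_RT0_hash, _RT1_hash, iterMats[i], 0);
    // vertical blur
    cb.Blit(_RT1_hash, _RT0_hash, iterMats[i], 1);
}
// Remember to Destroy() the clones whenever the buffer is rebuilt, otherwise they leak.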
Effect 3
Now that we understand how a CommandBuffer behaves, let's use one properly and try the following: control which pixels get blurred, i.e. mask out the pixels that should stay sharp.
Duplicate the GaussianBlurPP.cs script, rename it to GaussianBlurPP_Mask_Blur.cs, and add the CommandBuffer handling.
For a brief introduction to Command Buffers you can refer to my translated article: Unity Graphics Command Buffers 图形指令缓存
The code is as follows:
CSharp
using UnityEngine;
using UnityEngine.Rendering;
/// <summary>
/// jave.lin 2020.03.19
/// Blur post-effect that masks out selected pixels
/// </summary>
public class GaussianBlurPP_Mask_Blur : MonoBehaviour
{
private static int _BlurSize_hash = Shader.PropertyToID("_BlurSize");
private static int _SrcTex_hash = Shader.PropertyToID("_SrcTex");
[Range(0, 4)]
public int iterations = 3; // number of Gaussian blur iterations
[Range(0.2f, 3.0f)]
public float blurSpread = .6f; // scale factor for the texel sampling distance per iteration
[Range(1, 8)]
public int downSample = 2; // downsample factor for the blur textures
public Material mat; // post-processing material
public GameObject selectedBlurObj; // the selected object whose renderers are drawn into the mask
public Material maskMat; // material used to draw the object into the mask
private Camera cam; // camera
private CommandBuffer cb; // CommandBuffer that records the low-level commands once, in Start
private void Start()
{
cam = GetComponent<Camera>();
cam.depthTextureMode |= DepthTextureMode.Depth;
cb = new CommandBuffer();
cb.name = "Before Image Effect - Gaussian Blur Mask"; // jave.lin:给cb其名字,在FrameDebugger可以方便查看
// jave.lin:给纹理的名字弄个hash,后面API需要,也可以拿这个hash当做RT来使用RenderTargetIdentifier有隐式转换
int _MaskTex_hash = Shader.PropertyToID("_MaskTex");
// jave.lin:二次线性滤波采样,只用RG两通道,每个通道16 bits。因为有用到深度信息,所以我用大一些的位数格式
cb.GetTemporaryRT(_MaskTex_hash, -1, -1, 0, FilterMode.Bilinear, RenderTextureFormat.RG32);
// jave.lin:设置渲染目标
cb.SetRenderTarget(_MaskTex_hash);
// jave.lin:先对渲染目标清理内容
cb.ClearRenderTarget(true, true, new Color(0, 1, 0, 0)); // 默认ColorBuffer的R通道为0,记录Mask,G通道为1,记录depth
// jave.lin:将需要绘制的对象的各个Renderer都绘制到渲染目标上
var renders = selectedBlurObj.GetComponentsInChildren<Renderer>(true);
// jave.lin:所有Renderer都绘制到_MaskTex纹理上
foreach (var r in renders) cb.DrawRenderer(r, maskMat);
// jave.lin:给全局shader设置_MaskTex纹理对象
cb.SetGlobalTexture(_MaskTex_hash, _MaskTex_hash);
// jave.lin:command buffer指令缓存执行,添加在:ImageEfefcts执行前(就所有MonoBehaviour.OnRenderImages函数执行前)
cam.AddCommandBuffer(CameraEvent.BeforeImageEffects, cb);
}
private void OnDestroy()
{
if (cb != null)
{
cam.RemoveCommandBuffer(CameraEvent.BeforeImageEffects, cb);
cb.Dispose();
cb = null;
}
}
private void OnRenderImage(RenderTexture source, RenderTexture destination)
{
if (mat == null)
{
Graphics.Blit(source, destination);
return;
}
var rw = Screen.width / downSample;
var rh = Screen.height / downSample;
var rt0 = RenderTexture.GetTemporary(rw, rh, 0);
var rt1 = RenderTexture.GetTemporary(rw, rh, 0);
rt0.filterMode = FilterMode.Bilinear;
rt1.filterMode = FilterMode.Bilinear;
rt0.name = "rt0";
rt1.name = "rt1";
// First copy source into rt0
Graphics.Blit(source, rt0);
for (int i = 0; i < iterations; i++)
{
mat.SetFloat(_BlurSize_hash, 1 + i * blurSpread);
// horizontal blur
Graphics.Blit(rt0, rt1, mat, 0);
rt0.DiscardContents();
rt1.DiscardContents();
// vertical blur
Graphics.Blit(rt1, rt0, mat, 1);
rt0.DiscardContents();
rt1.DiscardContents();
}
// Composite rt0 with the original source into destination (pass 2)
mat.SetTexture(_SrcTex_hash, source);
Graphics.Blit(rt0, destination, mat, 2);
RenderTexture.ReleaseTemporary(rt0);
RenderTexture.ReleaseTemporary(rt1);
}
}
Shader
There are two shaders here.
Below is the shader that draws the mask:
// jave.lin 2020.03.18 draws the mask content
Shader "Custom/DrawMask" {
SubShader {
Tags { "RenderType"="Opaque" }
Pass {
ZWrite off
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct v2f {
float4 vertex : SV_POSITION;
float linear01Depth : TEXCOORD1;
};
v2f vert (float4 vertex : POSITION) {
v2f o = (v2f)0;
o.vertex = UnityObjectToClipPos(vertex);
o.linear01Depth = -(UnityObjectToViewPos(vertex).z * _ProjectionParams.w);
return o;
}
fixed4 frag (v2f i) : SV_Target {
return fixed4(1, i.linear01Depth, 0, 0);
}
ENDCG
}
}
}
The other is the post-effect shader that consumes the mask RT output by the shader above:
// jave.lin 2020.03.18 - Gaussian blur with mask
Shader "Custom/MaskGaussianBlur" {
Properties {
_MainTex ("Texture", 2D) = "white" {}
_BlurSize ("BlurSize", Float) = 1
_MaskDepthThreshold ("MaskDepthThreshold", Range(0, 0.1)) = 0.06
}
CGINCLUDE
#include "UnityCG.cginc"
struct appdata {
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f {
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
float4 uv01 : TEXCOORD1;
float4 uv23 : TEXCOORD2;
};
sampler2D _MainTex;
float4 _MainTex_TexelSize;
float _BlurSize;
sampler2D _MaskTex;
sampler2D _SrcTex;
sampler2D _CameraDepthTexture;
float _MaskDepthThreshold;
v2f vert_h (appdata v) {
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
float2 ts = _MainTex_TexelSize.xy;
float2 offset1 = float2(1, 0);
float2 offset2 = float2(2, 0);
o.uv01.xy = v.uv + offset1 * ts * _BlurSize; // +1 texel
o.uv01.zw = v.uv + offset1 * -ts * _BlurSize; // -1 texel
o.uv23.xy = v.uv + offset2 * ts * _BlurSize; // +2 texels
o.uv23.zw = v.uv + offset2 * -ts * _BlurSize; // -2 texels
return o;
}
v2f vert_v (appdata v) {
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
float2 ts = _MainTex_TexelSize.xy;
float2 offset1 = float2(0, 1);
float2 offset2 = float2(0, 2);
o.uv01.xy = v.uv + offset1 * ts * _BlurSize; // +1 texel
o.uv01.zw = v.uv + offset1 * -ts * _BlurSize; // -1 texel
o.uv23.xy = v.uv + offset2 * ts * _BlurSize; // +2 texels
o.uv23.zw = v.uv + offset2 * -ts * _BlurSize; // -2 texels
return o;
}
fixed4 frag (v2f i) : SV_Target {
fixed4 sum = tex2D(_MainTex, i.uv) * 0.4026;
sum += tex2D(_MainTex, i.uv01.xy) * 0.2442; // +1 texel tap
sum += tex2D(_MainTex, i.uv01.zw) * 0.2442; // -1 texel tap
sum += tex2D(_MainTex, i.uv23.xy) * 0.0545; // +2 texel tap
sum += tex2D(_MainTex, i.uv23.zw) * 0.0545; // -2 texel tap
return sum;
}
float4 vert_blit_mask (appdata v, out float2 o_uv : TEXCOORD0) : SV_POSITION { o_uv = v.uv; return UnityObjectToClipPos(v.vertex); }
fixed4 frag_blit_mask (float4 vertex : SV_POSITION, float2 uv : TEXCOORD0) : SV_Target {
float buff = Linear01Depth(SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, uv));
float4 maskInfo = tex2D(_MaskTex, uv);
int mask = maskInfo.r;
float maskDepth = maskInfo.g;
// return maskDepth;
// return buff;
// return lerp(tex2D(_MainTex, uv), tex2D(_SrcTex, uv), mask > 0 && abs(buff - maskDepth) < _MaskDepthThreshold);
if (mask > 0) return lerp(tex2D(_SrcTex, uv), tex2D(_MainTex, uv), smoothstep(0, _MaskDepthThreshold, abs(buff - maskDepth)));
else {
return tex2D(_MainTex, uv);
}
}
ENDCG
SubShader {
Cull Off ZWrite Off ZTest Always
Pass {
CGPROGRAM
#pragma vertex vert_h
#pragma fragment frag
ENDCG
}
Pass {
CGPROGRAM
#pragma vertex vert_v
#pragma fragment frag
ENDCG
}
Pass {
CGPROGRAM
#pragma vertex vert_blit_mask
#pragma fragment frag_blit_mask
ENDCG
}
}
}
First let's look at the _MaskTex drawn by the CommandBuffer:
The color buffer is cleared so that R defaults to 0 (mask) and G to 1 (depth); this is the resulting RG texture.
R channel: mask, G channel: depth
Runtime result: (the character's pixels are kept sharp by combining the mask coverage in x/y with the depth comparison)
Project
backup : UnityShader_PP_GaussianBlur_IncludeMask_2018.3.0f2
References
- The Gaussian blur implementation follows 《Unity Shader 入门精要》, section 12.4
- Unity Graphics Command Buffers 图形指令缓存
- Graphics Command Buffers
- Using Command Buffers in Unity: Selective Bloom
- Unity3d中渲染到RenderTexture的原理,几种方式以及一些问题 (see this article for the usage of RenderTexture.DiscardContents())