介绍

之前学习过入门精要,之后就很少接触了,现在接触URP,再学习一遍入门精要也顺便学习下HLSL,
主要是对着链接中的URP HLSL入门学习进行学习,会有一定自己的扩展

基础光照模型

基础公式

  • Lambert: max(0,dot(L,N))
  • HalfLambert: max(0,dot(L,N)) * 0.5 + 0.5
  • Phong: pow(max(0,dot(reflect(-L,N), V)), Gloss)
  • BlinnPhong: pow(max(0,dot(normalize(L+V), N)), Gloss)

代码

  • Lambert / HalfLambert

    / HalfLambert
    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    // Half-Lambert diffuse shader for URP: lit by the main directional light only,
    // using halfLambert = dot(N, L) * 0.5 + 0.5 (see formula list above).
    Shader "Unlit/Lambert"
    {
    Properties
    {
    _MainTex ("Texture", 2D) = "white" {}
    _BaseColor ("BaseColor", Color) = (1,1,1,1)
    }
    SubShader
    {
    Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
    HLSLINCLUDE

    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

    TEXTURE2D(_MainTex);
    SAMPLER(sampler_MainTex);

    // Per-material constants grouped in UnityPerMaterial for SRP Batcher compatibility.
    CBUFFER_START(UnityPerMaterial)
    float4 _MainTex_ST;
    float4 _BaseColor;
    CBUFFER_END

    struct Attributes
    {
    float4 positionOS:POSITION;
    float4 normalOS:NORMAL;
    float2 uv : TEXCOORD0;
    };

    struct Varyings
    {
    float4 positionHS : SV_POSITION;
    float2 uv : TEXCOORD0;
    float3 normalWS:TEXCOORD1;
    };

    // Vertex: object -> clip space, apply _MainTex tiling/offset, pass world-space normal.
    Varyings Vert(Attributes i)
    {
    Varyings o;
    o.positionHS = TransformObjectToHClip(i.positionOS.xyz);
    o.uv = TRANSFORM_TEX(i.uv, _MainTex);
    o.normalWS = TransformObjectToWorldNormal(i.normalOS.xyz, true);
    return o;
    }

    // Fragment: texture * tint * half-Lambert * main light color.
    float4 Frag(Varyings i) :SV_Target{
    Light mylight = GetMainLight();
    real4 LightColor = real4(mylight.color, 1);
    float3 lightDir = normalize(mylight.direction);
    float lambert = dot(normalize(i.normalWS), lightDir);
    // Remap [-1,1] -> [0,1] so faces turned away from the light are not pure black.
    float halfLambert = lambert * 0.5f + 0.5f;

    float4 color = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv);
    return color * _BaseColor * halfLambert * LightColor;
    }

    ENDHLSL

    Pass
    {
    Tags{ "LightMode" = "UniversalForward" }
    HLSLPROGRAM
    #pragma vertex Vert
    #pragma fragment Frag
    ENDHLSL
    }
    }
    }

  • Phong / BlinnPhong

    / BlinnPhong
    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    // Phong / Blinn-Phong specular shader for URP. The fragment computes both terms
    // for comparison; only the Blinn-Phong result is used in the final color.
    Shader "Unlit/Phong"
    {
    Properties
    {
    _MainTex("Texture", 2D) = "white" {}
    _GlossColor("BaseColor", Color) = (1,1,1,1)
    _Gloss("Gloss", Range(1, 256)) = 1
    }
    SubShader
    {
    Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
    HLSLINCLUDE

    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

    TEXTURE2D(_MainTex);
    SAMPLER(sampler_MainTex);

    CBUFFER_START(UnityPerMaterial)
    float4 _MainTex_ST;
    float4 _GlossColor;
    float _Gloss;
    CBUFFER_END

    struct Attributes
    {
    float4 positionOS:POSITION;
    float4 normalOS:NORMAL;
    float2 uv : TEXCOORD0;
    };

    struct Varyings
    {
    float4 positionHS : SV_POSITION;
    float2 uv : TEXCOORD0;
    float3 normalWS:TEXCOORD1;
    float3 viewDirWS:TEXCOORD2;
    };

    Varyings Vert(Attributes i)
    {
    Varyings o;
    o.positionHS = TransformObjectToHClip(i.positionOS.xyz);
    o.uv = TRANSFORM_TEX(i.uv, _MainTex);
    o.viewDirWS = normalize(_WorldSpaceCameraPos.xyz - TransformObjectToWorld(i.positionOS.xyz));// world-space view direction (camera - vertex)
    o.normalWS = TransformObjectToWorldNormal(i.normalOS.xyz, true);
    return o;
    }

    float4 Frag(Varyings i) :SV_Target{
    Light mylight = GetMainLight();
    real4 LightColor = real4(mylight.color, 1);
    float3 lightDir = normalize(mylight.direction);
    float3 viewDir = normalize(i.viewDirWS);
    float3 worldNormal = normalize(i.normalWS);
    // Phong term computed for reference only; the output below uses blinnPhong.
    float phong = pow(max(dot(reflect(-lightDir, worldNormal), viewDir), 0), _Gloss);
    // Blinn-Phong: specular from the half vector H = normalize(L + V).
    float blinnPhong = pow(max(dot(normalize(lightDir + viewDir), worldNormal), 0), _Gloss);

    float4 col = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv);
    float4 diffuse = LightColor * col * max(dot(lightDir, worldNormal), 0);
    float4 specular = _GlossColor * blinnPhong * LightColor;
    return float4(specular.rgb + diffuse.rgb, col.a);
    }

    ENDHLSL

    Pass
    {
    Tags{ "LightMode" = "UniversalForward" }
    HLSLPROGRAM
    #pragma vertex Vert
    #pragma fragment Frag
    ENDHLSL
    }
    }
    }

总结

  • TransformObjectToHClip
  • TransformObjectToWorld
  • TransformObjectToWorldNormal
  • _WorldSpaceCameraPos
  • 光照信息: #include “Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl”
  • SubShader{HLSLINCLUDE … ENDHLSL} Pass{HLSLPROGRAM … ENDHLSL} (第一次写Pass中写成HLSLEINCLUDE了,没报错效果又一直是错的,注意了!!!)

法线贴图

基础简介

  1. 我们采用在世界坐标系下,在片元着色器中进行计算。
    定义顶点着色器拿到数据的结构体,我们需要顶点位置,uv,顶点法线,顶点切线

  2. 获得世界坐标系下的:顶点位置,法线,切线,副切线
    计算副切线时,叉乘法线与切线,再乘以切线的w值判断正负,再乘以负奇数缩放的影响因子 unity_WorldTransformParams.w。

    1
    o.BtangentWS= cross(o.normal.xyz,o.tangent.xyz) * i.tangent.w;
  3. 片元处理中采样法线贴图,得切线空间法线,在将其转换到世界空间

    1
    2
    3
    4
    5
    // Rows are tangent / bitangent / normal, so mul(v, T2W) maps tangent space -> world space.
    float3x3 T2W = {i.tangentWS.xyz,i.BtangentWS.xyz,i.normalWS.xyz}; 
    float4 norTex = SAMPLE_TEXTURE2D(_NormalTex, sampler_NormalTex, i.uv);
    float3 normalTS = UnpackNormalScale(norTex, _NormalScale); // FIX: was misspelled "nomralTS", breaking the lines below
    // Rebuild z from x/y without touching them; saturate guards against a negative
    // radicand (possible after _NormalScale scaling) which would produce NaN.
    normalTS.z = sqrt(saturate(1 - normalTS.x * normalTS.x - normalTS.y * normalTS.y));
    float3 normalWS = normalize(mul(normalTS,T2W));
  4. 使用带入法线贴图计算后的法线用于后续计算即可。

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
// Tangent-space normal-mapped Blinn-Phong shader for URP. The world-space position
// is packed into the spare .w channels of the TBN interpolators to save a TEXCOORD.
Shader "Unlit/Normal"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
[Normal]_NormalTex("NormalTex", 2D) = "bump"{}
_NormalScale("NormalScale", float) = 1
[HDR]_SpecularColor("SpecularColor", Color) = (1,1,1,1)
_Gloss("Gloss",Range(1,256)) = 1
}
SubShader
{
Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
HLSLINCLUDE

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_MainTex);
SAMPLER(sampler_MainTex);

TEXTURE2D(_NormalTex);
SAMPLER(sampler_NormalTex);

CBUFFER_START(UnityPerMaterial)
float4 _MainTex_ST;
float4 _NormalTex_ST;
float _NormalScale;
float4 _SpecularColor;
float _Gloss;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
float4 normalOS:NORMAL;
float4 tangentOS:TANGENT;
float2 uv : TEXCOORD0;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
// xy = _MainTex UV, zw = _NormalTex UV.
float4 uv : TEXCOORD0;
// The .w channels of normal/tangent/bitangent carry positionWS.zxy (see vert).
float4 normalWS:TEXCOORD1;
float4 tangentWS:TEXCOORD2;
float4 BtangentWS:TEXCOORD3;
};

Varyings vert(Attributes i)
{
Varyings o;
o.positionHS = TransformObjectToHClip(i.positionOS.xyz);
o.normalWS.xyz = normalize(TransformObjectToWorldNormal(i.normalOS.xyz));
// FIX: a tangent is a direction, so transform it with TransformObjectToWorldDir;
// TransformObjectToWorld is a point transform and would also apply translation.
o.tangentWS.xyz = normalize(TransformObjectToWorldDir(i.tangentOS.xyz));
// tangentOS.w carries handedness; unity_WorldTransformParams.w compensates for
// an odd number of negative scale axes.
o.BtangentWS.xyz = cross(o.normalWS.xyz, o.tangentWS.xyz) * i.tangentOS.w * unity_WorldTransformParams.w;

// Stash the world-space position in the spare .w channels.
float3 positionWS = TransformObjectToWorld(i.positionOS.xyz);
o.tangentWS.w = positionWS.x;
o.BtangentWS.w = positionWS.y;
o.normalWS.w = positionWS.z;

o.uv.xy = TRANSFORM_TEX(i.uv, _MainTex);
o.uv.zw = TRANSFORM_TEX(i.uv, _NormalTex);
return o;
}

float4 frag(Varyings i) : SV_Target
{
float4 norTex = SAMPLE_TEXTURE2D(_NormalTex, sampler_NormalTex, i.uv.zw);
float3 positionWS = float3(i.tangentWS.w, i.BtangentWS.w, i.normalWS.w);
// Rows are T/B/N, so mul(v, T2W) maps a tangent-space vector to world space.
float3x3 T2W = {i.tangentWS.xyz, i.BtangentWS.xyz, i.normalWS.xyz};

float3 normalTS = UnpackNormalScale(norTex, _NormalScale);
// FIX: saturate before the root — after scaling, x^2 + y^2 can exceed 1 and the
// old pow(negative, 0.5) produced NaN. Rebuilds z without touching x/y.
normalTS.z = sqrt(saturate(1 - normalTS.x * normalTS.x - normalTS.y * normalTS.y));
// FIX: renormalize; the interpolated TBN basis is not exactly orthonormal.
float3 normalWS = normalize(mul(normalTS, T2W));
float3 viewDirWS = normalize(_WorldSpaceCameraPos.xyz - positionWS);
Light myLight = GetMainLight();
float3 lightDir = normalize(myLight.direction);

float4 col = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv.xy);

// Half-Lambert diffuse, remapped to [0,1].
float halfLambert = dot(normalWS, lightDir) * 0.5f + 0.5f;
float3 diffuse = myLight.color * col.xyz * halfLambert;
// FIX: clamp the Blinn-Phong base; pow() with a negative base and a float
// exponent is undefined (NaN) and showed up as black/NaN speckles.
float3 specular = myLight.color * _SpecularColor.rgb * pow(max(dot(normalize(viewDirWS + lightDir), normalWS), 0), _Gloss);

return float4(diffuse + specular, col.a);
}

ENDHLSL
Pass
{
Tags{ "LightMode" = "UniversalForward" }
HLSLPROGRAM
#pragma vertex vert
#pragma fragment frag
ENDHLSL
}
}
}

总结

  • 规范化向量一般用normalize,在不想影响xy轴情况下可以使用勾股定理自己计算另一个轴的值
  • UnpackNormal,UnpackNormalScale
  • mul(), 矩阵相乘
  • 为了节省空间,可以将一些值藏在部分多余参数中,比如这次代码中,将世界空间坐标xyz分别写在切线,副切线,法线的w值上

渐变纹理

基础思路

不使用常规的模型uv,而是使用lambert/halflambert的值作为x轴或y轴,对渐变纹理图进行采样

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
// Ramp (gradient) shader for URP: the half-Lambert term is used as the U coordinate
// into _RampTex instead of the model's UVs, giving toon-style shading bands.
Shader "Unlit/Ramp"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_RampTex("RampTex", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
HLSLINCLUDE

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_MainTex);
SAMPLER(sampler_MainTex);

TEXTURE2D(_RampTex);
SAMPLER(sampler_RampTex);

CBUFFER_START(UnityPerMaterial)
float4 _MainTex_ST;
float4 _RampTex_ST;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
float4 normalOS:NORMAL;
float2 uv : TEXCOORD0;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
float2 uv : TEXCOORD0;
float3 normalWS :TEXCOORD1;
};

Varyings vert(Attributes i)
{
Varyings o;
o.positionHS = TransformObjectToHClip(i.positionOS);
o.uv = TRANSFORM_TEX(i.uv, _MainTex);
o.normalWS = normalize(TransformObjectToWorldNormal(i.normalOS));
return o;
}

float4 frag(Varyings i) : SV_Target
{
float4 col = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv);

Light light = GetMainLight();
float3 lightDir = normalize(light.direction);
float3 normalWS = normalize(i.normalWS);
float halfLambert = dot(lightDir, normalWS) * 0.5f + 0.5f;
// Sample the ramp along U with the lighting term; V is fixed at the texture's middle row.
float4 ramp = SAMPLE_TEXTURE2D(_RampTex, sampler_RampTex, float2(halfLambert, 0.5f));

return ramp * col * float4(light.color, 1);
}
ENDHLSL

// NOTE(review): this Pass has no "LightMode" tag (unlike the other shaders in this
// post, which tag "UniversalForward") — confirm it is intentionally left default.
Pass
{
HLSLPROGRAM
#pragma vertex vert
#pragma fragment frag
ENDHLSL
}
}
}

AlphaTest

基础简介

使用clip,在片元着色器对裁剪一些像素

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
// Alpha-test ("dissolve/burn") shader for URP: pixels whose red channel is below
// _Cutoff are clipped, and a thin band just above the cutoff is tinted _BurnColor.
Shader "Unlit/AlphaTest"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_BaseColor("BaseColor",Color) = (1,1,1,1)
[HDR]_BurnColor("BurnColor",Color) = (1,1,1,1)
_Cutoff ("Cutoff", Range(0,1)) = 0.5
}
SubShader
{
Tags { "RenderType" = "TransparentCutout" "RenderPipeline" = "UniversalPipeline" "Queue" = "AlphaTest" }
HLSLINCLUDE

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_MainTex);
SAMPLER(sampler_MainTex);

// NOTE(review): _RampTex is declared but never sampled in this shader — looks like
// a leftover from the Ramp example; confirm before removing.
TEXTURE2D(_RampTex);
SAMPLER(sampler_RampTex);

CBUFFER_START(UnityPerMaterial)
float4 _MainTex_ST;
float4 _BaseColor;
float4 _BurnColor;
float _Cutoff;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
float2 uv : TEXCOORD0;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
float2 uv : TEXCOORD0;
};

Varyings vert(Attributes i)
{
Varyings o;
o.positionHS = TransformObjectToHClip(i.positionOS);
o.uv = TRANSFORM_TEX(i.uv, _MainTex);
return o;
}

float4 frag(Varyings i) : SV_Target
{
float4 col = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv) * _BaseColor;

// step(_Cutoff, col.r) = (_Cutoff <= col.r ? 1 : 0)
// Discard the pixel when the step result is 0 (the -0.01 makes 0 strictly negative).
clip(step(_Cutoff, col.r) - 0.01);
// Surviving pixels with col.r within ~0.1 of the cutoff get the burn tint (edge glow).
col = lerp(col, _BurnColor, step(col.r, saturate(_Cutoff + 0.1))) ;
return col;
}
ENDHLSL

Pass
{
HLSLPROGRAM
#pragma vertex vert
#pragma fragment frag
ENDHLSL
}
}
}

总结

  • clip(), 参数小于0则裁剪
  • step(a,b) 等价于 a <= b ? 1 : 0, 用于优化shader代码中的if,else

AlphaBlend

基础简介

  1. 关闭深度写入
  2. 渲染队列,渲染类型设置成Transparent 透明的
  3. 设置Blend

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
// Alpha-blend shader for URP: color comes from _MainTex, opacity from _AlphaTex's
// alpha channel. Depth writes are off and standard SrcAlpha blending is enabled.
Shader "Unlit/AlphaBlend"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_AlphaTex("Texture", 2D) = "white"{}
_BaseColor("BaseColor",Color) = (1,1,1,1)
}
SubShader
{
Tags{

"RenderPipeline" = "UniversalRenderPipeline"

"IgnoreProjector" = "True"

"RenderType" = "Transparent"

"Queue" = "Transparent"

}
ZWrite Off
Blend SrcAlpha OneMinusSrcAlpha
HLSLINCLUDE

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_MainTex);
SAMPLER(sampler_MainTex);
TEXTURE2D(_AlphaTex);
SAMPLER(sampler_AlphaTex);

CBUFFER_START(UnityPerMaterial)
float4 _MainTex_ST;
float4 _AlphaTex_ST;
float4 _BaseColor;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
float2 uv : TEXCOORD0;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
// xy = _MainTex UV, zw = _AlphaTex UV (two UV sets packed in one interpolator).
float4 uv : TEXCOORD0;
};

Varyings vert(Attributes i)
{
Varyings o;
o.positionHS = TransformObjectToHClip(i.positionOS);
o.uv.xy = TRANSFORM_TEX(i.uv, _MainTex);
o.uv.zw = TRANSFORM_TEX(i.uv, _AlphaTex);
return o;
}

float4 frag(Varyings i) : SV_Target
{
float4 col = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv.xy) * _BaseColor;
// Opacity is driven solely by _AlphaTex's alpha; _MainTex alpha is discarded.
float alpha = SAMPLE_TEXTURE2D(_AlphaTex, sampler_AlphaTex, i.uv.zw).a;
return float4(col.rgb, alpha);
}
ENDHLSL

Pass
{
HLSLPROGRAM
#pragma vertex vert
#pragma fragment frag
ENDHLSL
}
}
}

总结

  • “IgnoreProjector” = “True”, 忽略投影器(Projector)对该物体的影响

多光源

基础

  • 首先需要获取多光源,通过GetAdditionalLightsCount(),GetAdditionalLight(index, positionWS)两个函数处理多光源
  • 将主光源计算后的颜色,叠加所有叠加光源颜色输出

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
// Multi-light shader for URP: main light half-Lambert plus an optional accumulation
// over all additional lights, toggled by the _ADD_LIGHT keyword enum.
Shader "Unlit/MulLight"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_BaseColor ("BaseColor", Color) = (1,1,1,1)
[KeywordEnum(ON,OFF)]_ADD_LIGHT("AddLight",float) = 1
}
SubShader
{
Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
HLSLINCLUDE

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_MainTex);
SAMPLER(sampler_MainTex);

CBUFFER_START(UnityPerMaterial)
float4 _MainTex_ST;
float4 _BaseColor;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
float4 normalOS:NORMAL;
float2 uv : TEXCOORD0;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
float2 uv : TEXCOORD0;
float3 normalWS:TEXCOORD1;
// World-space position, required by GetAdditionalLight for attenuation.
float3 positionWS:TEXCOORD2;
};

Varyings Vert(Attributes i)
{
Varyings o;
o.positionHS = TransformObjectToHClip(i.positionOS.xyz);
o.uv = TRANSFORM_TEX(i.uv, _MainTex);
o.normalWS = TransformObjectToWorldNormal(i.normalOS.xyz, true);
o.positionWS = TransformObjectToWorld(i.positionOS.xyz);
return o;
}

float4 Frag(Varyings i) :SV_Target{
// Main light: half-Lambert diffuse.
Light mylight = GetMainLight();
real4 LightColor = real4(mylight.color, 1);
float3 lightDir = normalize(mylight.direction);
float3 normalWS = normalize(i.normalWS);
float halfLambert = dot(normalWS, lightDir) * 0.5f + 0.5f;
float4 color = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv) * _BaseColor * halfLambert * LightColor;

// AddLight
float4 addLightColor = float4(0,0,0,1);

#if _ADD_LIGHT_ON
// Accumulate every additional (point/spot) light, weighted by its distance and
// shadow attenuation, using the same half-Lambert term.
int lightCount = GetAdditionalLightsCount();
for (int index = 0; index < lightCount; index++)
{
Light light = GetAdditionalLight(index, i.positionWS);
addLightColor += (dot(normalWS, normalize(light.direction)) * 0.5f + 0.5f)
* real4(light.color, 1) * light.distanceAttenuation * light.shadowAttenuation;
}
#endif

return color + addLightColor;
}

ENDHLSL

Pass
{
Tags{ "LightMode" = "UniversalForward" }
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment Frag
// Keyword pattern: propertyName_enumName; both enum options must be listed.
#pragma shader_feature _ADD_LIGHT_ON _ADD_LIGHT_OFF
ENDHLSL
}
}
}

总结

  • shader枚举开关:
    [KeywordEnum(ON,OFF)]_ADD_LIGHT(“AddLight”,float) = 1 //定义shader中的枚举 只有ON,OFF两个选项
    #pragma shader_feature _ADD_LIGHT_ON _ADD_LIGHT_OFF //定义shader_feature 规则:参数名_枚举名 (需要把所有定义的选项都放进去)
    #if _ADD_LIGHT_ON … #endif 使用

阴影投射和接收

基础

  1. 投射
    使用官方写好的阴影投射Pass UsePass “Universal Render Pipeline/Lit/ShadowCaster”
    使用官方写好的 不支持SRP Batcher, 因此自己写阴影投射Pass
    参考”Packages/com.unity.render-pipelines.universal/Shaders/ShadowCasterPass.hlsl”

  2. 接收
    TransformWorldToShadowCoord(i.positionWS) //获得shadowcoord
    GetMainLight(shadowcoord).shadowAttenuation //获得阴影值
    #pragma multi_compile _ _MAIN_LIGHT_SHADOWS //开启阴影
    #pragma multi_compile _ _MAIN_LIGHT_SHADOWS_CASCADE //级联阴影
    #pragma multi_compile _ _SHADOWS_SOFT //柔化阴影,得到软阴影

  3. 额外光源阴影接收

代码

  • 主光源阴影接收Shader

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    // Blinn-Phong shader that also RECEIVES main-light shadows (via the shadow coord
    // overload of GetMainLight) and CASTS them via the built-in Lit ShadowCaster pass.
    Shader "Unlit/Shadow"
    {
    Properties
    {
    _MainTex("Texture", 2D) = "white" {}
    _GlossColor("BaseColor", Color) = (1,1,1,1)
    _Gloss("Gloss", Range(1, 256)) = 1
    }
    SubShader
    {
    Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
    HLSLINCLUDE

    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

    TEXTURE2D(_MainTex);
    SAMPLER(sampler_MainTex);

    CBUFFER_START(UnityPerMaterial)
    float4 _MainTex_ST;
    float4 _GlossColor;
    float _Gloss;
    CBUFFER_END

    struct Attributes
    {
    float4 positionOS:POSITION;
    float4 normalOS:NORMAL;
    float2 uv : TEXCOORD0;
    };

    struct Varyings
    {
    float4 positionHS : SV_POSITION;
    float2 uv : TEXCOORD0;
    float3 normalWS:TEXCOORD1;
    float3 positionWS:TEXCOORD2;
    };

    Varyings Vert(Attributes i)
    {
    Varyings o;
    o.positionHS = TransformObjectToHClip(i.positionOS.xyz);
    o.uv = TRANSFORM_TEX(i.uv, _MainTex);
    o.positionWS = TransformObjectToWorld(i.positionOS.xyz);
    o.normalWS = TransformObjectToWorldNormal(i.normalOS.xyz, true);
    return o;
    }

    float4 Frag(Varyings i) :SV_Target{
    // Passing a shadow coord makes GetMainLight fill in shadowAttenuation.
    Light mylight = GetMainLight(TransformWorldToShadowCoord(i.positionWS));
    real4 LightColor = real4(mylight.color, 1);
    float3 lightDir = normalize(mylight.direction);
    float3 viewDir = normalize(_WorldSpaceCameraPos.xyz - i.positionWS);
    float3 worldNormal = normalize(i.normalWS);
    // phong is computed for reference only; the output uses blinnPhong.
    float phong = pow(max(dot(reflect(-lightDir, worldNormal), viewDir), 0), _Gloss);
    float blinnPhong = pow(max(dot(normalize(lightDir + viewDir), worldNormal), 0), _Gloss);

    float4 col = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv);
    // Both diffuse and specular are attenuated by the received shadow.
    float4 diffuse = LightColor * col * max(dot(lightDir, worldNormal), 0) * mylight.shadowAttenuation;
    float4 specular = _GlossColor * blinnPhong * LightColor * mylight.shadowAttenuation;
    return float4(specular.rgb + diffuse.rgb, col.a);
    }

    ENDHLSL

    Pass
    {
    Tags{ "LightMode" = "UniversalForward" }
    HLSLPROGRAM
    #pragma vertex Vert
    #pragma fragment Frag
    #pragma multi_compile _ _MAIN_LIGHT_SHADOWS // enable main-light shadow sampling
    #pragma multi_compile _ _MAIN_LIGHT_SHADOWS_CASCADE // cascaded shadow maps
    #pragma multi_compile _ _SHADOWS_SOFT // soft (filtered) shadows
    ENDHLSL
    }
    // Cast shadows with URP's built-in pass (note: UsePass breaks SRP Batcher compatibility).
    UsePass "Universal Render Pipeline/Lit/ShadowCaster"
    }
    }
  • 额外光源阴影接收Shader

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    // AddLight
    // Fragment-shader snippet: additional lights WITH shadow/occlusion support.
    // Resolve the shadow mask first, then pass it to GetAdditionalLight so
    // light.shadowAttenuation is populated for each additional light.
    float4 addLightColor = float4(0, 0, 0, 1);

    #if _ADD_LIGHT_ON
    // NOTE(review): `inputData` must be provided by the surrounding shader
    // (e.g. a URP InputData struct) — it is not defined in this snippet.
    #if defined(SHADOWS_SHADOWMASK) && defined(LIGHTMAP_ON)
    half4 shadowMask = inputData.shadowMask;
    #elif !defined (LIGHTMAP_ON)
    half4 shadowMask = unity_ProbesOcclusion;
    #else
    half4 shadowMask = half4(1, 1, 1, 1);
    #endif

    int lightCount = GetAdditionalLightsCount();
    for (int index = 0; index < lightCount; index++)
    {
    // The shadowMask overload fills in per-light shadow attenuation.
    Light light = GetAdditionalLight(index, i.positionWS, shadowMask);
    addLightColor += (dot(normalWS, normalize(light.direction)) * 0.5f + 0.5f)
    * real4(light.color, 1) * light.distanceAttenuation * light.shadowAttenuation;
    }
    #endif
  • 主光源投射阴影

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    // Reference: "Packages/com.unity.render-pipelines.universal/Shaders/ShadowCasterPass.hlsl"
    // Hand-written shadow-caster pass (instead of UsePass) so SRP Batcher compatibility is kept.
    // NOTE(review): relies on Attributes/Varyings/_MainTex declared in the surrounding shader.
    Pass
    {
    Name "ShadowCaster"
    Tags{"LightMode" = "ShadowCaster"}

    ZWrite On
    ZTest LEqual
    ColorMask 0

    HLSLPROGRAM
    #pragma vertex VertShadowCaster
    #pragma fragment FragShadowCaster

    Varyings VertShadowCaster(Attributes i)
    {
    Varyings o;
    o.uv = TRANSFORM_TEX(i.uv, _MainTex);
    float3 positionWS = TransformObjectToWorld(i.positionOS.xyz);
    float3 normalWS = TransformObjectToWorldNormal(i.normalOS.xyz, true);
    Light light = GetMainLight();
    // Offset the position along the light/normal (shadow bias) to reduce shadow acne.
    o.positionHS = TransformWorldToHClip(ApplyShadowBias(positionWS, normalWS, light.direction.xyz));

    // Clamp z so the biased position never leaves the light's clip volume
    // (direction of the clamp depends on the platform's depth convention).
    #if UNITY_REVERSED_Z
    o.positionHS.z = min(o.positionHS.z, o.positionHS.w * UNITY_NEAR_CLIP_VALUE);
    #else
    o.positionHS.z = max(o.positionHS.z, o.positionHS.w * UNITY_NEAR_CLIP_VALUE);
    #endif
    return o;
    }

    // Only depth matters for the shadow map; color output is ignored (ColorMask 0).
    half4 FragShadowCaster(Varyings i) :SV_Target
    {
    return 0;
    }
    ENDHLSL
    }

总结

  • 查看源码思路,最终需要得到阴影值,从Light中看到shadowAttenuation是我们需要的
    通过 GetMainLight(shadowcoord) 或者 GetMainLight(float4 shadowCoord, float3 positionWS, half4 shadowMask) 获得的阴影会被赋值
    调用了Shadow.hlsl中的 MainLightRealtimeShadow 以及 MainLightShadow
    查看其中判断的宏可以发现需要 MAIN_LIGHT_CALCULATE_SHADOWS, 全局搜索发现开启 _MAIN_LIGHT_SHADOWS 后会定义MAIN_LIGHT_CALCULATE_SHADOWS
    函数往里面跟进可以看到_MAIN_LIGHT_SHADOWS_CASCADE _SHADOWS_SOFT 还有其他一些宏,看使用情况开启即可

GetMainLight需要参数 shadowcoord, 在Shadow.hlsl 查找shadowcoord 可以找到几个函数,
再看参数和调用地方,可以确认TransformWorldToShadowCoord函数我们可以直接使用

  • 初次看URP源码,先记录下在初次使用时寻找关键函数的思路

序列帧

基础

  • 将一张序列帧图片分块,按块采样显示,间隔一定时间切换下一块

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
// Sprite-sheet (sequence frame) shader for URP: _Sheet.xy is the grid size
// (columns, rows); _FrameRate frames are stepped per second using _Time.
Shader "Unlit/SequenceFrame"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_FrameRate("FrameRate",float) = 10
_Sheet("Sheet",Vector) = (1,1,1,1)
}
SubShader
{
Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
HLSLINCLUDE

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_MainTex);
SAMPLER(sampler_MainTex);

CBUFFER_START(UnityPerMaterial)
float4 _MainTex_ST;
float _FrameRate;
float4 _Sheet;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
float2 uv : TEXCOORD0;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
float2 uv : TEXCOORD0;
};

Varyings Vert(Attributes i)
{
Varyings o;
o.positionHS = TransformObjectToHClip(i.positionOS.xyz);
o.uv = TRANSFORM_TEX(i.uv, _MainTex);
return o;
}

float4 Frag(Varyings i) :SV_Target{
float2 uv = 0;
// floor(_Time.y * _FrameRate) is the current frame index; frac(.../columns) picks
// the column offset, advancing one cell per frame and wrapping each row.
uv.x = i.uv.x / _Sheet.x + frac(floor(_Time.y * _FrameRate) / _Sheet.x);
// Rows advance every _Sheet.x frames; "1 -" flips V because the sheet is read
// top-to-bottom while UV (0,0) is the bottom-left corner.
uv.y = i.uv.y / _Sheet.y + 1 - frac(floor(_Time.y * _FrameRate / _Sheet.x) / _Sheet.y);
return SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv);
}

ENDHLSL

Pass
{
Tags{ "LightMode" = "UniversalForward" }
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment Frag
ENDHLSL
}
}
}

总结

  • frac函数:取小数,frac(x) = x - (int)x;
  • _Time获取变化时间
  • 图片左下角为(0,0)点,因此y轴需要反转一下

广告牌

基础

实现效果:正方向始终朝向相机
思路:顶点着色器变换顶点坐标,使得渲染出来的模型朝向相机

  1. 将所有操作都放在模型空间,使用模型空间坐标作为锚点,则锚点为(0,0,0)
  2. 根据顶点坐标的(x,y,z)重新计算顶点坐标 pos = center + right * x + up * y + z * fwd;
  3. 需要得出right, up, fwd,fwd为朝向相机的方向,通过相机朝向可以推出,需要求right和up,
    假设我们的广告牌正方向都朝上则up = (0, 1, 0), right = cross(up, fwd)
    再重新计算up = cross(fwd, right) 得到up,right,fwd, 将计算结果作为模型空间顶点坐标进行其他计算即可

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
// Billboard shader: rebuilds each vertex in object space from a camera-facing
// basis so the quad always faces the camera, then shades with half-Lambert.
Shader "Unlit/ADS"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_BaseColor ("BaseColor", Color) = (1,1,1,1)
}
SubShader
{
// FIX: the tag block declared "RenderType" = "Transparent" twice and never set the
// queue; with ZWrite Off + alpha blending the object must render in the
// Transparent queue (after opaques) to sort correctly.
Tags { "RenderType" = "Transparent" "Queue" = "Transparent" "RenderPipeline" = "UniversalPipeline"}
ZWrite Off
Blend SrcAlpha OneMinusSrcAlpha
Cull Off
HLSLINCLUDE

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_MainTex);
SAMPLER(sampler_MainTex);

CBUFFER_START(UnityPerMaterial)
float4 _MainTex_ST;
float4 _BaseColor;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
float4 normalOS:NORMAL;
float2 uv : TEXCOORD0;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
float2 uv : TEXCOORD0;
float3 normalWS:TEXCOORD1;
};

Varyings Vert(Attributes i)
{
Varyings o;

// Rebuild the vertex position so the billboard faces the camera. All work is done
// in object space with the object's origin (0,0,0) as the anchor point.
float3 cameraPosOS = TransformWorldToObject(_WorldSpaceCameraPos.xyz);
float3 fwd = normalize(cameraPosOS);
// Pick a provisional up axis that is not parallel to fwd to keep the cross products stable.
float3 up = abs(fwd.y) < 0.99f ? float3(0, 1, 0) : float3(0, 0, 1);
float3 right = normalize(cross(up, fwd));
// Re-derive up so {right, up, fwd} is orthonormal.
up = normalize(cross(fwd, right));
float3x3 Matrix = { right, up, fwd };
float3 posOS = mul(i.positionOS.xyz, Matrix); // = i.positionOS.x * right + i.positionOS.y * up + i.positionOS.z * fwd

o.positionHS = TransformObjectToHClip(posOS);
o.uv = TRANSFORM_TEX(i.uv, _MainTex);
o.normalWS = TransformObjectToWorldNormal(i.normalOS.xyz, true);
return o;
}

// Half-Lambert diffuse from the main light, same as the Lambert example.
float4 Frag(Varyings i) :SV_Target{
Light mylight = GetMainLight();
real4 LightColor = real4(mylight.color, 1);
float3 lightDir = normalize(mylight.direction);
float lambert = dot(normalize(i.normalWS), lightDir);
float halfLambert = lambert * 0.5f + 0.5f;

float4 color = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv);
return color * _BaseColor * halfLambert * LightColor;
}

ENDHLSL

Pass
{
Tags{ "LightMode" = "UniversalForward" }
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment Frag
ENDHLSL
}
}
}

总结

  • 广告牌核心思想就是做顶点变换
  • 使用cross叉乘得垂直向量,unity中的叉乘使用左手法则

玻璃效果

基础

  • 首先需要抓屏,Build-in中通过grab pass或者传入RT,
    URP中_CameraColorTexture得到当前屏幕同等分辨率的图像,它在opaque模型和skybox渲染完成之后抓取
    通过:TEXTURE2D(_CameraColorTexture);SAMPLER(sampler_CameraColorTexture);获取
    使用_CameraColorTexture必须在URP设置中打开Opaque Texture选项
    设置Opaque

  • 采样屏幕图像需要屏幕坐标:ComputeScreenPos(positionCS);
    i.screenPos.xy / i.screenPos.w; //获取屏幕UV,需要做齐次除法

  • 应用法线,对采样点进行偏移,可以使用世界空间法线或者切线空间法线
    世界空间的法线由世界空间确定,会随着模型的旋转而变化;
    切线空间的法线不随着模型的旋转而变换;

  • 由于_CameraColorTextrue在Opaque之后,但在Transparent之前,因此透明物体无法显示
    因此可以利用RenderFeature,Render Objects设置一个”LightMode”=”Grab”,在透明物体之后执行的透明队列
    设置RenderFeature

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
// Glass/refraction shader for URP: samples _CameraColorTexture (the opaque grab)
// at screen UVs offset by a normal map, in world or tangent space per keyword.
Shader "Unlit/Glass"
{
Properties
{
_NormalTex("Normal",2D) = "bump"{}
_NormalScale("NormalScale",Range(0,1)) = 1
_BaseColor("BaseColor",Color) = (1,1,1,1)
_Amount("amount",float) = 100
[KeywordEnum(WS_N,TS_N)]_NORMAL_STAGE("NormalStage",float) = 1
}
SubShader
{
// FIX: "RenderType" = "Transparent" was declared twice; the second copy is replaced
// with the Transparent queue so the grab pass runs after opaques are drawn.
Tags { "RenderType" = "Transparent" "Queue" = "Transparent" "RenderPipeline" = "UniversalPipeline" }
HLSLINCLUDE

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_NormalTex);
SAMPLER(sampler_NormalTex);
// FIX: _CameraColorTexture was declared with SAMPLER() (a SamplerState) but sampled
// with DX9-style tex2D(); declare it as a texture + sampler pair and sample it with
// SAMPLE_TEXTURE2D, consistent with the rest of this file.
TEXTURE2D(_CameraColorTexture);
SAMPLER(sampler_CameraColorTexture);
float4 _CameraColorTexture_TexelSize; // not per-material, so it must stay outside the CBUFFER

CBUFFER_START(UnityPerMaterial)
float4 _NormalTex_ST;
float _NormalScale;
float _Amount;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
float4 normalOS:NORMAL;
float4 tangentOS:TANGENT;
float2 uv : TEXCOORD0;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
float2 uv : TEXCOORD0;
// .w of normal/tangent/bitangent carries the world-space position (z/x/y).
float4 normalWS:TEXCOORD1;
float4 tangentWS:TEXCOORD2;
float4 BtangentWS:TEXCOORD3;
float4 screenPos:TEXCOORD4;
};

Varyings vert(Attributes i)
{
Varyings o;
// URP helpers from ShaderVariablesFunctions.hlsl (pulled in by Core.hlsl) compute
// the position and TBN transforms in one call each.
VertexPositionInputs input = GetVertexPositionInputs(i.positionOS.xyz);
o.positionHS = input.positionCS;

VertexNormalInputs normalInput = GetVertexNormalInputs(i.normalOS, i.tangentOS);
o.normalWS.xyz = normalInput.normalWS;
o.tangentWS.xyz = normalInput.tangentWS;
o.BtangentWS.xyz = normalInput.bitangentWS;

// Stash the world-space position in the spare .w channels.
o.tangentWS.w = input.positionWS.x;
o.BtangentWS.w = input.positionWS.y;
o.normalWS.w = input.positionWS.z;

o.screenPos = ComputeScreenPos(input.positionCS);

o.uv = TRANSFORM_TEX(i.uv, _NormalTex);
return o;
}

float4 frag(Varyings i) : SV_Target
{
float4 norTex = SAMPLE_TEXTURE2D(_NormalTex, sampler_NormalTex, i.uv);
float3 normalTS = UnpackNormalScale(norTex, _NormalScale);
// FIX: saturate before the root — pow(negative, 0.5) is NaN when x^2 + y^2 > 1.
normalTS.z = sqrt(saturate(1 - normalTS.x * normalTS.x - normalTS.y * normalTS.y));

#if _NORMAL_STAGE_WS_N
float3 positionWS = float3(i.tangentWS.w, i.BtangentWS.w, i.normalWS.w);
float3x3 T2W = { i.tangentWS.xyz, i.BtangentWS.xyz, i.normalWS.xyz };
// FIX: renormalize after the interpolated (non-orthonormal) basis transform.
float3 normalWS = normalize(mul(normalTS, T2W));
// World-space normals rotate with the model, so the distortion rotates too.
float2 SS_bias = normalWS.xy * _Amount * _CameraColorTexture_TexelSize.xy;
#else
// Tangent-space normals do not rotate with the model.
float2 SS_bias = normalTS.xy * _Amount * _CameraColorTexture_TexelSize.xy;
#endif
float2 SS_texcoord = i.screenPos.xy / i.screenPos.w; // perspective divide -> screen UV
float4 glassColor = SAMPLE_TEXTURE2D(_CameraColorTexture, sampler_CameraColorTexture, SS_texcoord + SS_bias);

return glassColor;
}

ENDHLSL
Pass
{
Tags{ "LightMode" = "Grab" }
HLSLPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma shader_feature_local _NORMAL_STAGE_WS_N
ENDHLSL
}
}
}

总结

  • 使用ShaderVariablesFunctions.hlsl中的通用函数:
    GetVertexPositionInputs(positionOS),GetVertexNormalInputs(normalOS, tangentOS)
    快速计算法线坐标变换以及法线

  • _CameraColorTexture只在运行时生效

  • _TextureName_TexelSize:图片的宽高,这个声明在CBuffer外
    x = 1.0/width
    y = 1.0/height
    z = width
    w = height

  • _TextureName_ST:图片的Tilling 和 Offset
    x,y 对应 Tilling的 x,y
    z,w 对应 Offset的 x,y

  • 利用RenderFeature设置特殊的渲染方式

屏幕深度,护盾特效

基础

  • 获取屏幕深度图 _CameraDepthTexture;
    TEXTURE2D(_CameraDepthTexture); SAMPLER(sampler_CameraDepthTexture);
    通过屏幕坐标采样:ComputeScreenPos(positionCS); 别忘了使用时做齐次除法,这步操作通常在片元着色器中进行

  • Linear01Depth(depth, _ZBufferParams) 获取0-1线性深度

  • URPSetting中需要打开深度图 设置Opaque

  • 护盾特效:需实现菲涅尔效果,扫光效果
    菲涅尔效果 基础公式:F0 + (1 - F0) * pow(1.0 - dot(viewDirWS, normalWS), 5.0);
    F0为材质的菲涅尔系数
    通用沿y轴扫光效果:float flow = saturate(pow(1 - abs(frac(i.positionWS.y * 0.3 - _Time.y * 0.2) - 0.5), 10) * 0.3);
    float4 flowcolor = flow * _EmissionColor;

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
// Depth-intersection "shield" effect: Schlick fresnel rim + scrolling scan
// bands + a contact highlight where the shield meets scene geometry
// (detected via the camera depth texture).
Shader "Unlit/DepthShield"
{
    Properties
    {
        _MainTex("MainTex",2D) = "white"{}
        _BaseColor("BaseColor",Color) = (1,1,1,1)
        _F0 ("F0", float) = 0.01                      // fresnel reflectance at normal incidence
        _EmissionColor ("EmissionColor", Color) = (1,1,1,1)
        _Speed("Speed", float) = 1                    // scan-band scroll speed
    }
    SubShader
    {
        // Fix: the original declared "RenderType" twice; a blended shader needs
        // "Queue" = "Transparent" so it draws after opaques (and after the
        // depth texture has been resolved).
        Tags { "RenderType" = "Transparent" "Queue" = "Transparent" "RenderPipeline" = "UniversalPipeline" }
        Blend SrcAlpha OneMinusSrcAlpha
        HLSLINCLUDE

        #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
        #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

        TEXTURE2D(_MainTex);
        SAMPLER(sampler_MainTex);

        TEXTURE2D(_CameraDepthTexture);
        SAMPLER(sampler_CameraDepthTexture);

        CBUFFER_START(UnityPerMaterial)
        float4 _MainTex_ST;
        float4 _BaseColor;
        float4 _EmissionColor;
        float _F0;
        float _Speed;
        CBUFFER_END

        struct Attributes
        {
            float4 positionOS:POSITION;
            float4 normalOS:NORMAL;
            float2 uv : TEXCOORD0;
        };

        struct Varyings
        {
            float4 positionHS : SV_POSITION;
            float2 uv : TEXCOORD0;
            float4 screenPos:TEXCOORD1;   // for sampling _CameraDepthTexture
            float3 positionWS:TEXCOORD2;
            float3 normalWS:TEXCOORD3;
        };

        // Standard URP vertex transform; also outputs world position/normal and
        // the screen-space position used to sample the depth texture.
        Varyings vert(Attributes i)
        {
            Varyings o;
            VertexPositionInputs input = GetVertexPositionInputs(i.positionOS.xyz);
            o.positionHS = input.positionCS;
            o.positionWS = input.positionWS;
            o.screenPos = ComputeScreenPos(input.positionCS);
            o.uv = TRANSFORM_TEX(i.uv, _MainTex);
            o.normalWS = normalize(TransformObjectToWorldNormal(i.normalOS.xyz));
            return o;
        }

        float4 frag(Varyings i) : SV_Target
        {
            float4 col = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv) * _BaseColor;
            float2 uvSS = i.screenPos.xy / i.screenPos.w; // perspective divide -> screen UV
            float4 depthColor = SAMPLE_TEXTURE2D(_CameraDepthTexture, sampler_CameraDepthTexture, uvSS);
            // Fix: the camera depth texture stores depth in the red channel
            // (the original read .a).
            float screenDepth = Linear01Depth(depthColor.r, _ZBufferParams);

            // SV_POSITION.z in the fragment stage is the raw depth-buffer value.
            float depth = i.positionHS.z;
            depth = Linear01Depth(depth, _ZBufferParams); // linear 0-1 depth of the shield surface
            // Contact highlight where shield depth approaches the scene depth.
            float edge = saturate(depth - screenDepth + 0.005) * 100;

            float3 viewDirWS = normalize(_WorldSpaceCameraPos.xyz - i.positionWS);

            // Schlick fresnel approximation.
            float fresnel = _F0 + (1 - _F0) * pow(1 - dot(viewDirWS, i.normalWS), 5);
            // Three scan bands at different spatial frequencies, scrolling along world y.
            float flow = saturate(pow(1 - abs(frac(i.positionWS.y * 0.3 - _Time.y * _Speed) - 0.5), 10) * 0.3);
            float flow1 = saturate(pow(1 - abs(frac(i.positionWS.y * 0.5 - _Time.y * _Speed) - 0.5), 10) * 0.5);
            float flow2 = saturate(pow(1 - abs(frac(i.positionWS.y * 0.7 - _Time.y * _Speed) - 0.5), 10) * 0.7);
            float4 flowcolor = (flow + flow1 + flow2) * _EmissionColor;

            return float4(col.rgb, fresnel + edge) + flowcolor;
        }

        ENDHLSL
        Pass
        {
            Tags{ "LightMode" = "UniversalForward" }
            HLSLPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            ENDHLSL
        }
    }
}

总结

  • _ZBufferParams: UnityInput.hlsl 官方链接
    // x = 1-far/near
    // y = far/near
    // z = x/far
    // w = y/far
    #if UNITY_REVERSED_Z
    // x = -1+far/near
    // y = 1
    // z = x/far
    // w = 1/far

特定物体描边效果

链接

urp管线的自学hlsl之路 第二十五篇 Render Feature制作特定模型外描边

基础

整理一下描边过程:

  1. 获得基础纯色图:按层级/渲染队列,过滤出需要渲染的物体,返回纯色
    基础纯色图

  2. 对纯色图进行模糊操作
    纯色图模糊

  3. 纯色模糊图 - 纯色图:得到外描边图
    外描边图

  4. 实际上模糊图 - 纯色图,内部边缘也会有一段负数渐变区,
    将其显示出来相当于内描边,取绝对值abs,可以得到内外描边效果图
    内外描边图

  5. 将描边图与原图叠加输出

代码

  1. 基础纯色图:
    绘制之前需要设置一下输出目标:ConfigureTarget(temp);
    最终是要将物体绘制出来:context.DrawRenderers(renderingData.cullResults, ref draw, ref filter);
    设置FilteringSettings filter = new FilteringSettings(queue, layer);
    设置DrawingSettings draw = CreateDrawingSettings(shaderTag, ref renderingData, renderingData.cameraData.defaultOpaqueSortFlags);

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    //第一个pass 绘制纯色的图像
        class DrawSoildColorPass : ScriptableRenderPass
    {
    Setting mysetting = null;
    OutlineRenderFeather SelectOutline = null;
    ShaderTagId shaderTag = new ShaderTagId("DepthOnly");//只有在这个标签LightMode对应的shader才会被绘制
            FilteringSettings filter;

    public DrawSoildColorPass(Setting setting, OutlineRenderFeather render)
    {
    mysetting = setting;
    SelectOutline = render;

    renderPassEvent = setting.passEvent;

                //过滤设定
                RenderQueueRange queue = new RenderQueueRange();
    queue.lowerBound = Mathf.Min(setting.QueueMax, setting.QueueMin);
    queue.upperBound = Mathf.Max(setting.QueueMax, setting.QueueMin);
    filter = new FilteringSettings(queue, setting.layer);
    }

    public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
    {
    int temp = Shader.PropertyToID("_MyTempColor1");

    RenderTextureDescriptor desc = cameraTextureDescriptor;
    cmd.GetTemporaryRT(temp, desc);
    SelectOutline.solidcolorID = temp;
    ConfigureTarget(temp); //设置它的输出RT
    ConfigureClear(ClearFlag.All, Color.black);
    }

    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
    mysetting.mat.SetColor("_SoildColor", mysetting.color);
    CommandBuffer cmd = CommandBufferPool.Get("提取固有色pass");

                //绘制设定
                var draw = CreateDrawingSettings(shaderTag, ref renderingData, renderingData.cameraData.defaultOpaqueSortFlags);
    draw.overrideMaterial = mysetting.mat;
    draw.overrideMaterialPassIndex = 0;

                //开始绘制(准备好了绘制设定和过滤设定)
                context.DrawRenderers(renderingData.cullResults, ref draw, ref filter);
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
    }
    }
  2. 获取原图
    renderer.cameraColorTarget;

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    // Enqueues the solid-color pass and the blur/composite pass each frame,
    // handing the blur pass the camera color target as its source image.
    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
    if(setting.mat != null)
    {
    RenderTargetIdentifier sour = renderer.cameraColorTarget; // source image (current camera color)
    renderer.EnqueuePass(_DrawSoildColorPass);
    _DrawBlurPass.Setup(sour);
    renderer.EnqueuePass(_DrawBlurPass);
    }
    }
  3. 模糊图
    拿到基础纯色图,进行模糊即可,模糊操作可以参考
    高品质后处理:十种图像模糊算法的总结与实现
    后处理效果汇总

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
// Pass 2: blur the solid-color RT, then composite outline + source in one
// material pass and write the result back to the camera target.
class DrawBlurPass : ScriptableRenderPass
{
    Setting _settings = null;                     // feature settings
    OutlineRenderFeather _feature = null;         // owner; exposes solidcolorID from pass 1
    RenderTargetIdentifier _source;               // camera color target
    BlurBlitter _blurBlitter = new BlurBlitter();

    public DrawBlurPass(Setting setting, OutlineRenderFeather render)
    {
        _settings = setting;
        _feature = render;

        renderPassEvent = setting.passEvent;
    }

    public void Setup(RenderTargetIdentifier sour)
    {
        _source = sour;

        // Toggle the inner-outline keyword pair on the composite material.
        bool innerOutline = _settings.ColorType == Setting.EColorType.INcolorON;
        if (innerOutline)
        {
            _settings.mat.EnableKeyword("_INCOLORON");
            _settings.mat.DisableKeyword("_INCOLOROFF");
        }
        else
        {
            _settings.mat.EnableKeyword("_INCOLOROFF");
            _settings.mat.DisableKeyword("_INCOLORON");
        }
    }

    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        CommandBuffer cmd = CommandBufferPool.Get("颜色计算");

        RenderTextureDescriptor desc = renderingData.cameraData.cameraTargetDescriptor;

        // Copy the camera color so the composite pass can read it as _SourTex.
        int sourTexId = Shader.PropertyToID("_SourTex");
        cmd.GetTemporaryRT(sourTexId, desc);
        cmd.CopyTexture(_source, sourTexId);

        // Blur stage.
        // NOTE(review): the blur reads _BlurTex, which is allocated here but
        // never written before blurring — presumably the pass-1 solid-color RT
        // was meant to be copied into it first; verify intended behavior.
        int blurTexId = Shader.PropertyToID("_BlurTex");
        cmd.GetTemporaryRT(blurTexId, desc);
        _blurBlitter.SetSource(blurTexId, desc);

        _blurBlitter.downSample = 1;
        _blurBlitter.blurScale = _settings.blur;
        _blurBlitter.iteratorCount = _settings.passloop;
        _blurBlitter.blurType = BlurType.Box;

        _blurBlitter.Render(cmd);

        // Material pass 1 merges blur / solid / source into the camera target.
        cmd.Blit(_feature.solidcolorID, _source, _settings.mat, 1);

        cmd.ReleaseTemporaryRT(_feature.solidcolorID);
        cmd.ReleaseTemporaryRT(sourTexId);
        cmd.ReleaseTemporaryRT(blurTexId);
        context.ExecuteCommandBuffer(cmd);
    }
}
  1. 合并图像
    两个Pass都比较简单,不多说明了
    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    // Composite shader for the outline render feature:
    //  - Pass 0 outputs a flat solid color (used by the solid-color render pass)
    //  - Pass 1 merges the blurred RT, the solid RT and the scene color.
    Shader "Unlit/Outline"
    {
        Properties
        {
            _MainTex("Texture", 2D) = "white" {}
            _SoildColor("SoildColor",Color) = (1,1,1,1)
        }
        SubShader
        {
            Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
            HLSLINCLUDE

            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

            TEXTURE2D(_MainTex);
            SAMPLER(sampler_MainTex);

            TEXTURE2D(_SourTex);    // scene color copy
            SAMPLER(sampler_SourTex);

            TEXTURE2D(_BlurTex);    // blurred solid-color RT
            SAMPLER(sampler_BlurTex);

            CBUFFER_START(UnityPerMaterial)
            float4 _MainTex_ST;
            float4 _SoildColor;
            CBUFFER_END

            struct Attributes
            {
                float4 positionOS:POSITION;
                float2 uv : TEXCOORD0;
            };

            struct Varyings
            {
                float4 positionHS : SV_POSITION;
                float2 uv : TEXCOORD0;
            };

            ENDHLSL

            // Pass 0: solid color
            Pass
            {
                Tags{ "LightMode" = "UniversalForward" }
                HLSLPROGRAM
                #pragma vertex Vert
                #pragma fragment Frag

                Varyings Vert(Attributes input)
                {
                    Varyings output;
                    output.positionHS = TransformObjectToHClip(input.positionOS.xyz);
                    output.uv = TRANSFORM_TEX(input.uv, _MainTex);
                    return output;
                }

                // Every filtered object is drawn with this flat color.
                float4 Frag(Varyings input) : SV_Target
                {
                    return _SoildColor;
                }
                ENDHLSL
            }
            // Pass 1: merge the images
            Pass
            {
                Tags{ "LightMode" = "UniversalForward" }
                HLSLPROGRAM
                #pragma vertex Vert1
                #pragma fragment Frag1
                #pragma multi_compile_local _INCOLORON _INCOLOROFF

                Varyings Vert1(Attributes input)
                {
                    Varyings output;
                    output.positionHS = TransformObjectToHClip(input.positionOS.xyz);
                    output.uv = TRANSFORM_TEX(input.uv, _MainTex);
                    return output;
                }

                float4 Frag1(Varyings input) : SV_Target
                {
                    float4 blurCol  = SAMPLE_TEXTURE2D(_BlurTex, sampler_BlurTex, input.uv);
                    float4 sceneCol = SAMPLE_TEXTURE2D(_SourTex, sampler_SourTex, input.uv);
                    float4 solidCol = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, input.uv);

                    real4 merged;
                    #if _INCOLORON
                    // abs keeps the negative inner gradient -> inner + outer outline
                    merged = abs(blurCol - solidCol) + sceneCol;
                    #elif _INCOLOROFF
                    // saturate clips the negative part -> outer outline only
                    merged = saturate(blurCol - solidCol) + sceneCol;
                    #endif
                    return merged;
                }
                ENDHLSL
            }
        }
    }

总结

  • 获取基础纯色图的过程,是一个基础过滤物体到渲染的过程,可以参考URP源码RenderObjectsPass.cs

边缘检测描边

基础

参考Unity Shader入门精要,12.3,13.4章节

  • 基础边缘检测
  1. 卷积:通常为2x2,3x3的方形区域,每个格子对应一个权重值,
    采样一个像素点时,对其周围方形空间采样按权重值叠加后再除以个数得到当前像素值

  2. 常见的边缘检测算子:Roberts,Prewitt,Sobel等
    常见的边缘检测算子

  3. 得到两个方向的梯度值,Gx,Gy
    G = sqrt(Gx*Gx + Gy*Gy); 通常优化开方: G = abs(Gx) + abs(Gy)
    G 值表示梯度值,梯度值越大表示越在边缘

  4. 检测结果分析
    这种方式检测,会产生很多我们不希望的边缘线,如光照影响的,法线影响的,阴影影响的等
    基础边缘检测

  • 通过深度和深度法线上进行边缘检测
  1. 需要获取深度图,以及深度法线图,通过_CameraDepthTexture可以获得深度图,但是URP不支持深度法线图
    因此需要获得深度法线图:自定义RenderFeather,在不透明物体渲染之前使用”Hidden/Internal-DepthNormalsTexture”渲染一次,将图片存为”_CameraDepthNormalsTexture”

  2. _CameraDepthNormalsTexture.xyz存法线信息,_CameraDepthNormalsTexture.w存深度信息,
    UnityCG.cginc中定义: DecodeFloatRG 解码深度:线性深度 = z + w/255
    DecodeFloatRG

  3. _CameraDepthNormalsTexutre.xyz法线信息并非真实法线,需要对其进行解码操作获得观察空间法线
    UnityCG.cginc中定义:DecodeViewNormalStereo
    DecodeViewNormalStereo

  4. 镜像对比方式:如像素(x,y+1)与(x,y-1),(x+1,y)与(x-1,y)
    比较法线以及深度是否相同,相同返回1,不同返回0,代码里对应CheckSame函数,小于一定范围则视为相同

  5. 检测结果分析
    可以明显看到这种方式比第一种方式减少了很多不必要的边缘线
    深度法线边缘检测

代码

  • 基础边缘检测

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    // Post-process edge detection: Sobel operator on luminance.
    Shader "Unlit/OutlinePPS"
    {
        Properties
        {
            _MainTex ("Texture", 2D) = "white" {}
            _EdgeColor ("EdgeColor", Color) = (1,1,1,1)
            _EdgeOnly ("EdgeOnly",Range(0,1)) = 1
            _BackgroundColor ("BackgroundColor", Color) = (1,1,1,1)
        }
        SubShader
        {
            Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
            Cull Off ZWrite Off ZTest Always
            HLSLINCLUDE

            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

            TEXTURE2D(_MainTex);
            SAMPLER(sampler_MainTex);

            CBUFFER_START(UnityPerMaterial)
            float4 _MainTex_ST;
            float4 _MainTex_TexelSize;   // xy = 1/width, 1/height

            float4 _EdgeColor;
            float4 _BackgroundColor;
            float _EdgeOnly;             // 0 = edges over scene, 1 = edges over background color
            CBUFFER_END

            struct Attributes
            {
                float4 positionOS:POSITION;
                float2 uv : TEXCOORD0;
            };

            struct Varyings
            {
                float4 positionHS : SV_POSITION;
                float2 uv[9] : TEXCOORD0;   // 3x3 neighborhood, precomputed in the vertex stage
            };
            Varyings Vert(Attributes i)
            {
                Varyings o;
                o.positionHS = TransformObjectToHClip(i.positionOS.xyz);

                // 3x3 sample offsets around the pixel.
                // Fix: the original wrote `(x, y)`, which HLSL parses as the
                // comma operator (a scalar) — the offsets must be float2(x, y).
                o.uv[0] = i.uv + _MainTex_TexelSize.xy * float2(-1, -1);
                o.uv[1] = i.uv + _MainTex_TexelSize.xy * float2(0, -1);
                o.uv[2] = i.uv + _MainTex_TexelSize.xy * float2(1, -1);
                o.uv[3] = i.uv + _MainTex_TexelSize.xy * float2(-1, 0);
                o.uv[4] = i.uv + _MainTex_TexelSize.xy * float2(0, 0);
                o.uv[5] = i.uv + _MainTex_TexelSize.xy * float2(1, 0);
                o.uv[6] = i.uv + _MainTex_TexelSize.xy * float2(-1, 1);
                o.uv[7] = i.uv + _MainTex_TexelSize.xy * float2(0, 1);
                o.uv[8] = i.uv + _MainTex_TexelSize.xy * float2(1, 1);
                return o;
            }

            // Perceptual luminance from RGB.
            float luminance(float3 color)
            {
                return 0.2125*color.r + 0.7154*color.g + 0.0721*color.b;
            }

            // Sobel edge detection; returns ~0 on edges, ~1 on flat areas.
            float sobel(Varyings i)
            {
                const float Gx[9] = { -1,-2,-1,0,0,0,1,2,1 };
                const float Gy[9] = { -1,0,1,-2,0,2,-1,0,1 };

                float texColor = 0;
                float edgeX = 0;
                float edgeY = 0;
                for (int it = 0; it < 9; it++)
                {
                    // .rgb: pass only the color channels (avoids implicit float4->float3 truncation)
                    texColor = luminance(SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv[it]).rgb);
                    edgeX += texColor * Gx[it];
                    edgeY += texColor * Gy[it];
                }
                // |Gx| + |Gy| approximates sqrt(Gx^2 + Gy^2)
                return 1 - abs(edgeX) - abs(edgeY);
            }

            float4 Frag(Varyings i) :SV_Target{

                float edge = sobel(i);
                float4 color = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv[4]); // center tap

                float4 color1 = lerp(_EdgeColor, color, edge);            // edges over the scene
                float4 color2 = lerp(_EdgeColor, _BackgroundColor, edge); // edges over a flat background
                return lerp(color1, color2, _EdgeOnly);
            }

            ENDHLSL

            Pass
            {
                HLSLPROGRAM
                #pragma vertex Vert
                #pragma fragment Frag
                ENDHLSL
            }
        }
    }

  • 通过深度和深度法线边缘检测

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    114
    115
    116
    117
    118
    119
    120
    121
    122
    123
    124
    125
    126
    127
    128
    129
    130
    131
    132
    133
    134
    135
    136
    137
    138
    139
    // Post-process edge detection driven by depth + view-space normals,
    // sampled from the _CameraDepthNormalsTexture generated by a render feature.
    Shader "Unlit/OutlinePPSDepth"
    {
        Properties
        {
            _MainTex ("Texture", 2D) = "white" {}
            _EdgeColor ("EdgeColor", Color) = (1,1,1,1)
            _EdgeOnly ("EdgeOnly",Range(0,1)) = 1
            _BackgroundColor ("BackgroundColor", Color) = (1,1,1,1)
            _SampleDistance("SampleDistance",Range(0,1)) = 1
            _SensitivityDepth ("SensitivityDepth",Range(0,3)) = 1
            _SensitivityNormals ("SensitivityNormals",Range(0,3)) = 1
        }
        SubShader
        {
            Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
            Cull Off ZWrite Off ZTest Always
            HLSLINCLUDE

            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

            TEXTURE2D(_MainTex);
            SAMPLER(sampler_MainTex);

            // xy = encoded view-space normal, zw = RG-encoded linear depth.
            TEXTURE2D(_CameraDepthNormalsTexture);
            SAMPLER(sampler_CameraDepthNormalsTexture);

            CBUFFER_START(UnityPerMaterial)
            float4 _MainTex_ST;
            float4 _MainTex_TexelSize;   // xy = 1/width, 1/height

            float4 _EdgeColor;
            float4 _BackgroundColor;
            float _EdgeOnly;             // 0 = edges over scene, 1 = edges over background color
            float _SampleDistance;       // neighborhood sampling radius scale
            float _SensitivityDepth;
            float _SensitivityNormals;
            CBUFFER_END

            struct Attributes
            {
                float4 positionOS:POSITION;
                float2 uv : TEXCOORD0;
            };

            struct Varyings
            {
                float4 positionHS : SV_POSITION;
                float2 uv : TEXCOORD0;
            };
            Varyings Vert(Attributes i)
            {
                Varyings o;
                o.positionHS = TransformObjectToHClip(i.positionOS.xyz);
                o.uv = i.uv;
                return o;
            }

            // Returns 1 when both samples belong to the same surface (normal and
            // depth differences below threshold), else 0.
            float CheckSame(float2 centerNormal, float centerDepth, float2 sampleNormal, float sampleDepth)
            {
                float2 diffNormal = abs(centerNormal - sampleNormal) * _SensitivityNormals;
                float diffDepth = abs(centerDepth - sampleDepth) * _SensitivityDepth;

                int isSameNormal = (diffNormal.x + diffNormal.y) < 0.1f;
                // Fix: scale the depth threshold by the center *depth*; the
                // original compared against 0.1 * centerNormal (a float2).
                int isSameDepth = diffDepth < 0.1 * centerDepth;
                return isSameNormal * isSameDepth ? 1.0 : 0.0;
            }

            // Decode RG-encoded depth: depth = z + w/255.
            float DecodeFloatRG(float2 enc)
            {
                float2 kDecodeDot = float2(1.0, 1 / 255.0);
                return dot(enc, kDecodeDot);
            }

            // Stereographic decode of a view-space normal. Kept for reference;
            // the raw encoded xy is used below instead (see the loop comment).
            float3 DecodeViewNormalStereo(float4 enc4)
            {
                float kScale = 1.7777;
                float3 nn = enc4.xyz * float3(2 * kScale, 2 * kScale, 0) + float3(-kScale, -kScale, 1);
                float g = 2.0 / dot(nn.xyz, nn.xyz);
                float3 n;
                n.xy = g * nn.xy;
                n.z = g - 1;
                return n;
            }

            // Compares opposite neighbors' depth/normal; returns ~0 on edges.
            float sobel(Varyings i)
            {
                float2 uv[9];
                float2 normal[9];
                float depth[9];

                // Fix: `(x, y)` is the HLSL comma operator (a scalar), not a
                // vector — the offsets must be built with float2(x, y).
                uv[0] = i.uv + _SampleDistance * _MainTex_TexelSize.xy * float2(-1, -1);
                uv[1] = i.uv + _SampleDistance * _MainTex_TexelSize.xy * float2(0, -1);
                uv[2] = i.uv + _SampleDistance * _MainTex_TexelSize.xy * float2(1, -1);
                uv[3] = i.uv + _SampleDistance * _MainTex_TexelSize.xy * float2(-1, 0);
                uv[4] = i.uv + _SampleDistance * _MainTex_TexelSize.xy * float2(0, 0);
                uv[5] = i.uv + _SampleDistance * _MainTex_TexelSize.xy * float2(1, 0);
                uv[6] = i.uv + _SampleDistance * _MainTex_TexelSize.xy * float2(-1, 1);
                uv[7] = i.uv + _SampleDistance * _MainTex_TexelSize.xy * float2(0, 1);
                uv[8] = i.uv + _SampleDistance * _MainTex_TexelSize.xy * float2(1, 1);

                for (int it = 0; it < 9; it++)
                {
                    real4 depthnormalTex = SAMPLE_TEXTURE2D(_CameraDepthNormalsTexture, sampler_CameraDepthNormalsTexture, uv[it]);
                    // Raw encoded normal xy: DecodeViewNormalStereo misbehaved here and the
                    // encoded values are proportional enough for the difference test.
                    normal[it] = depthnormalTex.xy;
                    depth[it] = DecodeFloatRG(depthnormalTex.zw); // linear depth
                }
                float edge = 1;

                // Compare diagonally/axially opposite samples around the center.
                edge *= CheckSame(normal[0], depth[0], normal[8], depth[8]);
                edge *= CheckSame(normal[1], depth[1], normal[7], depth[7]);
                edge *= CheckSame(normal[2], depth[2], normal[6], depth[6]);
                edge *= CheckSame(normal[3], depth[3], normal[5], depth[5]);

                return edge;
            }

            float4 Frag(Varyings i) :SV_Target{

                float edge = sobel(i);
                float4 color = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv);

                float4 mainColor = lerp(_EdgeColor, color, edge);
                float4 noMainColor = lerp(_EdgeColor, _BackgroundColor, edge);
                return lerp(mainColor, noMainColor, _EdgeOnly);
            }

            ENDHLSL

            Pass
            {
                HLSLPROGRAM
                #pragma vertex Vert
                #pragma fragment Frag
                ENDHLSL
            }
        }
    }

  • 深度法线RenderFeather

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    using UnityEngine;
    using UnityEngine.Rendering;
    using UnityEngine.Rendering.Universal;

    /// <summary>
    /// Renders opaque objects with "Hidden/Internal-DepthNormalsTexture" into a
    /// temporary RT and publishes it globally as _CameraDepthNormalsTexture
    /// (URP provides no built-in depth-normals texture).
    /// </summary>
    public class DepthNormalsRenderFeather : ScriptableRendererFeature
    {
        class DepthNormalsRenderPass : ScriptableRenderPass
        {
            private RenderTargetHandle destination { get; set; }
            private Material depthNormalsMaterial = null;              // override material that encodes depth + normals
            private FilteringSettings m_FilteringSettings;
            ShaderTagId m_ShaderTagId = new ShaderTagId("DepthOnly");  // only passes with this LightMode are drawn

            public DepthNormalsRenderPass(RenderQueueRange renderQueueRange, LayerMask layerMask, Material material)
            {
                m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
                this.depthNormalsMaterial = material;
            }

            // Target handle (initialized to "_CameraDepthNormalsTexture" by the feature).
            public void Setup(RenderTargetHandle destination)
            {
                this.destination = destination;
            }

            public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
            {
                var rtDesc = cameraTextureDescriptor;
                rtDesc.depthBufferBits = 32;
                rtDesc.colorFormat = RenderTextureFormat.ARGB32;

                cmd.GetTemporaryRT(destination.id, rtDesc, FilterMode.Point);
                ConfigureTarget(destination.Identifier());
                ConfigureClear(ClearFlag.All, Color.black);
            }

            // Draws the filtered renderers with the override material, then exposes
            // the result as a global texture for other shaders to sample.
            public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
            {
                CommandBuffer cmd = CommandBufferPool.Get("深度法线获取pass");
                using (new ProfilingSample(cmd, "DepthNormals Prepass"))
                {
                    context.ExecuteCommandBuffer(cmd);
                    cmd.Clear();
                    var sorting = renderingData.cameraData.defaultOpaqueSortFlags;
                    var draw = CreateDrawingSettings(m_ShaderTagId, ref renderingData, sorting);
                    draw.perObjectData = PerObjectData.None;

                    ref CameraData camData = ref renderingData.cameraData;
                    Camera cam = camData.camera;
                    if (camData.isStereoEnabled)
                        context.StartMultiEye(cam);

                    draw.overrideMaterial = depthNormalsMaterial;

                    context.DrawRenderers(renderingData.cullResults, ref draw,
                        ref m_FilteringSettings);

                    // Make the RT visible to every shader as _CameraDepthNormalsTexture.
                    cmd.SetGlobalTexture("_CameraDepthNormalsTexture", destination.id);

                }
                context.ExecuteCommandBuffer(cmd);
                CommandBufferPool.Release(cmd);
            }

            // Release the temporary RT once the camera finishes rendering.
            public override void OnCameraCleanup(CommandBuffer cmd)
            {
                if (destination != RenderTargetHandle.CameraTarget)
                {
                    cmd.ReleaseTemporaryRT(destination.id);
                    destination = RenderTargetHandle.CameraTarget;
                }
            }
        }

        DepthNormalsRenderPass m_ScriptablePass;
        RenderTargetHandle depthNormalsTexture;
        Material depthNormalsMaterial;

        /// <inheritdoc/>
        public override void Create()
        {
            depthNormalsMaterial = CoreUtils.CreateEngineMaterial("Hidden/Internal-DepthNormalsTexture");
            m_ScriptablePass = new DepthNormalsRenderPass(RenderQueueRange.opaque, -1, depthNormalsMaterial);

            // Inject right after the pre-passes, before opaques render.
            m_ScriptablePass.renderPassEvent = RenderPassEvent.AfterRenderingPrePasses;
            depthNormalsTexture.Init("_CameraDepthNormalsTexture");
        }

        // Enqueue the pass once per camera.
        public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
        {
            m_ScriptablePass.Setup(depthNormalsTexture);
            renderer.EnqueuePass(m_ScriptablePass);
        }
    }

总结

  • URP并不支持深度法线图,通过RenderFeather自己生成一张并设置为全局变量
  • 代码中并没有使用DecodeViewNormalStereo函数,在测试过程中使用DecodeViewNormalStereo解码,实际效果不知道为什么出现问题(待解决)
    但是可以直接使用未解码的法线xy,因为计算过程xy与解码后成正比,不影响实际结果

科幻扫描效果

链接

urp管线的自学hlsl之路 第二十三篇 科幻扫描效果前篇
urp管线的自学hlsl之路 第二十四篇 科幻扫描效果后篇

基础

  1. 首先要知道这是一个后处理,我们要做的是在屏幕上画线,再附加扫描
    再屏幕上画线条需要区分可画区域与不可画区域,通过深度图来区分,只有有深度的地方才需要画线
    画线需要与世界坐标系的x,y,z轴对应,目前我们不知道屏幕图像上的一点在世界坐标系的位置,
    因此第一步需要计算屏幕像素点在世界坐标的实际位置,即重建世界坐标系。

  2. 世界坐标通过深度值,相机世界坐标以及一个朝向确定: positionWS = _WorldSpaceCameraPos + depth * Direction; 求:Direction
    这个过程相当于NDC反运算,通过near,far,fov计算 相机到近平面四个顶点的向量
    计算过程:height = near * tan(fov / 2);
    width = height * camera.aspect; //aspect为屏幕高宽比
    fwd = camera.fwd * near;
    right = camera.right * width;
    up = camera.up * height;
    四个向量为:
    BottomLeft = fwd - right - up;
    BottomRight = fwd + right - up;
    UpLeft = fwd - right + up;
    UpRight = fwd + right + up;
    将结果线性变换:
    float size = BottomLeft.magnitude / near;
    BottomLeft = BottomLeft.normalize * size;
    BottomRight = BottomRight.normalize * size;
    UpLeft = UpLeft.normalize * size;
    UpRight = UpRight.normalize * size;
    将结果通过RenderFeature传入材质中。

  3. 基于x,y,z轴画线:

  • uv从左下角(0,0)到右上角(1,1),将屏幕划分为四个区域,分别取上面以及计算好的向量
    这时候将渲染屏幕大小的片,其对应有四个顶点,按照uv划分设置朝向,
    在片元着色器中,每个像素得到的朝向是经过四个顶点朝向插值完的结果,因此得到相机到像素世界坐标的朝向以及长度
    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    int t = 0;
    if (i.uv.x < 0.5 && i.uv.y < 0.5) // 左下
    t = 0;
    else if (i.uv.x > 0.5 && i.uv.y < 0.5) // 右下
    t = 1;
    else if (i.uv.x > 0.5 && i.uv.y > 0.5) // 右上
    t = 2;
    else // 左上
    t = 3;
    o.Direction = _Matrix[t].xyz;
  • 通过上面的分析得到向量后,能直接得到positionWS = _WorldSpaceCameraPos + depth * Direction + float3(0.01,0.01,0.01); //增加一点偏移是画出线显示在实际物体上面
    输出positionWS查看效果:重新构建的世界坐标系将屏幕划分成四块
    positionWS效果
    将positionWS取frac,0-1:变成条状了
    frac效果
    使用step裁剪掉大部分0.98:最后就变成了线,可以看到3种类型线,分别对应x,y,z轴
    step + frac效果
    赋予颜色:线条变得更加明显了,这些线就代表世界坐标重构之后的x,y,z轴,其对应间隔为1
    1
    2
    float3 Line = step(0.98, frac(positionWS));
    float3 LineColor = Line.x * _ColorX + Line.y * _ColorY + Line.z * _ColorZ;
    赋予颜色
  1. 描边:通过上面学习的基于屏幕深度,深度法线做描边检测即可

  2. 扫描效果:类似上面做过的护盾扫描效果
    float flow = saturate(pow(1 - abs(frac(i.positionWS.y * 0.3 - _Time.y * 0.2) - 0.5), 10) * 0.3);
    对其魔改一下,原来的效果为,0-1-0的渐变,扫描时我们通常使用1-0渐变,因此舍弃一部分渐变:
    float mask = saturate(pow(abs(frac(positionWS.x + _Time.y * 0.2) - 0.75), 10) * 0.3);

代码

  • RenderFeature

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    114
    115
    116
    117
    118
    119
    120
    121
    122
    123
    124
    125
    126
    127
    128
    129
    130
    131
    132
    133
    134
    135
    136
    137
    138
    139
    140
    141
    142
    143
    144
    145
    146
    147
    148
    149
    150
    151
    152
    153
    154
    155
    156
    157
    158
    using UnityEngine;
    using UnityEngine.Rendering;
    using UnityEngine.Rendering.Universal;

    /// <summary>
    /// 科幻扫描效果
    /// </summary>
    public class ScanRenderFeather : ScriptableRendererFeature
    {
    class ScanRenderPass : ScriptableRenderPass
    {
    private Setting setting;
    private RenderTargetIdentifier source;
    private Material mat;

    // Caches the feature settings, builds the effect material from the
    // configured shader, and sets the pass injection point.
    public void Setup(Setting setting)
    {
    this.setting = setting;

    mat = new Material(setting.shader);
    renderPassEvent = setting.Event;
    }

    // Stores the source target and pushes the per-frame material parameters,
    // then enables exactly one of the _AXIS_X/_AXIS_Y/_AXIS_Z scan keywords.
    public void Set(RenderTargetIdentifier source)
    {
    this.source = source;
    mat.SetColor("_ColorX", setting.ColorX);
    mat.SetColor("_ColorY", setting.ColorY);
    mat.SetColor("_ColorZ", setting.ColorZ);
    mat.SetColor("_ColorEdge", setting.ColorEdge);
    mat.SetColor("_OutlineColor", setting.ColorOutline);
    mat.SetFloat("_Width", setting.Width);
    mat.SetFloat("_Spacing", setting.Spacing);
    mat.SetFloat("_Speed", setting.Speed);
    mat.SetFloat("_EdgeSample", setting.EdgeSample);
    mat.SetFloat("_NormalSensitivity", setting.NormalSensitivity);
    mat.SetFloat("_DepthSensitivity", setting.DepthSensitivity);

    // Keyword selection: disable the other two axes before enabling the chosen one.
    if (setting.AXIS == AxisType.X)
    {
    mat.DisableKeyword("_AXIS_Y");
    mat.DisableKeyword("_AXIS_Z");
    mat.EnableKeyword("_AXIS_X");
    }
    else if (setting.AXIS == AxisType.Y)
    {
    mat.DisableKeyword("_AXIS_Z");
    mat.DisableKeyword("_AXIS_X");
    mat.EnableKeyword("_AXIS_Y");
    }
    else
    {
    mat.DisableKeyword("_AXIS_X");
    mat.DisableKeyword("_AXIS_Y");
    mat.EnableKeyword("_AXIS_Z");
    }
    }

    // This method is called before executing the render pass.
    // It can be used to configure render targets and their clear state. Also to create temporary render target textures.
    // When empty this render pass will render to the active camera render target.
    // You should never call CommandBuffer.SetRenderTarget. Instead call <c>ConfigureTarget</c> and <c>ConfigureClear</c>.
    // The render pipeline will ensure target setup and clearing happens in a performant manner.
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
    }

    // Here you can implement the rendering logic.
    // Use <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
    // https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html
    // You don't have to call ScriptableRenderContext.submit, the render pipeline will call it at specific points in the pipeline.
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
    int temp = Shader.PropertyToID("temp");
    CommandBuffer cmd = CommandBufferPool.Get("扫描特效");
    RenderTextureDescriptor desc = renderingData.cameraData.cameraTargetDescriptor;

    Camera cam = renderingData.cameraData.camera;
    float height = cam.nearClipPlane * Mathf.Tan(Mathf.Deg2Rad * cam.fieldOfView * 0.5f);
    Vector3 up = cam.transform.up * height;
    Vector3 right = cam.transform.right * height * cam.aspect;
    Vector3 forward = cam.transform.forward * cam.nearClipPlane;

    Vector3 ButtomLeft = forward - right - up;
    Vector3 ButtomRight = forward + right - up;
    Vector3 TopRight = forward + right + up;
    Vector3 TopLeft = forward - right + up;

    float scale = ButtomLeft.magnitude / cam.nearClipPlane;
    ButtomLeft = ButtomLeft.normalized * scale;
    ButtomRight = ButtomRight.normalized * scale;
    TopRight = TopRight.normalized * scale;
    TopLeft = TopLeft.normalized * scale;

    Matrix4x4 MATRIX = new Matrix4x4();
    MATRIX.SetRow(0, ButtomLeft);
    MATRIX.SetRow(1, ButtomRight);
    MATRIX.SetRow(2, TopRight);
    MATRIX.SetRow(3, TopLeft);
    mat.SetMatrix("_Matrix", MATRIX);

    cmd.GetTemporaryRT(temp, desc);
    cmd.Blit(source, temp, mat);
    cmd.Blit(temp, source);
    context.ExecuteCommandBuffer(cmd);
    cmd.ReleaseTemporaryRT(temp);

    CommandBufferPool.Release(cmd);
    }

    // Cleanup any allocated resources that were created during the execution of this render pass.
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
    }
    }

    ScanRenderPass m_ScriptablePass;

    public enum AxisType
    {
    X,
    Y,
    Z
    }
    [System.Serializable]
    public class Setting
    {
    public Shader shader = null;
    public RenderPassEvent Event = RenderPassEvent.AfterRenderingTransparents;
    [ColorUsage(true, true)] public Color ColorX = Color.white;
    [ColorUsage(true, true)] public Color ColorY = Color.white;
    [ColorUsage(true, true)] public Color ColorZ = Color.white;
    [ColorUsage(true, true)] public Color ColorEdge = Color.white;
    [ColorUsage(true, true)] public Color ColorOutline = Color.white;
    [Range(0, 0.2f), Tooltip("线框宽度")] public float Width = 0.1f;
    [Range(0.1f, 10), Tooltip("线框间距")] public float Spacing = 1;
    [Range(0, 10), Tooltip("滚动速度")] public float Speed = 1;
    [Range(0, 3), Tooltip("边缘采样半径")] public float EdgeSample = 1;
    [Range(0, 3), Tooltip("法线灵敏度")] public float NormalSensitivity = 1;
    [Range(0, 3), Tooltip("深度灵敏度")] public float DepthSensitivity = 1;
    [Tooltip("特效方向")] public AxisType AXIS;
    }
    [SerializeField]
    public Setting setting;

    public override void Create()
    {
    m_ScriptablePass = new ScanRenderPass();
    m_ScriptablePass.Setup(setting);
    }

    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
    m_ScriptablePass.Set(renderer.cameraColorTarget);
    renderer.EnqueuePass(m_ScriptablePass);
    }
    }

  • Shader

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    114
    115
    116
    117
    118
    119
    120
    121
    122
    123
    124
    125
    126
    127
    128
    129
    130
    131
    132
    133
    134
    135
    136
    137
    138
    139
    140
    141
    142
    143
    144
    145
    146
    147
    148
    149
    150
    151
    152
    153
    // Fullscreen sci-fi scan effect: reconstructs each pixel's world position from
    // the depth texture and a frustum-corner ray (set from C# via _Matrix), draws
    // axis-aligned grid lines plus a moving scan band, and adds an edge outline
    // detected from _CameraDepthNormalsTexture.
    Shader "Unlit/Scan"
    {
    Properties
    {
    [HideInInspector] _MainTex("MainTex",2D) = "white"{}
    [HDR]_ColorX("ColorX",Color) = (1,1,1,1)
    [HDR]_ColorY("ColorY",Color) = (1,1,1,1)
    [HDR]_ColorZ("ColorZ",Color) = (1,1,1,1)
    [HDR]_ColorEdge("ColorEdge",Color) = (1,1,1,1)
    _Width("Width",float) = 0.02
    _Spacing("Spacing",float) = 1
    _Speed("Speed",float) = 1
    _EdgeSample("EdgeSample",Range(0,1)) = 1
    _NormalSensitivity("NormalSensitivity",float) = 1
    _DepthSensitivity("DepthSensitivity",float) = 1
    [HDR]_OutlineColor("OutlineColr",Color) = (1,1,1,1)
    }
    SubShader
    {
    Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
    Cull Off ZWrite Off ZTest Always
    HLSLINCLUDE

    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

    TEXTURE2D(_MainTex);
    SAMPLER(sampler_MainTex);

    // Use _CameraDepthNormalsTexture instead of the plain depth texture:
    // .xy stores the encoded view-space normal, .zw the depth; linear 01 depth = z + w/255
    TEXTURE2D(_CameraDepthTexture);
    SAMPLER(sampler_CameraDepthTexture);

    TEXTURE2D(_CameraDepthNormalsTexture);
    SAMPLER(sampler_CameraDepthNormalsTexture);

    CBUFFER_START(UnityPerMaterial)
    float4 _MainTex_ST;
    float4 _MainTex_TexelSize;
    real4 _ColorX;
    real4 _ColorY;
    real4 _ColorZ;
    real4 _ColorEdge;
    real4 _OutlineColor;
    float _Width;
    float _Spacing;
    float _Speed;
    float _EdgeSample;
    float _NormalSensitivity;
    float _DepthSensitivity;
    CBUFFER_END

    // Rows 0..3 = frustum-corner rays (bottom-left, bottom-right, top-right, top-left),
    // uploaded per-frame by the C# render pass.
    float4x4 _Matrix;

    struct Attributes
    {
    float4 positionOS:POSITION;
    float2 uv : TEXCOORD0;
    };

    struct Varyings
    {
    float4 positionHS : SV_POSITION;
    float2 uv : TEXCOORD0;
    float3 Direction:TEXCOORD1;
    };

    // Picks the frustum-corner ray for this fullscreen-quad vertex: the uv quadrant
    // identifies which corner the vertex is, and the matching row of _Matrix is the
    // world-space ray through that corner (interpolated per pixel in Frag).
    Varyings Vert(Attributes i)
    {
    Varyings o;
    o.positionHS = TransformObjectToHClip(i.positionOS.xyz);
    o.uv = i.uv;
    int t = 0;
    if (i.uv.x < 0.5 && i.uv.y < 0.5)
    t = 0;
    else if (i.uv.x > 0.5 && i.uv.y < 0.5)
    t = 1;
    else if (i.uv.x > 0.5 && i.uv.y > 0.5)
    t = 2;
    else
    t = 3;
    o.Direction = _Matrix[t].xyz;
    return o;
    }

    // Mainly used for outline detection
    int sobel(Varyings i)// edge detection (Roberts-cross style: compares the two diagonal sample pairs)
    {
    real depth[4];
    real2 normal[4];
    float2 uv[4];// uvs of the four diagonal samples

    uv[0] = i.uv + float2(-1, -1) * _EdgeSample * _MainTex_TexelSize.xy;
    uv[1] = i.uv + float2(1, -1) * _EdgeSample * _MainTex_TexelSize.xy;
    uv[2] = i.uv + float2(-1, 1) * _EdgeSample * _MainTex_TexelSize.xy;
    uv[3] = i.uv + float2(1, 1) * _EdgeSample * _MainTex_TexelSize.xy;
    for (int t = 0; t < 4; t++)
    {
    real4 depthnormalTex = SAMPLE_TEXTURE2D(_CameraDepthNormalsTexture, sampler_CameraDepthNormalsTexture, uv[t]);
    normal[t] = depthnormalTex.xy;// encoded normal (xy alone is enough for difference comparison)
    depth[t] = depthnormalTex.z * 1.0 + depthnormalTex.w / 255.0;// decode linear 01 depth
    }
    // depth edge: product of the two diagonal depth differences against a fixed threshold
    int Dep = abs(depth[0] - depth[3]) * abs(depth[1] - depth[2]) * _DepthSensitivity > 0.01 ? 1 : 0;
    // normal edge
    float2 nor = abs(normal[0] - normal[3]) * abs(normal[1] - normal[2]) * _NormalSensitivity;
    int Nor = (nor.x + nor.y) > 0.01 ? 1 : 0;
    return saturate(Dep + Nor);
    }

    float4 Frag(Varyings i) :SV_Target{
    int outline = sobel(i);

    real4 tex = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv);
    real4 depthnormal = SAMPLE_TEXTURE2D(_CameraDepthNormalsTexture, sampler_CameraDepthNormalsTexture, i.uv);
    float depth01 = depthnormal.z * 1.0 + depthnormal.w / 255.0;
    depth01 *= _ProjectionParams.z; // _ProjectionParams.z = far plane: 01 depth -> eye depth
    //float depth01 = LinearEyeDepth(SAMPLE_TEXTURE2D(_CameraDepthTexture, sampler_CameraDepthTexture, i.uv).x, _ZBufferParams).x;
    float3 positionWS = _WorldSpaceCameraPos + depth01 * i.Direction + float3(0.01, 0.01, 0.01); // small constant offset lifts the lines above the actual surface
    float3 positionWS01 = positionWS * _ZBufferParams.w; // rescaled position used for the moving band — presumably to bring it near 0-1; TODO confirm the intended factor
    float3 Line = step(1 - _Width, frac(positionWS / _Spacing)); // 1 on each axis where we are within _Width of a grid plane
    float4 LineColor = Line.x * _ColorX + Line.y * _ColorY + Line.z * _ColorZ + outline * _OutlineColor;

    // Exactly one _AXIS_* keyword is always defined (multi_compile in the pass below),
    // so `mask` is declared in every compiled variant.
    #ifdef _AXIS_X
    float mask = saturate(pow(abs(frac(positionWS01.x * 10 + _Time.y * 0.1 * _Speed) - 0.53), 10) * 200);
    float mask2 = saturate(pow(abs(frac(positionWS01.x * 10 - _Time.y * 0.1 * _Speed) - 0.47), 10) * 200);
    float mask3 = saturate(pow(abs(frac(positionWS01.z * 10 + _Time.y * 0.1 * _Speed) - 0.53), 10) * 200);
    float mask4 = saturate(pow(abs(frac(positionWS01.z * 10 - _Time.y * 0.1 * _Speed) - 0.47), 10) * 200);
    mask += mask2 + mask3 + mask4;
    mask += step(0.95, mask);
    #elif _AXIS_Y
    float mask = saturate(pow(abs(frac(positionWS01.y * 10 + _Time.y * 0.1 * _Speed) - 0.75), 10) * 10);
    mask += step(0.95, mask);
    #elif _AXIS_Z
    float mask = saturate(pow(abs(frac(positionWS01.z * 10 + _Time.y * 0.1 * _Speed) - 0.75), 10) * 10);
    mask += step(0.95, mask);
    #endif
    return tex * saturate(1 - mask) + (LineColor + _ColorEdge) * mask; // blend an extra cover color into the band so the scan reads better
    }

    ENDHLSL

    Pass
    {
    HLSLPROGRAM
    #pragma vertex Vert
    #pragma fragment Frag
    #pragma multi_compile_local _AXIS_X _AXIS_Y _AXIS_Z
    ENDHLSL
    }
    }
    }

总结

  • 通过屏幕像素点获得对应的世界坐标,在后处理中特别实用,如全局雾效,SSAO等
  • 扫描效果与之前护盾扫描类似,扫描公式大概都以这样的为准,在此基础上进行魔改

屏幕炫光,更好的广告牌算法

基础

  1. 使用模型空间(0,0,0,1)点做MV变换后得相机空间坐标,加上原先顶点的模型偏移,乘P矩阵得最终顶点坐标
    基础广告牌已经完成,变得简单多了
    1
    2
    3
    float4 pivotWS = mul(UNITY_MATRIX_M, float4(0, 0, 0, 1));
    float4 pivotVS = mul(UNITY_MATRIX_V, pivotWS);
    o.positionHS = mul(UNITY_MATRIX_P, pivotVS + float4(i.positionOS.xy, 0, 1));
  2. 加入旋转以及缩放控制,目前对顶点进行变换并不会允许缩放以及旋转的控制
    思路:在模型空间对其x,y坐标做缩放,旋转后(广告牌不需要z值),再放入顶点变换计算中
  • 二维旋转矩阵:
    {
    cos(a),-sin(a),
    sin(a),cos(a)
    }
  • 获得GameObject的世界坐标缩放
    float ScaleX = length(float3(UNITY_MATRIX_M[0].x, UNITY_MATRIX_M[1].x, UNITY_MATRIX_M[2].x));
    float ScaleY = length(float3(UNITY_MATRIX_M[0].y, UNITY_MATRIX_M[1].y, UNITY_MATRIX_M[2].y));
    float ScaleZ = length(float3(UNITY_MATRIX_M[0].z, UNITY_MATRIX_M[1].z, UNITY_MATRIX_M[2].z));
  • 需要与unity顺序一致:先缩放,后旋转
  1. 制作炫光,加入一个渐隐渐现效果
  • 制作渐隐渐现只需要一个值来控制(alpha),怎么求得alpha
  • 物体中心点周围一个范围, 采样屏幕深度纹理,判断深度值,被遮挡不通过
  • 透明值 = 没被遮挡数 / 采样总数 // alpha = passCount / totalSampleCount;
  • 裁剪空间坐标,透除:pos.xy / pos.w

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
// Screen-space flare billboard ("ADS Pro"): the quad always faces the camera
// (built from the object pivot in view space), and its vertex color fades with
// how much of a small screen-space region around the pivot is occluded in the
// camera depth texture (soft visibility fade for the flare).
Shader "Unlit/ADS Pro"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
[HDR]_BaseColor("BaseColor", Color) = (1,1,1,1)
_Rotate("Rotate",Range(0,3.14)) = 0
}
SubShader
{
Tags { "Queue" = "Overlay" "RenderPipeline" = "UniversalPipeline"}
Blend One One
ZWrite Off
ZTest Always
HLSLINCLUDE

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_MainTex);
SAMPLER(sampler_MainTex);

TEXTURE2D(_CameraDepthTexture);
SAMPLER(sampler_CameraDepthTexture);

CBUFFER_START(UnityPerMaterial)
float4 _MainTex_ST;
float4 _BaseColor;
float _Rotate;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
float2 uv : TEXCOORD0;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
float2 uv : TEXCOORD0;
float4 color : COLOR;
};

// Billboard + occlusion fade, all done per vertex.
Varyings Vert(Attributes i)
{
Varyings o;
o.uv = TRANSFORM_TEX(i.uv, _MainTex);
// Billboard: transform only the object pivot (0,0,0,1) to view space, then offset
// it by the (scaled + rotated) model-space xy of the vertex.
float4 pivotWS = mul(UNITY_MATRIX_M, float4(0, 0, 0, 1));
float4 pivotVS = mul(UNITY_MATRIX_V, pivotWS);
// World scale of the GameObject, read from the model matrix columns.
float ScaleX = length(float3(UNITY_MATRIX_M[0].x, UNITY_MATRIX_M[1].x, UNITY_MATRIX_M[2].x));
float ScaleY = length(float3(UNITY_MATRIX_M[0].y, UNITY_MATRIX_M[1].y, UNITY_MATRIX_M[2].y));
//float ScaleZ = length(float3(UNITY_MATRIX_M[0].z, UNITY_MATRIX_M[1].z, UNITY_MATRIX_M[2].z));

// Scale first, then rotate — matching Unity's transform order.
float2x2 rotateMatrix = {cos(_Rotate),-sin(_Rotate),sin(_Rotate),cos(_Rotate)};
float2 pos = i.positionOS.xy * float2(ScaleX, ScaleY);
pos = mul(rotateMatrix, pos);
// NOTE(review): pivotVS.w is already 1, so adding w = 1 leaves positionVS.w = 2,
// which skews the projected z; harmless here since ZTest is Always — verify if reused.
float4 positionVS = pivotVS + float4(pos, 0, 1);
o.positionHS = mul(UNITY_MATRIX_P, positionVS);

int sampleCount = 3;
int axisCount = sampleCount * 2 + 1;
float totalCount = axisCount * axisCount;
float sampleRate = 0.2;// size ratio of the sampled centre region
float pivotDepth = -pivotVS.z;// linear eye depth of the pivot (view space looks down -z)
float4 pivotCS = mul(UNITY_MATRIX_P, pivotVS);
int passCount = 0;
for (int x = -sampleCount; x <= sampleCount; ++x)
{
for (int y = -sampleCount; y <= sampleCount; ++y)
{
float2 samplePos = pivotCS.xy + o.positionHS.xy * sampleRate * float2(x, y) / axisCount; // clip-space sample position
float2 SSuv = samplePos / o.positionHS.w * 0.5 + 0.5;// manual perspective divide to NDC, then flip y depending on the platform
#ifdef UNITY_UV_STARTS_AT_TOP
SSuv.y = 1 - SSuv.y;
#endif

if (SSuv.x < 0 || SSuv.x>1 || SSuv.y < 0 || SSuv.y>1)
continue;// sample fell off screen: skip this one and move to the next

float depth = SAMPLE_TEXTURE2D_LOD(_CameraDepthTexture, sampler_CameraDepthTexture, SSuv, 0).x;
depth = LinearEyeDepth(depth, _ZBufferParams);

// count samples where the scene is behind the pivot (flare visible there)
passCount += step(pivotDepth, depth);
}
}

// alpha = visible samples / total samples; also fade out when the pivot is very close
o.color = _BaseColor * _BaseColor.a * passCount / totalCount;
o.color *= smoothstep(0.1, 2, pivotDepth);
return o;
}

float4 Frag(Varyings i) :SV_Target{
float4 color = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv);
return i.color * color;
}

ENDHLSL

Pass
{
Tags{ "LightMode" = "UniversalForward" "RenderType" = "Overlay" }
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment Frag
ENDHLSL
}
}
}

总结

  • 取线性深度可以通过相机空间的-z值获取
  • 判断平台差异,倒转y,UNITY_UV_STARTS_AT_TOP

屏幕空间贴花

基础

  1. Unity官方从2021.2版本开始才提供了 Unity URP官方文档2021.2版本
  • 贴花组件 Decal Projector Component
  • Shader使用:Shader Graphs/Decal
  • 使用时在URP Renderer资源上启用对应的Renderer Feature: Decal
  1. VisualEffectGraph粒子系统里提供了内置的ForwardDecal

  2. 自己实现一个屏幕空间贴画

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
// Screen-space decal: the mesh is a unit-cube projector. Each fragment
// reconstructs the scene position in the cube's object space from the camera
// depth texture (camera position + camera->vertex ray * linear depth) and
// samples the decal texture with that position's xz.
Shader "Unlit/Decal"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_BaseColor ("BaseColor", Color) = (1,1,1,1)
_EdgeStretchPrevent ("EdgeStretchPrevent", Range(-1,1)) = 0
}
SubShader
{
Tags { "Queue" = "Transparent-499" "RenderType" = "Overlay" "RenderPipeline" = "UniversalPipeline" "DisableBatch" = "True"}
Blend SrcAlpha OneMinusSrcAlpha
HLSLINCLUDE

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_MainTex);
SAMPLER(sampler_MainTex);

TEXTURE2D(_CameraDepthTexture);
SAMPLER(sampler_CameraDepthTexture);

CBUFFER_START(UnityPerMaterial)
float4 _MainTex_ST;
float4 _BaseColor;
float _EdgeStretchPrevent;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
float4 screenPos : TEXCOORD0;
float3 cameraPosOS:TEXCOORD1;
float4 cameraVertexDirOS:TEXCOORD2;
};

Varyings Vert(Attributes i)
{
Varyings o;
o.positionHS = TransformObjectToHClip(i.positionOS.xyz);

// Same result as the ComputeScreenPos() call below:
/*o.screenPos = o.positionHS * 0.5f;
o.screenPos.xy = float2(o.screenPos.x, o.screenPos.y * _ProjectionParams.x) + o.screenPos.w;
o.screenPos.zw = o.positionHS.zw;*/

o.screenPos = ComputeScreenPos(o.positionHS);

float4 posVS = mul(UNITY_MATRIX_V, mul(UNITY_MATRIX_M, i.positionOS));
o.cameraVertexDirOS.w = -posVS.z; // w stores the vertex's linear eye depth
o.cameraVertexDirOS.xyz = mul(UNITY_MATRIX_I_M, mul(UNITY_MATRIX_I_V, float4(posVS.xyz, 0))).xyz; // back to object space as a direction (w = 0 drops the translation)

o.cameraPosOS = mul(UNITY_MATRIX_I_M, mul(UNITY_MATRIX_I_V, float4(0, 0, 0, 1))).xyz;// camera position in the decal cube's object space
return o;
}

float4 Frag(Varyings i) :SV_Target{
float2 uv = i.screenPos.xy / i.screenPos.w;
float depth = LinearEyeDepth(SAMPLE_TEXTURE2D(_CameraDepthTexture, sampler_CameraDepthTexture, uv).x, _ZBufferParams);

// Normalize the ray so its view-space depth is 1, then march `depth` along it.
i.cameraVertexDirOS.xyz /= i.cameraVertexDirOS.w;
float3 decalPos = i.cameraPosOS + i.cameraVertexDirOS.xyz * depth;// object space: camera position + (depth-normalized camera->vertex ray) * linear scene depth

// Clip everything outside the unit cube (object space spans -0.5..0.5 per axis).
float mask = (abs(decalPos.x) < 0.5 ? 1 : 0) * (abs(decalPos.y) < 0.5 ? 1 : 0) * (abs(decalPos.z) < 0.5 ? 1 : 0);

// Reconstruct the surface normal from screen-space derivatives of decalPos.
float3 decalNormal = normalize(cross(ddy(decalPos), ddx(decalPos)));

mask *= decalNormal.y > 0.2 * _EdgeStretchPrevent ? 1 : 0;// threshold that suppresses texture streaking on near-vertical surfaces

float2 YdecalUV = decalPos.xz + 0.5; // object-space coords run -0.5..0.5, so +0.5 remaps them to the 0..1 uv range

float4 tex = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, YdecalUV) * mask;

return tex;
}

ENDHLSL

Pass
{
Tags{ "LightMode" = "UniversalForward" }
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment Frag
#pragma target 3.0
ENDHLSL
}
}
}

总结

  • ddx,ddy函数解释:对屏幕坐标x和y方向的偏导数
    ddx(v) = 该像素点右边的v值 - 该像素点的v值
    ddy(v) = 该像素点下面的v值 - 该像素点的v值
    ddx(float3(1,2,3)) = float3(0,0,0) //因为使用该shader的所有像素 输出的记录值都是 float3(1,2,3)那么差值就为float3(0,0,0)
    即理解为:ddx(v),在屏幕上水平方向横跨一个像素的v值变化量;ddy(v),则是在垂直方向上v值的变化量

解释:代码中使用 float3 decalNormal = normalize(cross(ddy(decalPos), ddx(decalPos)));
拿到模型空间decalPos后,ddy(decalPos)求得其往下的向量,ddx(decalPos)求得往右的向量,相当于得到decalPos在其相对坐标上的的x,y向量,叉乘得到z向量(垂直与两个向量)
可以近似理解为:ddx(decalPos) = (u+1,v)求出来的decalPos - (u,v)求出来的decalPos,在使用中相当于pos2-pos1,得到pos1到pos2的向量

  • 防止以后绕晕了再记录一下,以下M,V代表变换矩阵,I_M,I_V代表对应的逆矩阵(inverse)
    positionVS = mul(V, mul(M, positionOS)); positionOS = mul(I_M, mul(I_V, positionVS));
    将变换过程理解为: positionOS -> M -> V = positionVS; 那么回去需要原路返回:positionVS -> I_V -> I_M = positionOS;

SSAO屏幕空间环境光遮蔽

基础理论

参考 com.unity.render-pipelines.universal@12.1.1\ShaderLibrary\SSAO.hlsl
环境遮罩之SSAO原理
URP屏幕空间环境光遮蔽后处理(SSAO)
【光线追踪系列十六】基于着色点的正向半球随机方向生成

  • 首先需要明确一点将所有坐标都转换至相机空间操作。
  1. 通过屏幕uv获取像素深度值,在裁剪空间中深度值就为其z轴的值,根据uv以及depth重建像素在相机空间的坐标
    屏幕uv从0到1,裁剪空间uv从-1到1,将uv*2-1,范围变换至-1到1就为裁剪空间xy坐标。
    通过renderingData.cameraData.GetGPUProjectionMatrix()获得当前相机P矩阵,以及P逆矩阵
    通过逆矩阵求得VS坐标,进行齐次除法

    1
    2
    3
    4
    5
    6
    7
    //根据UV和depth,重建像素在viewspace中的坐标
    float3 ReconstructPositionVS(float2 uv, float depth) {
    float4 positionInHS = float4(uv * 2 - 1, depth, 1);
    float4 positionVS = mul(CustomInvProjMatrix, positionInHS);
    positionVS /= positionVS.w;
    return positionVS.xyz;
    }
  2. 得到当前像素的VS坐标后,需要在沿其法线方向的半球中随机采样点,然后计算对当前像素的ao影响
    规定半球半径_SampleRadius,采样点数量_SampleCount,随机半球可以参照【光线追踪系列十六】基于着色点的正向半球随机方向生成

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    float Random(float2 st) {
    return frac(sin(dot(st, float2(12.9898, 78.233))) * 43758.5453123);
    }

    float Random(float x) {
    return frac(sin(x) * 43758.5453123);
    }
    // 随机球
    float3 RandomSphere(float3 positionVS, float index)
    {
    float r1 = Random(positionVS.xy);
    float r2 = Random(index);
    float z = sqrt(1 - r2);
    float th = 2 * PI * r1;
    float x = cos(th) * sqrt(r2);
    float y = sin(th) * sqrt(r2);
    return float3(x, y, z);
    }
  3. 先随机一个半球的单位向量,再将其转换至视觉空间,将偏移加载像素VS坐标上,通过偏移后的VS再获得当前屏幕uv,采样该uv随机点的深度值,再重新构建VS坐标

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    float2 ReProjectToUV(float3 positionVS) {
    float4 positionHS = mul(CustomProjMatrix, float4(positionVS, 1));
    return (positionHS.xy / positionHS.w + 1) * 0.5;
    }

    float3 offset = RandomSphere(positionVS, it);
    offset = normalize(mul(TBN, offset));
    float3 samplePositionVS = positionVS + offset * _SampleRadius;
    float2 sampleUV = ReProjectToUV(samplePositionVS);
    float sampleDepth = SampleDepth(sampleUV);
    float3 hitPositionVS = ReconstructPositionVS(sampleUV, sampleDepth);
  4. 获取到随机采样点后,计算该点对当前像素的ao影响值

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
// Screen-space ambient occlusion.
// Pass 0: estimate AO from view-space positions reconstructed out of the depth texture.
// Pass 1/2: horizontal/vertical box blur of the AO term.
// Pass 3: multiply the (blurred) AO over the scene color.
Shader "Unlit/SSAO"
{
    Properties
    {
        _MainTex ("Texture", 2D) = "white" {}
    }
    SubShader
    {
        ZTest Always ZWrite Off Cull Off
        Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}

        HLSLINCLUDE

        // __AO_DEBUG__ shows the raw AO term; _Blur means the blur/combine passes run afterwards.
        #pragma shader_feature __AO_DEBUG__
        #pragma shader_feature _Blur

        #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
        #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Filtering.hlsl"
        #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
        #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Input.hlsl"

        struct Attributes
        {
            float4 positionOS : POSITION;
            float2 uv : TEXCOORD0;
            UNITY_VERTEX_INPUT_INSTANCE_ID
        };

        struct Varyings
        {
            float4 positionHS : SV_POSITION;
            float2 uv : TEXCOORD0;
            UNITY_VERTEX_OUTPUT_STEREO
        };

        // Shared fullscreen vertex shader used by every pass below.
        Varyings Vert(Attributes input)
        {
            Varyings output;
            UNITY_SETUP_INSTANCE_ID(input);
            UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(output);
            output.positionHS = TransformObjectToHClip(input.positionOS.xyz); // explicit .xyz avoids the implicit float4->float3 truncation warning
            output.uv = input.uv;
            return output;
        }

        ENDHLSL

        // Pass 0: AO estimation.
        Pass
        {
            HLSLPROGRAM

            #pragma vertex Vert
            #pragma fragment Frag

            TEXTURE2D_X_FLOAT(_CameraDepthTexture);

            TEXTURE2D_X(_MainTex);
            SAMPLER(sampler_MainTex);

            CBUFFER_START(UnityPerMaterial)
            float4 _MainTex_TexelSize;
            float4x4 CustomProjMatrix;      // camera GPU projection matrix, set from C#
            float4x4 CustomInvProjMatrix;   // its inverse
            float _Atten;
            float _Contrast;
            float _SampleRadius;
            int _SampleCount;
            CBUFFER_END

            float Random(float2 st) {
                return frac(sin(dot(st, float2(12.9898, 78.233))) * 43758.5453123);
            }

            float Random(float x) {
                return frac(sin(x) * 43758.5453123);
            }

            // Pseudo-random direction on the upper (tangent-space) hemisphere.
            float3 RandomSampleOffset(float2 uv, float index) {
                float2 alphaBeta = float2(Random(uv) * PI * 2, Random(index) * PI);
                float2 sin2;
                float2 cos2;
                sincos(alphaBeta, sin2, cos2);
                return float3(cos2.y * cos2.x, sin2.y, cos2.y * sin2.x);
            }

            // Reconstruct the pixel's view-space position from its uv + raw depth.
            float3 ReconstructPositionVS(float2 uv, float depth) {
                float4 positionInHS = float4(uv * 2 - 1, depth, 1);
                float4 positionVS = mul(CustomInvProjMatrix, positionInHS);
                positionVS /= positionVS.w; // perspective divide
                return positionVS.xyz;
            }

            // Project a view-space position back to screen uv (0..1).
            float2 ReProjectToUV(float3 positionVS) {
                float4 positionHS = mul(CustomProjMatrix, float4(positionVS, 1));
                return (positionHS.xy / positionHS.w + 1) * 0.5;
            }

            float SampleDepth(float2 uv) {
                return LOAD_TEXTURE2D_X(_CameraDepthTexture, _MainTex_TexelSize.zw * uv).x;
            }

            float4 Frag(Varyings i) : SV_Target
            {
                float4 color = SAMPLE_TEXTURE2D_X(_MainTex, sampler_MainTex, i.uv);
                float depth = SampleDepth(i.uv);

                float3 positionVS = ReconstructPositionVS(i.uv, depth);

                float3 tangentVS = normalize(ddx(positionVS));
                // Reconstruct the face normal from screen-space derivatives.
                float3 normalVS = normalize(cross(ddy(positionVS), ddx(positionVS)));

                float3 binormalVS = cross(normalVS, tangentVS);
                float3x3 TBN = {tangentVS, binormalVS, normalVS};

                float ao = 0;
                float rcpSampleCount = rcp(_SampleCount);
                for (int it = 0; it < (int)_SampleCount; ++it)
                {
                    // Random hemisphere offset, rotated into view space via TBN;
                    // the radius grows with the sample index to cover the whole kernel.
                    float3 offset = RandomSampleOffset(i.uv, it);
                    offset = mul(TBN, offset);
                    float3 samplePositionVS = positionVS + offset * _SampleRadius * (1 + it) * rcpSampleCount;
                    // ComputeScreenPos only works in the vertex stage, hence the manual reprojection.
                    float2 sampleUV = ReProjectToUV(samplePositionVS);

                    float sampleDepth = SampleDepth(sampleUV);
                    float3 hitPositionVS = ReconstructPositionVS(sampleUV, sampleDepth);

                    // Occlusion contribution falls off with squared distance; the small
                    // epsilons avoid self-occlusion and division by zero.
                    float3 hitOffset = hitPositionVS - positionVS;
                    float a = max(0, dot(hitOffset, normalVS) - 0.001); // 0 ~ radius
                    float b = dot(hitOffset, hitOffset) + 0.001;        // 0 ~ radius^2
                    ao += a * rcp(b);                                   // 0 ~ 1/radius
                }
                ao *= _SampleRadius * rcpSampleCount;
                ao = PositivePow(ao * _Atten, _Contrast);
                ao = 1 - saturate(ao);
                // BUG FIX: the original had an unconditional `return ao;` here, which made
                // the debug/combine branch below unreachable dead code.
            #if __AO_DEBUG__ || _Blur
                // Raw AO output: debug view, or input texture for the blur/combine passes.
                return float4(ao, ao, ao, 1);
            #else
                return ao * color;
            #endif
            }
            ENDHLSL
        }

        // Pass 1: horizontal box blur of the AO texture.
        Pass
        {
            HLSLPROGRAM

            #pragma vertex Vert
            #pragma fragment FragBlurH

            #include "../Blur/Blur.hlsl"

            TEXTURE2D_X(_MainTex);
            SAMPLER(sampler_MainTex);

            CBUFFER_START(UnityPerMaterial)
            float4 _MainTex_TexelSize;
            CBUFFER_END

            float4 FragBlurH(Varyings i) : SV_Target
            {
                return BoxBlur(_MainTex, i.uv * _MainTex_TexelSize.zw, 2, float2(1, 0));
            }
            ENDHLSL
        }

        // Pass 2: vertical box blur of the AO texture.
        Pass
        {
            HLSLPROGRAM

            #pragma vertex Vert
            #pragma fragment FragBlurV

            #include "../Blur/Blur.hlsl"

            TEXTURE2D_X(_MainTex);
            SAMPLER(sampler_MainTex);

            CBUFFER_START(UnityPerMaterial)
            float4 _MainTex_TexelSize;
            CBUFFER_END

            float4 FragBlurV(Varyings i) : SV_Target
            {
                return BoxBlur(_MainTex, i.uv * _MainTex_TexelSize.zw, 2, float2(0, 1));
            }

            ENDHLSL
        }

        // Pass 3: multiply the (blurred) AO over the scene color.
        Pass
        {
            HLSLPROGRAM

            #pragma vertex Vert
            #pragma fragment FragComb

            TEXTURE2D_X(_MainTex);
            SAMPLER(sampler_MainTex);

            CBUFFER_START(UnityPerMaterial)
            float4 _MainTex_TexelSize;
            CBUFFER_END

            TEXTURE2D_X(_AOTex);
            SAMPLER(sampler_AOTex);

            float4 FragComb(Varyings i) : SV_Target
            {
                float4 color = SAMPLE_TEXTURE2D_X(_MainTex, sampler_MainTex, i.uv);
                float ao = SAMPLE_TEXTURE2D_X(_AOTex, sampler_AOTex, i.uv).x; // explicit .x instead of implicit float4->float truncation
            #if __AO_DEBUG__
                return ao;
            #else
                return ao * color;
            #endif
            }

            ENDHLSL
        }
    }
}

总结

屏幕空间操作指南:

  1. 通过uv,depth,重构世界坐标,需要InvVP,vp逆矩阵,(相机空间坐标同理) 都先得到裁剪空间坐标再进行对应的变换:positionCS = P * V * M * positionOS

    1
    2
    3
    4
    5
    6
    float3 ReconstructPositionWS(float2 uv, float depth) {
    float3 positionCS = float3(uv * 2 - 1, depth);
    float4 positionWS = mul(_MatrixInvVP, float4(positionCS, 1));
    positionWS /= positionWS.w;
    return positionWS.xyz;
    }
  2. 同样可以从世界坐标转换至裁剪坐标CS,透除后将xy重映射到(0-1)范围即为屏幕uv值,z值为深度值

    1
    2
    3
    4
    5
    6
    float3 Reproject(float3 positionWS) {
    float4 positionCS = mul(_MatrixVP, float4(positionWS, 1));
    positionCS /= positionCS.w;
    positionCS.xy = (positionCS.xy + 1) * 0.5;
    return positionCS.xyz;
    }
  3. 裁剪空间得到屏幕uv

    1
    float2 pixelCoord = positionCS.xy * _MainTex_TexelSize.zw;

SSPR屏幕空间平面反射

基础

  1. 记录需要平面反射的平面,世界坐标以及法线,针对所有平面都有以下操作(一个坐标一个法线确定一个平面)
  2. 通过屏幕uv以及depth,重构世界坐标系
  3. 在ComputeShader中做反转变换:将世界坐标沿平面反转,得到新的反转点后再转换至屏幕空间得到uv2
  4. uv2的颜色就是反射uv1的颜色
  5. 将ComputeShader反转后的图像,用于平面的渲染,渲染时需要判断当前深度 >= 屏幕深度,则渲染图像上的颜色

代码

  • shader:需要反射的平面,对其反射处理使用的shader

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    // Draws the screen-space planar reflection texture (_ReflectionTex, generated
    // by the SSPR compute pass) back onto the reflective plane geometry, blended
    // additively over the scene color.
    Shader "URPLearn/PlanarReflection"
    {
    Properties
    {
    }

    SubShader
    {
    ZTest Always ZWrite Off Cull Off
    Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}

    Blend One One
    HLSLINCLUDE

    #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
    #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Filtering.hlsl"
    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Input.hlsl"

    struct Attributes
    {
    float4 positionOS : POSITION;
    float2 uv : TEXCOORD0;
    UNITY_VERTEX_INPUT_INSTANCE_ID
    };

    struct Varyings
    {
    float4 positionHS : SV_POSITION;
    float2 uv : TEXCOORD0;
    float4 screenPos : TEXCOORD1;
    UNITY_VERTEX_OUTPUT_STEREO
    };

    Varyings Vert(Attributes input)
    {
    Varyings output;
    UNITY_SETUP_INSTANCE_ID(input);
    UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(output);
    output.positionHS = TransformObjectToHClip(input.positionOS);
    output.uv = input.uv;
    output.screenPos = ComputeScreenPos(output.positionHS);
    return output;
    }

    TEXTURE2D(_ReflectionTex);
    TEXTURE2D(_CameraDepthTexture);

    SAMPLER(sampler_ReflectionTex);
    SAMPLER(sampler_CameraDepthTexture);

    CBUFFER_START(UnityPerMaterial)
    CBUFFER_END


    float4 Frag(Varyings i) : SV_Target
    {
    float2 screenUV = i.screenPos.xy / i.screenPos.w;
    // NOTE(review): SAMPLE_TEXTURE2D returns float4; the implicit float conversion keeps .x (the raw depth sample).
    float depth = SAMPLE_TEXTURE2D(_CameraDepthTexture, sampler_CameraDepthTexture, screenUV);

    // Keep only fragments of the plane at/behind the recorded scene depth, so the
    // reflection is not drawn over geometry in front of the plane (ZTest is Always).
    // NOTE(review): compares SV_POSITION.z directly against the raw depth texture —
    // this assumes matching depth conventions (e.g. reversed-Z); confirm on all targets.
    if (i.positionHS.z >= depth) {
    float4 color = SAMPLE_TEXTURE2D_X(_ReflectionTex, sampler_ReflectionTex, screenUV);
    return color;
    }
    else {
    // Not part of the visible plane surface: discard to leave the scene untouched.
    discard;
    return float4(0,0,0,0);
    }
    }
    ENDHLSL

    Pass
    {
    HLSLPROGRAM

    #pragma vertex Vert
    #pragma fragment Frag

    ENDHLSL
    }
    }
    }
  • RenderFeather:对标记的平面做反射以及再次渲染

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    114
    115
    116
    117
    118
    119
    120
    121
    122
    123
    124
    125
    126
    127
    128
    129
    130
    131
    132
    133
    134
    135
    136
    137
    138
    139
    140
    141
    142
    143
    144
    145
    146
    147
    148
    149
    150
    151
    152
    153
    154
    155
    156
    157
    158
    159
    160
    161
    162
    163
    164
    165
    166
    167
    168
    169
    170
    171
    172
    173
    174
    175
    176
    177
    178
    179
    180
    181
    182
    183
    184
    185
    186
    187
    188
    189
    190
    191
    192
    193
    194
    195
    196
    197
    198
    199
    200
    201
    202
    203
    204
    205
    206
    207
    208
    209
    210
    211
    212
    213
    214
    215
    216
    217
    218
    219
    220
    221
    222
    223
    224
    225
    226
    227
    228
    229
    230
    231
    232
    233
    234
    235
    236
    237
    238
    239
    240
    241
    using System;
    using UnityEngine;
    using UnityEngine.Rendering;
    using UnityEngine.Rendering.Universal;

    namespace URPLearn
    {

    public class SSPRRenderFeather : ScriptableRendererFeature
    {
    class SSPRPlanarRenderPass : ScriptableRenderPass
    {
        private Material _material;
        private SSPRTexGenerator _ssprTexGenerator = new SSPRTexGenerator();

        private PlanarRendererGroups _planarRendererGroups = new PlanarRendererGroups();

        /// <summary>
        /// For every visible reflective-plane group: generate its reflection texture
        /// via the compute shader, then re-draw the group's renderers with the
        /// reflection material on top of the camera color target.
        /// </summary>
        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            CommandBuffer cmd = CommandBufferPool.Get("SSPR-ReflectionTex");

            ReflectPlane.GetVisiblePlanarGroups(_planarRendererGroups);
            foreach (var group in _planarRendererGroups.PlanarRenderers)
            {
                // The buffer is reused per group: clear previously recorded commands.
                cmd.Clear();
                var renderers = group.renderers;
                _ssprTexGenerator.Render(cmd, this.colorAttachment, ref renderingData, ref group.descriptor);
                cmd.SetRenderTarget(this.colorAttachment, this.depthAttachment);
                foreach (var rd in renderers)
                {
                    cmd.DrawRenderer(rd, _material);
                }
                _ssprTexGenerator.ReleaseTemporary(cmd);
                context.ExecuteCommandBuffer(cmd);
            }

            // BUG FIX: buffers obtained from CommandBufferPool.Get must be returned with
            // CommandBufferPool.Release — the original called cmd.Release(), which
            // destroys the pooled buffer instead of recycling it.
            CommandBufferPool.Release(cmd);
        }

        /// <summary>Injects the material and compute shader plus the blur/background options.</summary>
        public void Setup(Material material, ComputeShader computeShader, bool blur, bool excludeBackground)
        {
            _material = material;
            _ssprTexGenerator.BindCS(computeShader);
            _ssprTexGenerator.enableBlur = blur;
            _ssprTexGenerator.excludeBackground = excludeBackground;
        }
    }

    [SerializeField]
    private Material _material;

    [SerializeField]
    private ComputeShader _computeShader;

    [SerializeField]
    private bool _blur;

    [SerializeField]
    private bool _excludeBackground;

    SSPRPlanarRenderPass m_ScriptablePass;

    /// <inheritdoc/>
    public override void Create()
    {
    m_ScriptablePass = new SSPRPlanarRenderPass();

    m_ScriptablePass.renderPassEvent = RenderPassEvent.BeforeRenderingPostProcessing;
    }

    // Here you can inject one or multiple render passes in the renderer.
    // This method is called when setting up the renderer once per-camera.
    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
    if (renderingData.cameraData.renderType != CameraRenderType.Base)
    {
    return;
    }
    if (_material == null || _computeShader == null)
    {
    return;
    }
    m_ScriptablePass.Setup(_material, _computeShader, _blur, _excludeBackground);
    m_ScriptablePass.ConfigureTarget(renderer.cameraColorTarget, renderer.cameraDepthTarget);
    renderer.EnqueuePass(m_ScriptablePass);
    }
    }

    /// <summary>
    /// Generates a screen-space planar reflection texture with a compute shader
    /// and publishes it as a global texture (default name "_ReflectionTex").
    /// </summary>
    public class SSPRTexGenerator
    {
        // Shader property IDs cached once to avoid per-frame string hashing.
        private static class ShaderProperties
        {
            public static readonly int Result = Shader.PropertyToID("_Result");
            public static readonly int CameraColorTexture = Shader.PropertyToID("_CameraColorTexture");
            public static readonly int PlanarPosition = Shader.PropertyToID("_PlanarPosition");
            public static readonly int PlanarNormal = Shader.PropertyToID("_PlanarNormal");
            public static readonly int MatrixVP = Shader.PropertyToID("_MatrixVP");
            public static readonly int MatrixInvVP = Shader.PropertyToID("_MatrixInvVP");
            public static readonly int MainTexelSize = Shader.PropertyToID("_MainTex_TexelSize");
        }

        private ComputeShader _computeShader;
        private int _reflectionTexID;

        private int _kernelClear;
        private int _kernalPass1;
        private int _kernalPass2;

        /// <summary>
        /// 在生成反射贴图的时候,是否剔除掉无穷远的像素(例如天空盒)
        /// (Whether pixels at infinite depth, e.g. the skybox, are excluded.)
        /// </summary>
        private bool _excludeBackground;
        /// <summary>
        /// Whether the reflection texture is blurred after generation.
        /// </summary>
        private bool _enableBlur;

        private BlurBlitter _blurBlitter = new BlurBlitter();

        public SSPRTexGenerator(string reflectTexName = "_ReflectionTex")
        {
            _reflectionTexID = Shader.PropertyToID(reflectTexName);
        }

        /// <summary>
        /// Binds the compute shader used to generate the reflection texture.
        /// </summary>
        public void BindCS(ComputeShader cp)
        {
            _computeShader = cp;
            this.UpdateKernelIndex();
        }

        private void UpdateKernelIndex()
        {
            _kernelClear = _computeShader.FindKernel("Clear");
            _kernalPass1 = _computeShader.FindKernel("DrawReflectionTex1");
            _kernalPass2 = _computeShader.FindKernel("DrawReflectionTex2");
            if (_excludeBackground)
            {
                // The EXCLUDE_BACKGROUND kernel variants are declared right after
                // the default ones, so their kernel indices are offset by 2.
                _kernalPass1 += 2;
                _kernalPass2 += 2;
            }
        }

        public bool excludeBackground
        {
            get
            {
                return _excludeBackground;
            }
            set
            {
                _excludeBackground = value;
                if (_computeShader)
                {
                    this.UpdateKernelIndex();
                }
            }
        }

        public bool enableBlur
        {
            get
            {
                return _enableBlur;
            }
            set
            {
                _enableBlur = value;
            }
        }

        /// <summary>
        /// Records commands into <paramref name="cmd"/> that build the reflection
        /// texture for <paramref name="planarDescriptor"/> from the camera color
        /// target <paramref name="id"/>, then sets it as a global texture.
        /// Pair with <see cref="ReleaseTemporary"/> after use.
        /// </summary>
        public void Render(CommandBuffer cmd, RenderTargetIdentifier id, ref RenderingData renderingData, ref PlanarDescriptor planarDescriptor)
        {
            if (_computeShader == null)
            {
                Debug.LogError("请设置CS");
                return;
            }

            var reflectionTexDes = renderingData.cameraData.cameraTargetDescriptor;
            reflectionTexDes.enableRandomWrite = true; // required for RWTexture2D UAV writes
            reflectionTexDes.msaaSamples = 1;
            cmd.GetTemporaryRT(_reflectionTexID, reflectionTexDes);

            var rtWidth = reflectionTexDes.width;
            var rtHeight = reflectionTexDes.height;

            // View matrix.
            Matrix4x4 v = renderingData.cameraData.camera.worldToCameraMatrix;
            // GetGPUProjectionMatrix converts to the platform's clip-space conventions.
            Matrix4x4 p = GL.GetGPUProjectionMatrix(renderingData.cameraData.GetProjectionMatrix(), false);
            // Column vectors are multiplied on the right, so VP = P * V.
            var matrixVP = p * v;
            var invMatrixVP = matrixVP.inverse;

            // The compute shader declares [numthreads(8, 8, 1)].
            // BUGFIX: round the group count up instead of truncating, otherwise the
            // right/bottom edge pixels are never processed when the target size is
            // not a multiple of 8.
            int threadGroupX = (rtWidth + 7) / 8;
            int threadGroupY = (rtHeight + 7) / 8;

            RenderTargetIdentifier cameraColorTex = id;

            // Scalar/matrix parameters are shared by all kernels.
            cmd.SetComputeVectorParam(_computeShader, ShaderProperties.MainTexelSize, new Vector4(1.0f / rtWidth, 1.0f / rtHeight, rtWidth, rtHeight));
            cmd.SetComputeVectorParam(_computeShader, ShaderProperties.PlanarPosition, planarDescriptor.position);
            cmd.SetComputeVectorParam(_computeShader, ShaderProperties.PlanarNormal, planarDescriptor.normal);
            cmd.SetComputeMatrixParam(_computeShader, ShaderProperties.MatrixVP, matrixVP);
            cmd.SetComputeMatrixParam(_computeShader, ShaderProperties.MatrixInvVP, invMatrixVP);

            // Texture parameters must be bound per kernel.
            cmd.SetComputeTextureParam(_computeShader, _kernelClear, ShaderProperties.Result, _reflectionTexID);
            cmd.DispatchCompute(_computeShader, _kernelClear, threadGroupX, threadGroupY, 1);

            // Pass 1: scatter each pixel to its mirrored position.
            cmd.SetComputeTextureParam(_computeShader, _kernalPass1, ShaderProperties.CameraColorTexture, cameraColorTex);
            cmd.SetComputeTextureParam(_computeShader, _kernalPass1, ShaderProperties.Result, _reflectionTexID);
            cmd.DispatchCompute(_computeShader, _kernalPass1, threadGroupX, threadGroupY, 1);

            // Pass 2: resolve occlusion between pixels mirrored to the same target.
            cmd.SetComputeTextureParam(_computeShader, _kernalPass2, ShaderProperties.CameraColorTexture, cameraColorTex);
            cmd.SetComputeTextureParam(_computeShader, _kernalPass2, ShaderProperties.Result, _reflectionTexID);
            cmd.DispatchCompute(_computeShader, _kernalPass2, threadGroupX, threadGroupY, 1);

            if (_enableBlur)
            {
                _blurBlitter.SetSource(_reflectionTexID, reflectionTexDes);
                _blurBlitter.blurType = BlurType.BoxBilinear;
                _blurBlitter.iteratorCount = 1;
                _blurBlitter.downSample = 1;
                _blurBlitter.Render(cmd);
            }

            // Expose the result globally so the reflection material can sample it.
            cmd.SetGlobalTexture(_reflectionTexID, _reflectionTexID);
        }

        /// <summary>
        /// Releases the temporary reflection render texture allocated by Render.
        /// </summary>
        public void ReleaseTemporary(CommandBuffer cmd)
        {
            cmd.ReleaseTemporaryRT(_reflectionTexID);
        }
    }
    }
  • ReflectPlane:标记哪些平面需要反射

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    114
    115
    116
    117
    118
    119
    120
    121
    122
    123
    124
    125
    126
    127
    128
    129
    130
    131
    132
    133
    134
    135
    136
    137
    138
    139
    140
    141
    142
    143
    144
    145
    146
    147
    148
    149
    150
    151
    152
    153
    154
    155
    156
    157
    158
    159
    160
    161
    162
    163
    164
    165
    166
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Text;
    using System.Threading.Tasks;
    using UnityEngine;

    namespace URPLearn
    {
    /// <summary>
    /// 一个平面,平面由一个点和法线来确定
    /// </summary>
    /// <summary>
    /// 一个平面,平面由一个点和法线来确定
    /// (A plane defined by a point on it and its normal.)
    /// Equality means "same plane": nearly parallel normals (assumed normalized)
    /// and the other point lying on the plane.
    /// </summary>
    public struct PlanarDescriptor
    {
        public Vector3 position;
        public Vector3 normal;

        public static bool operator ==(PlanarDescriptor p1, PlanarDescriptor p2)
        {
            return IsNormalEqual(p1.normal, p2.normal) && IsPositionInPlanar(p1.position, p2);
        }

        public static bool operator !=(PlanarDescriptor p1, PlanarDescriptor p2)
        {
            return !(p1 == p2);
        }

        public override bool Equals(object obj)
        {
            // A boxed null or any non-PlanarDescriptor value never equals a plane;
            // the pattern match covers both cases.
            return obj is PlanarDescriptor p && this == p;
        }

        public override int GetHashCode()
        {
            // NOTE(review): this hash is not consistent with the fuzzy plane
            // equality above (two coplanar descriptors can hash differently), so
            // do not use this struct as a dictionary/set key — confirm callers.
            int hash = 17;
            hash = hash * 23 + position.GetHashCode();
            hash = hash * 23 + normal.GetHashCode();
            return hash;
        }

        private static bool IsNormalEqual(Vector3 n1, Vector3 n2)
        {
            // Normals count as equal when nearly parallel (dot product close to 1).
            return 1 - Vector3.Dot(n1, n2) < 0.001f;
        }

        private static bool IsPositionInPlanar(Vector3 checkPos, PlanarDescriptor planar)
        {
            // BUGFIX: compare the absolute signed distance to the plane. The
            // original compared the raw signed distance with < 0.01f, so any point
            // far on the negative side of the plane was treated as "on the plane".
            return Mathf.Abs(Vector3.Dot(planar.position - checkPos, planar.normal)) < 0.01f;
        }
    }

    /// <summary>
    /// All renderers that lie on one reflection plane, rendered together.
    /// </summary>
    public class PlanarRendererGroup
    {
        public PlanarDescriptor descriptor;
        public HashSet<Renderer> renderers = new HashSet<Renderer>();

        /// <summary>Removes all renderers so the group can be pooled and reused.</summary>
        public void Clear() => renderers.Clear();
    }

    /// <summary>
    /// Groups renderers by the plane they lie on, pooling the group objects to
    /// avoid per-frame allocations.
    /// </summary>
    public class PlanarRendererGroups
    {
        /// <summary>
        /// 池子 (recycled group instances)
        /// </summary>
        private Stack<PlanarRendererGroup> _freePool = new Stack<PlanarRendererGroup>();
        /// <summary>
        /// 平面反射 (active per-plane groups)
        /// </summary>
        private List<PlanarRendererGroup> _planarRenderers = new List<PlanarRendererGroup>();

        public List<PlanarRendererGroup> PlanarRenderers => _planarRenderers;

        /// <summary>
        /// Registers a renderer; renderers on the same plane share one group.
        /// </summary>
        public void AddRender(Renderer renderer)
        {
            var tf = renderer.transform;
            var planarDescriptor = new PlanarDescriptor()
            {
                position = tf.position,
                normal = tf.up
            };
            // Reuse an existing group when the renderer lies on a known plane.
            foreach (var existing in _planarRenderers)
            {
                if (existing.descriptor == planarDescriptor)
                {
                    existing.renderers.Add(renderer);
                    return;
                }
            }
            // Otherwise start a new group for this plane.
            var group = AllocateGroup();
            group.descriptor = planarDescriptor;
            group.renderers.Add(renderer);
            _planarRenderers.Add(group);
        }

        private PlanarRendererGroup AllocateGroup()
        {
            // Prefer a pooled instance; allocate only when the pool is empty.
            return _freePool.Count > 0 ? _freePool.Pop() : new PlanarRendererGroup();
        }

        /// <summary>
        /// Empties every active group and returns it to the pool.
        /// </summary>
        public void Clear()
        {
            foreach (var group in _planarRenderers)
            {
                group.Clear();
                _freePool.Push(group);
            }
            _planarRenderers.Clear();
        }
    }

    /// <summary>
    /// Marks a plane as reflective. Enabled instances register themselves in a
    /// static list that the SSPR pass queries every frame.
    /// </summary>
    [ExecuteInEditMode]
    public class ReflectPlane : MonoBehaviour
    {
        private static List<ReflectPlane> _reflectPlanes = new List<ReflectPlane>();
        public static List<ReflectPlane> ReflectPlanes => _reflectPlanes;

        /// <summary>
        /// Collects the renderers of all currently visible reflection planes into
        /// <paramref name="groups"/>, grouped by coplanarity.
        /// </summary>
        public static void GetVisiblePlanarGroups(PlanarRendererGroups groups)
        {
            groups.Clear();
            foreach (var p in ReflectPlanes)
            {
                var renderer = p.GetComponent<Renderer>();
                // BUGFIX: a ReflectPlane without a Renderer component previously
                // caused a NullReferenceException on renderer.isVisible.
                if (renderer != null && renderer.isVisible)
                {
                    groups.AddRender(renderer);
                }
            }
        }

        private void OnEnable()
        {
            _reflectPlanes.Add(this);
        }

        private void OnDisable()
        {
            _reflectPlanes.Remove(this);
        }
    }
    }
  • ComputeShader:对图像进行反转操作

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    // SSPR reflection-texture generation:
    //   Clear  -> zero the output,
    //   Pass 1 -> scatter each scene pixel's color to its plane-mirrored position,
    //   Pass 2 -> resolve pixels that were mirrored onto the same target texel.
    // The *_EXCLUDE_BACKGROUND variants skip far-plane (skybox) pixels; they are
    // declared directly after the default kernels, which the C# side relies on
    // when offsetting kernel indices by 2.
    #pragma kernel Clear
    #pragma kernel DrawReflectionTex1
    #pragma kernel DrawReflectionTex2
    #pragma kernel DrawReflectionTex1 EXCLUDE_BACKGROUND
    #pragma kernel DrawReflectionTex2 EXCLUDE_BACKGROUND

    #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"

    // Output reflection texture; rgb = reflected scene color, a = reflection
    // depth (consumed by pass 2 to pick the closest contributor).
    RWTexture2D<float4> _Result;

    Texture2D<float4> _CameraColorTexture;
    Texture2D<float> _CameraDepthTexture;
    // xy = 1 / texture size, zw = texture size in pixels (set from C#).
    float4 _MainTex_TexelSize;

    float4x4 _MatrixInvVP;
    float4x4 _MatrixVP;

    // A point on the reflection plane and its normal (assumed normalized).
    float3 _PlanarPosition;
    float3 _PlanarNormal;

    SamplerState PointClampSampler;


    // Clip space -> world space via the inverse view-projection matrix.
    float3 TransformPositionCSToWS(float3 positionCS) {
    float4 positionWS = mul(_MatrixInvVP, float4(positionCS, 1));
    positionWS /= positionWS.w;
    return positionWS.xyz;
    }

    // Rebuild a world-space position from screen uv (0..1) and raw device depth.
    float3 ReconstructPositionWS(float2 uv, float depth) {
    float3 positionCS = float3(uv * 2 - 1, depth);
    float3 positionWS = TransformPositionCSToWS(positionCS);
    return positionWS;
    }

    // World space -> screen: returns (uv, depth) after the perspective divide.
    float3 Reproject(float3 positionWS) {
    float4 positionCS = mul(_MatrixVP, float4(positionWS, 1));
    positionCS /= positionCS.w;
    positionCS.xy = (positionCS.xy + 1) * 0.5;
    return positionCS.xyz;
    }

    // Mirrors positionWS across the plane; w carries the signed distance to the
    // plane so callers can reject points on or below it.
    float4 GetMirrorPositionWS(float3 positionWS) {
    float normalProj = dot(positionWS - _PlanarPosition, _PlanarNormal);
    return float4(positionWS - normalProj * _PlanarNormal * 2, normalProj);
    }


    [numthreads(8, 8, 1)]
    void Clear(uint3 id : SV_DispatchThreadID)
    {
    _Result[id.xy] = float4(0, 0, 0, 0);
    }


    // Reads the scene depth at this thread's pixel and returns its world position
    // mirrored across the plane (w = the pixel's signed distance to the plane).
    float4 GetMirrorPositionWSFromID(uint3 id) {
    float2 pixelCoord = id.xy;
    float2 uv = id.xy * _MainTex_TexelSize.xy;
    float depth = _CameraDepthTexture.SampleLevel(PointClampSampler, uv, 0);
    #ifdef EXCLUDE_BACKGROUND
    // Skip background pixels: depth sits at the far plane (0 with reversed-Z,
    // 1 otherwise), e.g. the skybox.
    #if UNITY_REVERSED_Z
    if (depth == 0)
    #else
    if (depth == 1)
    #endif
    {
    return float4(0, 0, 0, 0);
    }
    #endif
    float3 positionWS = ReconstructPositionWS(uv, depth);
    float4 mirrorPositionWS = GetMirrorPositionWS(positionWS);
    return mirrorPositionWS;
    }

    // Screen (uv, depth) of the mirrored pixel; returns (0,0,0) when the source
    // pixel lies on or below the reflection plane and contributes nothing.
    float3 GetMirrorUVDepthFromID(uint3 id) {
    float4 mirrorPositionWS = GetMirrorPositionWSFromID(id);
    if (mirrorPositionWS.w > 0.01) {
    float3 uvAndDepth = Reproject(mirrorPositionWS.xyz);
    return uvAndDepth;
    }
    else {
    return float3(0, 0, 0);
    }
    }


    // Pass 1: scatter each visible pixel's color to its mirrored screen texel,
    // storing the reflection depth in alpha for pass 2's resolve.
    // NOTE(review): rejected pixels still write to texel (0,0) here — presumably
    // benign for the top-left texel, but worth confirming.
    [numthreads(8, 8, 1)]
    void DrawReflectionTex1(uint3 id : SV_DispatchThreadID) {
    float2 uv = id.xy;
    float3 mirrorUVAndDepth = GetMirrorUVDepthFromID(id);
    float2 mirrorPixelCoord = mirrorUVAndDepth.xy * _MainTex_TexelSize.zw;
    _Result[mirrorPixelCoord] = float4(_CameraColorTexture[uv].rgb, mirrorUVAndDepth.z);
    }

    // Pass 2: when several source pixels mirror onto the same texel, keep the one
    // closest to the camera by depth-testing against the alpha stored in pass 1,
    // then force alpha to 1 for the final texture.
    [numthreads(8, 8, 1)]
    void DrawReflectionTex2(uint3 id : SV_DispatchThreadID) {
    float2 uv = id.xy;
    float3 mirrorUVAndDepth = GetMirrorUVDepthFromID(id);
    float2 toPixelCoord = mirrorUVAndDepth.xy * _MainTex_TexelSize.zw;
    float4 originalColor = _Result[toPixelCoord];

    // With reversed-Z, larger depth means closer to the camera.
    #if UNITY_REVERSED_Z
    bool overwrite = mirrorUVAndDepth.z > originalColor.a;
    #else
    bool overwrite = mirrorUVAndDepth.z < originalColor.a;
    #endif
    if (overwrite) {
    _Result[toPixelCoord] = float4(_CameraColorTexture[uv].rgb, 1);
    }
    else {
    _Result[toPixelCoord] = float4(originalColor.rgb, 1);
    }
    }

总结

  • ComputeScreenPos在顶点着色器计算好屏幕空间坐标,再在片元着色器做齐次除法就可以得到屏幕uv
  • 屏幕空间操作指南:参考上面SSAO总结,一般就是涉及世界坐标重建这个事情

URP实现PBR

基础

  • 满足以下几点的光照模型,符合PBR模型:
  1. 微表面:不同材质的平面,有很多不同朝向不一的微小平面
  2. 能量守恒:出射光的总量不超过入射光的总量
  3. 反射方程:使用基于物理的BRDF(双向反射分布函数)
  • PBR反射方程:L(o) = ∫(fr(p,wi,wo) * Li(p,wi) * dot(n,wi) * dwi)

  • BRDF-Cooktorrance方程:

  1. fr(p,wi,wo) = k(d)*f(lambert) + K(s)*f(cook-torrance)
  2. f(lambert) = c / Π
  3. f(cook-torrance) = DFG / (4*dot(wo,n)*dot(wi,n))
  4. D:法线分布函数(NDF),估算微平面的整体取向,公式:a^2 / (Π * ((NdotH^2) * (a^2-1) + 1)^2) (注:a表示粗糙度)
  5. F: 菲涅尔方程,用于描述表面反射光所占比例, 公式: F0 + (1 - F0) * pow(1 - cosTheta, 5) (注:F0表示不同材质的垂直方向的反射率,直接光照中cosTheta:HdotV或HdotL,间接光照中cosTheta:NdotV)
  6. G:几何函数,用于计算微表面,自阴影, 公式:cosTheta / (cosTheta(1.0 - k) + k) (注:k由粗糙度a计算,直接光照:k=(a+1)^2 / 8, 间接光照:k=a^2 / 2 , cosTheta需要分别计算NoV,NoL的和)
  7. K(d):(1-F)*(1-metallic)
  8. K(s):菲涅尔值里包括了表面反射系数,因此K(s)不需要
  • IBL间接光照
  1. CubeMap IrradianceMap 或者ShadeSH9() ,URP用SampleSH() 球谐函数计算间接光照的diffuse
  2. 预高光积分图,或者高光积分算法计算间接光照的specular
    预高光积分图
    1
    2
    3
    4
    5
    6
    7
    8
    float2 IntegrateSpecularBRDF(float NoV, float roughness)
    {
    const float4 c0 = float4(-1, -0.0275, -0.572, 0.022);
    const float4 c1 = float4(1, 0.0425, 1.04, -0.04);
    float4 r = roughness * c0 + c1;
    float a004 = min(r.x * r.x, exp2(-9.28 * NoV)) * r.x + r.y;
    return float2(-c1.z, c1.z) * a004 + r.zw;
    }
  • 准备资源
    PBR基础颜色
    PBR法线
    PBR金属度
    PBR粗糙度

代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
// PBR shader: Cook-Torrance direct lighting plus image-based indirect lighting
// (spherical harmonics for diffuse, reflection probe + split-sum env BRDF for
// specular), targeting the Universal Render Pipeline.
Shader "Unlit/PBR"
{
Properties
{
_BaseColor("_BaseColor", Color) = (1, 1, 1, 1)
_MainTex("_MainTex", 2D) = "white" {}

_BumpScale("_BumpScale", Range(-1, 1)) = 1
[NoScaleOffset] _BumpMap("_BumpMap", 2D) = "bump" {}

[NoScaleOffset] _MetalnessMap("_MetalnessMap", 2D) = "black" {}
[NoScaleOffset] _RoughnessMap("_RoughnessMap", 2D) = "gray" {}

_IndirectIntensity("_IndirectIntensity", Range(0, 1)) = 1
//[NoScaleOffset] _IrradianceCube ("_IrradianceCube", Cube) = "black" {}
//[NoScaleOffset] _RadianceCube ("_RadianceCube", Cube) = "black" {}

[Toggle(USE_BRDF_INTEGRATION_MAP)] _UseBRDFIntegrationMap("_UseBRDFIntegrationMap", Float) = 0
[NoScaleOffset] _BRDFIntegrationMap("_BRDFIntegrationMap", 2D) = "black" {}
}
SubShader
{
Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
HLSLINCLUDE
#define EPSILSON 0.000001
#define BRDF_PI 3.14159265359
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

TEXTURE2D(_MainTex);
SAMPLER(sampler_MainTex);
TEXTURE2D(_BumpMap);
SAMPLER(sampler_BumpMap);

TEXTURE2D(_MetalnessMap);
SAMPLER(sampler_MetalnessMap);
TEXTURE2D(_RoughnessMap);
SAMPLER(sampler_RoughnessMap);
TEXTURE2D(_BRDFIntegrationMap);
SAMPLER(sampler_BRDFIntegrationMap);

// Per-material constants (must match for SRP batcher compatibility).
CBUFFER_START(UnityPerMaterial)
float4 _MainTex_ST;
float4 _BaseColor;
float _BumpScale;
float _IndirectIntensity;
CBUFFER_END

struct Attributes
{
float4 positionOS:POSITION;
float4 normalOS:NORMAL;
float4 tangentOS:TANGENT;
float2 uv : TEXCOORD0;
};

struct Varyings
{
float4 positionHS : SV_POSITION;
float2 uv : TEXCOORD0;
// The w components of the three basis vectors carry the world-space position.
float4 tangentWS:TEXCOORD1;
float4 BtangentWS:TEXCOORD2;
float4 normalWS:TEXCOORD3;
};

// Fresnel term (Schlick approximation).
float3 FresnelSchlick(float NoV, float3 F0)
{
return F0 + (1.0 - F0) * pow(1.0 - NoV, 5);
}
// Geometry term for one direction (Schlick-GGX).
float GeometrySchlickGGX(float NoV, float k)
{
return NoV / max(NoV * (1.0 - k) + k, EPSILSON);
}
// Geometry term (Smith: combines view and light self-shadowing).
float GeometrySmith(float NoV, float NoL, float k)
{
return GeometrySchlickGGX(NoV, k) * GeometrySchlickGGX(NoL, k);
}
// Normal distribution function (GGX); alpha = roughness * roughness.
float DistributionGGX(float NoH, float alpha)
{
float a2 = alpha * alpha;
float denom = pow(NoH * NoH * (a2 - 1.0) + 1.0, 2);
return a2 / max(denom * BRDF_PI, EPSILSON);
}
// Direct lighting: Cook-Torrance BRDF evaluated against the main light.
float3 CalcDirectLight(float metalness, float roughness, float3 albedo, float3 F0, float3 normal, float3 viewDir, float NoV, float3 worldPos)
{
// Gather vectors and dot products.
Light mylight = GetMainLight();
float3 lightDir = normalize(mylight.direction); // light direction
float3 floatDir = normalize(viewDir + lightDir); // half vector
float NoL = saturate(dot(normal, lightDir)); // N·L
float NoH = saturate(dot(normal, floatDir)); // N·H
float HoL = saturate(dot(floatDir, lightDir)); // H·L (same as H·V)

// BRDF terms.
float3 F = FresnelSchlick(HoL, F0); // Fresnel
float G = GeometrySmith(NoV, NoL, pow(roughness + 1.0, 2) / 8.0); // geometry, direct-light k = (a+1)^2 / 8
float D = DistributionGGX(NoH, roughness * roughness); // distribution
float3 kD = (1.0 - F) * (1.0 - metalness); // diffuse ratio

// Combine diffuse and specular with the incoming light.
float3 directDiffuse = kD * albedo / BRDF_PI; // Lambert diffuse
float3 directSpecular = F * (D * G) / (4.0 * max(NoV * NoL, EPSILSON)); // specular
float3 directLightIn = mylight.color * BRDF_PI; // incoming light
return (directDiffuse + directSpecular) * NoL * directLightIn;
}

// Analytic approximation of the environment BRDF (Karis mobile split-sum fit),
// used when no precomputed BRDF integration map is supplied.
float2 IntegrateSpecularBRDF(float NoV, float roughness)
{
const float4 c0 = float4(-1, -0.0275, -0.572, 0.022);
const float4 c1 = float4(1, 0.0425, 1.04, -0.04);
float4 r = roughness * c0 + c1;
float a004 = min(r.x * r.x, exp2(-9.28 * NoV)) * r.x + r.y;
return float2(-c1.z, c1.z) * a004 + r.zw;
}

// Evaluates the scene's spherical-harmonics ambient lighting for a normal.
real3 SampleSH(real3 normalWS)
{
// LPPV is not supported in Lightweight Pipeline
real4 SHCoefficients[7];
SHCoefficients[0] = unity_SHAr;
SHCoefficients[1] = unity_SHAg;
SHCoefficients[2] = unity_SHAb;
SHCoefficients[3] = unity_SHBr;
SHCoefficients[4] = unity_SHBg;
SHCoefficients[5] = unity_SHBb;
SHCoefficients[6] = unity_SHC;

return max(real3(0, 0, 0), SampleSH9(SHCoefficients, normalWS));
}

// Indirect (image-based) lighting: SH diffuse + reflection-probe specular.
float3 CalcIndirectLight(float metalness, float roughness, float3 albedo, float3 F0, float3 normal, float3 viewDir, float NoV)
{
// Indirect Fresnel uses N·V instead of H·V.
float3 F = FresnelSchlick(NoV, F0); // Fresnel
float3 kD = (1.0 - F) * (1.0 - metalness); // diffuse ratio

// Indirect diffuse from spherical harmonics.
//float3 indirectDiffuse = SAMPLE_TEXTURE2D(_IrradianceCube, normal).rgb;
float3 indirectDiffuse = SampleSH(normal);
indirectDiffuse *= kD * albedo;

// Indirect specular from the reflection probe.
float mip = PerceptualRoughnessToMipmapLevel(roughness); // mip level from roughness
float3 reflDir = reflect(-viewDir, normal); // view reflection direction
//float3 indirectSpecular = SAMPLE_TEXTURECUBE_LOD(_RadianceCube, float4(reflDir, mip)).rgb;
float4 encodedIrradiance = SAMPLE_TEXTURECUBE_LOD(unity_SpecCube0, samplerunity_SpecCube0, reflDir, mip);
float3 indirectSpecular = DecodeHDREnvironment(encodedIrradiance, unity_SpecCube0_HDR);

// Environment BRDF (split-sum): lookup texture or analytic fit.
#if USE_BRDF_INTEGRATION_MAP
float2 envBRDF = SAMPLE_TEXTURE2D(_BRDFIntegrationMap, sampler_BRDFIntegrationMap, float2(NoV, roughness)).rg;
#else
float2 envBRDF = IntegrateSpecularBRDF(NoV, roughness);
#endif
indirectSpecular *= F * envBRDF.x + envBRDF.y;

// Combine, scaled by the user-facing intensity knob.
return (indirectDiffuse + indirectSpecular) * _IndirectIntensity;
}
ENDHLSL

Pass
{
Tags{ "LightMode" = "UniversalForward" }
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment Frag
#pragma shader_feature USE_BRDF_INTEGRATION_MAP

Varyings Vert(Attributes i)
{
Varyings o;
VertexPositionInputs input = GetVertexPositionInputs(i.positionOS.xyz);
o.positionHS = input.positionCS;

VertexNormalInputs normalInput = GetVertexNormalInputs(i.normalOS, i.tangentOS);
o.tangentWS.xyz = normalInput.tangentWS;
o.BtangentWS.xyz = normalInput.bitangentWS;
o.normalWS.xyz = normalInput.normalWS;
// Pack the world-space position into the spare w components.
o.tangentWS.w = input.positionWS.x;
o.BtangentWS.w = input.positionWS.y;
o.normalWS.w = input.positionWS.z;

o.uv = TRANSFORM_TEX(i.uv, _MainTex);
return o;
}

float4 Frag(Varyings i) :SV_Target{

// Unpack the world-space position stored by the vertex shader.
float3 positionWS = float3(i.tangentWS.w,i.BtangentWS.w,i.normalWS.w);

float metalness = SAMPLE_TEXTURE2D(_MetalnessMap, sampler_MetalnessMap, i.uv).r;
float roughness = SAMPLE_TEXTURE2D(_RoughnessMap, sampler_RoughnessMap, i.uv).r;
float4 mainColor = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.uv);
float3 albedo = _BaseColor.rgb * mainColor.rgb;

// Tangent-to-world rotation (rows: tangent, bitangent, normal).
float3x3 T2W = { i.tangentWS.xyz, i.BtangentWS.xyz, i.normalWS.xyz };

float4 norTex = SAMPLE_TEXTURE2D(_BumpMap, sampler_BumpMap, i.uv);
float3 normalTS = UnpackNormalScale(norTex, _BumpScale);
normalTS.z = pow(1 - pow(normalTS.x, 2) - pow(normalTS.y, 2), 0.5f); // rebuild z so the normal is unit length
float3 normalWS = normalize(mul(normalTS, T2W));
float3 viewDirWS = normalize(_WorldSpaceCameraPos.xyz - positionWS);

// F0: dielectrics reflect ~4%; metals reflect their albedo.
float3 F0 = lerp(0.04f, albedo, metalness);
// NOTE(review): NoV is not saturated here — back-facing shading may misbehave; confirm.
float NoV = dot(normalWS, viewDirWS);

float3 directColor = CalcDirectLight(metalness, roughness, albedo, F0, normalWS, viewDirWS, NoV, positionWS);
float3 indirectColor = CalcIndirectLight(metalness, roughness, albedo, F0, normalWS, viewDirWS, NoV);

return float4(directColor + indirectColor, mainColor.a);
}
ENDHLSL
}
}
}

总结

构建一个通用的instance渲染流程

项目链接
目标:针对所有使用instance绘制的模型都可以方便做剔除,方便写shader,方便做材质特殊化参数处理

  1. 使用Graphics.DrawMeshInstancedIndirect进行批量绘制操作,官方用法说明
  2. 做culling包括:视锥裁剪,HiZ裁剪,以及四叉树加速,Tile加速等
  3. 通用shader库

Graphics.DrawMeshInstancedIndirect

Graphics.DrawMeshInstancedIndirect(instanceMesh, subMeshIndex, instanceMaterial, new Bounds(Vector3.zero, new Vector3(100.0f, 100.0f, 100.0f)), argsBuffer);

  1. bounds传绘制所有实例的包围盒,如果bounds被剔除则实例都不会被绘制

  2. argsBuffer绘制参数:

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    uint[] args = new uint[5];
    if (instanceMesh != null) {
    args[0] = (uint)instanceMesh.GetIndexCount(subMeshIndex);
    args[1] = (uint)instanceCount;
    args[2] = (uint)instanceMesh.GetIndexStart(subMeshIndex);
    args[3] = (uint)instanceMesh.GetBaseVertex(subMeshIndex);
    }
    else
    {
    args[0] = args[1] = args[2] = args[3] = 0;
    }
    argsBuffer.SetData(args);
  3. 在computeshader中获取剔除完的结果
    visualbleBuffer = new ComputeBuffer(instances.Count, sizeof(uint), ComputeBufferType.Append);
    // 清空buffer
    visualbleBuffer.SetCounterValue(0);
    // 将buffer的数量,拷贝至argsBuffer,偏移4位是一个uint大小,即设置argsBuffer[1]的值
    ComputeBuffer.CopyCount(visualbleBuffer, argsBuffer, 4);

剔除

Unity中使用ComputeShader做视锥剔除(View Frustum Culling)
【Unity】使用Compute Shader实现Hi-z遮挡剔除(Occlusion Culling)

视锥剔除

首先确认我们要做什么:视锥是一个六面体,一个模型被剔除需要其包围盒的所有顶点都在同一个面的外侧;判断点与面的位置关系,用面的法线与点做点乘(加上平面方程的 d 项),对六个面结果都<=0该点才在六面体内(六个面法线都指向六面体外部)

  1. 使用GeometryUtility.CalculateFrustumPlanes来直接获得视锥的六个面
    也可以自己拿相机上的far,near,FOV,aspect也可以计算出视锥顶点然后计算出面,一条法线以及一个点可以确定一个面,3个点能确定一个面

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
// A plane from a point and a normal: xyz = normal, w = -dot(normal, point),
// so dot(normal, p) + w == 0 for every point p on the plane.
public static Vector4 GetPlane(Vector3 normal, Vector3 point)
{
return new Vector4(normal.x, normal.y, normal.z, -Vector3.Dot(normal, point));
}
// A plane from three points; the winding of a, b, c fixes the normal
// direction via cross(b - a, c - a).
public static Vector4 GetPlane(Vector3 a, Vector3 b, Vector3 c)
{
Vector3 normal = Vector3.Normalize(Vector3.Cross(b - a, c - a));
return GetPlane(normal, a);
}
// The four world-space corners of the camera's far clip plane.
public static Vector3[] GetCameraFarClipPlanePoint(Camera camera)
{
Vector3[] points = new Vector3[4];
Transform transform = camera.transform;
float distance = camera.farClipPlane;
// fieldOfView is the vertical FOV in degrees; half of it spans the far
// plane's half-height, and the aspect ratio gives the half-width.
float halfFovRad = Mathf.Deg2Rad * camera.fieldOfView * 0.5f;
float upLen = distance * Mathf.Tan(halfFovRad);
float rightLen = upLen * camera.aspect;
Vector3 farCenterPoint = transform.position + distance * transform.forward;
Vector3 up = upLen * transform.up;
Vector3 right = rightLen * transform.right;
points[0] = farCenterPoint - up - right;//left-bottom
points[1] = farCenterPoint - up + right;//right-bottom
points[2] = farCenterPoint + up - right;//left-up
points[3] = farCenterPoint + up + right;//right-up
return points;
}
// The six frustum planes. The point windings (clockwise) are chosen so that
// every plane normal points out of the frustum.
public static Vector4[] GetFrustumPlane(Camera camera)
{
Vector4[] planes = new Vector4[6];
Transform transform = camera.transform;
Vector3 cameraPosition = transform.position;
Vector3[] points = GetCameraFarClipPlanePoint(camera);
// Side planes: camera position plus two far-plane corners.
planes[0] = GetPlane(cameraPosition, points[0], points[2]);//left
planes[1] = GetPlane(cameraPosition, points[3], points[1]);//right
planes[2] = GetPlane(cameraPosition, points[1], points[0]);//bottom
planes[3] = GetPlane(cameraPosition, points[2], points[3]);//up
planes[4] = GetPlane(-transform.forward, transform.position + transform.forward * camera.nearClipPlane);//near
planes[5] = GetPlane(transform.forward, transform.position + transform.forward * camera.farClipPlane);//far
return planes;
}
  2. 确定物体包围盒(根据具体绘制物体),一个包围盒信息需要包括BoundMin,BoundMax,如下图:
    包围盒
    这个包围盒可以算是当前模型,模型空间的标准包围盒,最终裁剪放在世界坐标系下,需要对包围盒做顶点变化,结果相当于OBB包围盒

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    //包围盒的8个顶点
    float3(boundMin));
    float3(boundMax));
    float3(boundMax.x, boundMax.y, boundMin.z));
    float3(boundMax.x, boundMin.y, boundMax.z));
    float3(boundMax.x, boundMin.y, boundMin.z));
    float3(boundMin.x, boundMax.y, boundMax.z));
    float3(boundMin.x, boundMax.y, boundMin.z));
    float3(boundMin.x, boundMin.y, boundMax.z));

    // localToWorld变换矩阵
    Matrix4x4 localToWorldMatrix = Matrix4x4.TRS(position, quaternion, scale)
  3. 检测包围盒GeometryUtility.TestPlanesAABB,传入面和包围盒信息,在CPU处使用,为了优化我们将裁剪放在GPU处,那么过程就需要自己写了
解释:平面方程:p = ax+by+cz+d,其中d = -dot(normal,O),O为平面上一点,abc对应normal.xyz,代入一个点A
法线为(nx,ny,nz),由于O(ox,oy,oz)在平面上,那么就可以求出平面方程中D的值为:-(nx*ox+ny*oy+nz*oz)。如果我们把A(ax,ay,az)代入这个平面方程,可得:nx*ax+ny*ay+nz*az-(nx*ox+ny*oy+nz*oz)
提取一下可得 nx*(ax-ox)+ny*(ay-oy)+nz*(az-oz),这不就是法向量n与向量OA的点乘,那么该结果>0意味着点A在平面上方(法线一侧),=0在平面上,<0在平面下方,
    因此可以得出Vector3.Dot(planeNormal, boundPosition) + plane.w <= 0,意为在平面里面,因为平面法线朝外。
    8个点跟视锥6个面判断,只要有一个点在面内即认为该包围盒在面内,如果六个面符合,表示该包围盒在视锥内。

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    // plane[6] , boundVextex[8],对于6个面,8个顶点, plane.xyz存储normal,plane.w存distance
    public bool TestAABB(Vector4[] planes, Vector3[] boundVextex)
    {
    for (int i = 0; i < 6; i++)
    {
    for (int j = 0; j < 8; j++)
    {
    Vector4 plane = planes[i];
    Vector3 boundPosition = boundVextex[j];
    Vector3 planeNormal = new Vector3(plane.x, plane.y, plane.z);
    if (Vector3.Dot(planeNormal, boundPosition) + plane.w <= 0)
    break;
    if (j == 7)
    return false;
    }
    }
    return true;
    }
    // 能走出循环的则是没被剔除的
  4. 加速视锥剔除,上面只描述了如何剔除单个模型,那有如果有n个模型显示,我们循环n次剔除,压力非常大,加速视锥剔除方式可以采用四叉树来处理

  5. 以上是针对世界坐标系进行剔除的,如果在裁剪空间剔除则相当简单,先区分平台:
    OpenGL裁剪空间范围从x:[-w,w],y:[-w,w],z:[-w,w]
    DX裁剪空间范围从x:[-w,w],y:[-w,w],z:[0,w]
    只需要判断裁剪空间坐标的x,y,z轴是否符合以上规则即可

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    bool IsInClipSpace(float4 clipSpacePosition)
    {
    if (_IsOpenGL)
    return clipSpacePosition.x > -clipSpacePosition.w && clipSpacePosition.x < clipSpacePosition.w&&
    clipSpacePosition.y > -clipSpacePosition.w && clipSpacePosition.y < clipSpacePosition.w&&
    clipSpacePosition.z > -clipSpacePosition.w && clipSpacePosition.z < clipSpacePosition.w;
    else
    return clipSpacePosition.x > -clipSpacePosition.w && clipSpacePosition.x < clipSpacePosition.w&&
    clipSpacePosition.y > -clipSpacePosition.w && clipSpacePosition.y < clipSpacePosition.w&&
    clipSpacePosition.z > 0 && clipSpacePosition.z < clipSpacePosition.w;
    }

四叉树加速视锥剔除

  1. 关于如何构建四叉树,过程比较简单,一直往下递归将当前区域切割成4块,设置一个深度阈值或者长宽阈值,达到后不再继续
  2. 如何使用四叉树进行碰撞检测,可以参考:Unity 四叉树QuadTree应用之碰撞检测
  3. 主要是实现两个方法:插入模型,AABB碰撞检测,碰撞检测在上面已经说明的很详细了,只需要再实现插入模型算法:
    由于是四叉树,我们需要知道插入的模型落在一个区域的4块中的哪几块;如果是跨区域的,就在每一块中都加入引用,这也是一个递归的过程。代码中写的树同时加入了二叉树、八叉树的判断

四叉树加速检测

代码
  • 一些Pool方便缓存使用,节省GC

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    using System;
    using System.Collections;
    using System.Collections.Generic;

    public static class ListPool
    {
    private static Dictionary<int, Stack<IList>> Pool = new Dictionary<int, Stack<IList>>();

    public static List<T> Get<T>()
    {
    if (Pool.TryGetValue(typeof(T).GetHashCode(), out Stack<IList> stack))
    {
    if (stack.Count > 0)
    return stack.Pop() as List<T>;
    }
    return new List<T>();
    }

    public static void Release<T>(List<T> list)
    {
    if (list != null)
    {
    list.Clear();
    int key = typeof(T).GetHashCode();
    if (!Pool.TryGetValue(key, out Stack<IList> stack))
    {
    stack = new Stack<IList>();
    Pool.Add(key, stack);
    }
    stack.Push(list);
    }
    }
    }

    public static class DictionaryPool
    {
    private static Dictionary<int, Stack<IDictionary>> Pool = new Dictionary<int, Stack<IDictionary>>();

    public static Dictionary<Key, Value> Get<Key, Value>()
    {
    int key = typeof(Key).GetHashCode() + typeof(Value).GetHashCode();
    if (Pool.TryGetValue(key, out Stack<IDictionary> stack))
    {
    if (stack.Count > 0)
    return stack.Pop() as Dictionary<Key, Value>;
    }
    return new Dictionary<Key, Value>();
    }

    public static void Release<Key, Value>(Dictionary<Key, Value> dict)
    {
    if (dict != null)
    {
    dict.Clear();
    int key = typeof(Key).GetHashCode() + typeof(Value).GetHashCode();
    if (!Pool.TryGetValue(key, out Stack<IDictionary> stack))
    {
    stack = new Stack<IDictionary>();
    Pool.Add(key, stack);
    }
    stack.Push(dict);
    }
    }
    }

    public static class ClassPool
    {
    private static Dictionary<int, Stack<object>> Pool = new Dictionary<int, Stack<object>>();

    public static T Get<T>() where T : class
    {
    if (Pool.TryGetValue(typeof(T).GetHashCode(), out Stack<object> stack))
    {
    if (stack.Count > 0)
    return stack.Pop() as T;
    }
    return System.Activator.CreateInstance<T>();
    }

    public static void Release<T>(T instance)
    {
    if (instance != null)
    {
    int key = typeof(T).GetHashCode();
    if (!Pool.TryGetValue(key, out Stack<object> stack))
    {
    stack = new Stack<object>();
    Pool.Add(key, stack);
    }
    stack.Push(instance);
    }
    }
    }
  • 四叉树

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    114
    115
    116
    117
    118
    119
    120
    121
    122
    123
    124
    125
    126
    127
    128
    129
    130
    131
    132
    133
    134
    135
    136
    137
    138
    139
    140
    141
    142
    143
    144
    145
    146
    147
    148
    149
    150
    151
    152
    153
    154
    155
    156
    157
    158
    159
    160
    161
    162
    163
    164
    165
    166
    167
    168
    169
    170
    171
    172
    173
    174
    175
    176
    177
    178
    179
    180
    181
    182
    183
    184
    185
    186
    187
    188
    189
    190
    191
    192
    193
    194
    195
    196
    197
    198
    199
    200
    201
    202
    203
    204
    205
    206
    207
    208
    209
    210
    211
    212
    213
    214
    215
    216
    217
    218
    219
    220
    221
    222
    223
    224
    225
    226
    227
    228
    229
    230
    231
    232
    233
    234
    235
    236
    237
    238
    239
    240
    241
    242
    243
    244
    245
    246
    247
    248
    249
    250
    251
    252
    253
    254
    255
    256
    257
    258
    259
    260
    261
    262
    263
    264
    265
    266
    267
    268
    269
    270
    271
    272
    273
    274
    275
    276
    277
    278
    279
    280
    281
    282
    283
    284
    285
    286
    287
    288
    289
    290
    291
    292
    293
    294
    295
    296
    297
    298
    299
    300
    301
    302
    303
    304
    305
    306
    307
    308
    309
    310
    311
    312
    313
    314
    315
    316
    317
    318
    319
    320
    321
    322
    323
    324
    325
    326
    327
    328
    329
    330
    331
    332
    333
    334
    335
    336
    337
    338
    339
    340
    341
    342
    343
    344
    345
    346
    347
    348
    349
    350
    351
    352
    353
    354
    355
    356
    357
    358
    359
    360
    361
    362
    363
    364
    365
    366
    367
    368
    369
    370
    371
    372
    373
    374
    375
    376
    377
    378
    379
    380
    381
    382
    383
    384
    385
    386
    387
    388
    389
    390
    391
    392
    393
    394
    395
    396
    397
    398
    399
    400
    401
    402
    403
    404
    405
    406
    407
    408
    409
    410
    411
    412
    413
    using System;
    using System.Collections.Generic;
    using UnityEngine;

    namespace Gameplay
    {
/// <summary>
/// How many children each node of the spatial tree splits into.
/// The numeric value is the child count used by Node.Partition.
/// </summary>
public enum TreeType
{
/// <summary>
/// Binary tree.
/// </summary>
Two = 2,
/// <summary>
/// Quadtree.
/// </summary>
Four = 4,
/// <summary>
/// Octree.
/// </summary>
Eight = 8,
}

/// <summary>
/// Bounds of a model stored in the tree. Binary/quad trees use center.x/.z
/// plus width/length only; octrees additionally use center.y and height.
/// </summary>
public interface IRect
{
/// <summary>World-space center of the bounds.</summary>
Vector3 center { get; set; }
/// <summary>Extent along the x axis.</summary>
float width { get; set; }
/// <summary>Extent along the z axis.</summary>
float length { get; set; }
/// <summary>Extent along the y axis (used by octrees).</summary>
float height { get; set; }
/// <summary>Unique id; used as the dictionary key inside a node.</summary>
uint id { get; set; }
}

/// <summary>
/// Child-cell tags; the three letters encode the x, z and y axes.
/// A binary tree uses only the first letter (L/R, values 1-2),
/// a quadtree the first two (LT/RT/RB/LB, values 1-4),
/// an octree all three (values 1-8).
/// The enum value also serves as the bit index in the quadrant bitmask
/// returned by Tree.GetModelQuadrantIndex.
/// </summary>
public enum TreeBlitType
{
L_T_T = 1,
R_T_T = 2,
R_B_T = 3,
L_B_T = 4,
L_T_B = 5,
R_T_B = 6,
R_B_B = 7,
L_B_B = 8,
}

    public class Tree
    {
    /// <summary>
    /// 树类型
    /// </summary>
    public TreeType treeType;

    public Node root;

    private float angle;

    public Tree(TreeType treeType, float length, float width, float height, Vector3 center, float angle = 0)
    {
    this.treeType = treeType;
    root = ClassPool.Get<Node>().Set(treeType, length, width, height, center, 0, this);
    }

    public void UpdatePosition(Vector3 position, Vector3 eulerAngles)
    {
    if (position == root.center && eulerAngles.y == angle) return;

    angle = eulerAngles.y;
    var cacheMap = DictionaryPool.Get<uint, IRect>();
    root.UpdatePosition(position - root.center, cacheMap);

    foreach (var model in cacheMap.Values)
    {
    InsertTree(model);
    }
    DictionaryPool.Release(cacheMap);
    }

    /// <summary>
    /// 分割场景
    /// </summary>
    /// <param name="minLength">最小格子长度</param>
    /// <param name="minWidth">最小格子宽度</param>
    /// <param name="minHeight">最小格子高度</param>
    public void Partition(float minLength = 10, float minWidth = 10, float minHeight = 10, Func<Node, bool> compare = null)
    {
    root.Partition(minLength, minWidth, minHeight, compare == null ? DefaultPartitionCompare : compare);
    }

    private void InsertTreeChild(IRect model, Node node, int indexs, int type)
    {
    if ((indexs & 1 << type) == 1 << type)
    {
    InsertTree(model, node.childs[type - 1]);
    }
    }

    /// <summary>
    /// 将实际模型插入Tree
    /// </summary>
    public void InsertTree(IRect model, Node node = null)
    {
    node = node == null ? root : node;

    if (node.childs != null && node.childs.Count > 0)
    {
    int indexs = GetModelQuadrantIndex(model, node);
    if (indexs != 0)
    {
    for (int i = (int)TreeBlitType.L_T_T; i <= (int)TreeBlitType.L_B_B; i++)
    {
    InsertTreeChild(model, node, indexs, i);
    }
    }
    }
    else
    {
    node.AddModel(model);
    }
    }

    /// <summary>
    /// 获得模型在该结点的什么位置
    /// </summary>
    public int GetModelQuadrantIndex(IRect model, Node node)
    {
    Vector3 center = model.center;
    if (treeType != TreeType.Eight)
    {
    // 不是八叉树使用不到y轴
    center.y = root.center.y;
    }

    float halfWidth = model.width * 0.5f;
    float halfHeight = model.height * 0.5f;
    float halfLen = model.length * 0.5f;

    float minX = center.x - halfWidth;
    float maxX = center.x + halfWidth;
    float minY = center.y - halfHeight;
    float maxY = center.y + halfHeight;
    float minZ = center.z - halfLen;
    float maxZ = center.z + halfLen;

    halfWidth = node.width * 0.5f;
    halfHeight = node.height * 0.5f;
    halfLen = node.length * 0.5f;

    // 不在当前结点内
    if (minX > node.center.x + halfWidth
    || maxX < node.center.x - halfWidth
    || minY > node.center.y + halfHeight
    || maxY < node.center.y - halfHeight
    || minZ > node.center.z + halfLen
    || maxZ < node.center.z - halfLen)
    {
    return 0;
    }

    int indexs = 0;
    bool isLeft = minX <= node.center.x;
    bool isRight = maxX >= node.center.x;
    bool isTop = maxY >= node.center.y;
    bool isBottom = minY <= node.center.y;
    bool isFwd = maxZ >= node.center.z;
    bool isBackword = minZ <= node.center.z;

    switch (treeType)
    {
    case TreeType.Two:
    if (isLeft)
    {
    indexs |= 1 << (int)TreeBlitType.L_T_T;
    }
    if (isRight)
    {
    indexs |= 1 << (int)TreeBlitType.R_T_T;
    }
    break;
    case TreeType.Four:
    if (isLeft)
    {
    if (isFwd)
    indexs |= 1 << (int)TreeBlitType.L_T_T;
    if (isBackword)
    indexs |= 1 << (int)TreeBlitType.L_B_T;
    }
    if (isRight)
    {
    if (isFwd)
    indexs |= 1 << (int)TreeBlitType.R_T_T;
    if (isBackword)
    indexs |= 1 << (int)TreeBlitType.R_B_T;
    }
    break;
    case TreeType.Eight:
    if (isTop)
    {
    if (isLeft)
    {
    if (isFwd)
    indexs |= 1 << (int)TreeBlitType.L_T_T;
    if (isBackword)
    indexs |= 1 << (int)TreeBlitType.L_B_T;
    }
    if (isRight)
    {
    if (isFwd)
    indexs |= 1 << (int)TreeBlitType.R_T_T;
    if (isBackword)
    indexs |= 1 << (int)TreeBlitType.R_B_T;
    }
    }
    if (isBottom)
    {
    if (isLeft)
    {
    if (isFwd)
    indexs |= 1 << (int)TreeBlitType.L_T_B;
    if (isBackword)
    indexs |= 1 << (int)TreeBlitType.L_B_B;
    }
    if (isRight)
    {
    if (isFwd)
    indexs |= 1 << (int)TreeBlitType.R_T_B;
    if (isBackword)
    indexs |= 1 << (int)TreeBlitType.R_B_B;
    }
    }
    break;
    }
    return indexs;
    }

    private bool DefaultPartitionCompare(Node node)
    {
    Vector3 dir = node.center - root.center;
    if (node.depth == 0)
    return true;

    float depth = (Camera.main.farClipPlane - dir.magnitude) / 200.0f;
    //if (Vector3.Dot(dir.normalized, Camera.main.transform.forward) > 0 && node.depth < depth)
    //{
    // return true;
    //}
    return node.depth < depth;
    }

    public void Cleanup()
    {
    if (root != null)
    root.Cleanup();
    }

    public void RemoveModels()
    {
    if (root != null)
    root.RemoveModels();
    }

    public void OnDrawGizmos()
    {
    if (root != null)
    root.OnDrawGizmos();
    }

    /// <summary>
    /// AABB校验
    /// </summary>
    /// <param name="cameraFrustumPlanes">相机视锥的六个面[0] = Left, [1] = Right, [2] = Down, [3] = Up, [4] = Near, [5] = Far</param>
    /// <param name="visibleGrass">可见模型</param>
    public void TestPlanesAABB<T>(Plane[] cameraFrustumPlanes, List<T> visibleModel, Node node = null) where T:IRect
    {
    node = node == null ? root : node;
    //if (cameraFrustumPlanes == null)
    //{
    // cameraFrustumPlanes = new Plane[6];
    // GeometryUtility.CalculateFrustumPlanes(Camera.main, cameraFrustumPlanes);
    //}

    node.ClearDebugColor();
    if (GeometryUtility.TestPlanesAABB(cameraFrustumPlanes, node.GetBounds()))
    {
    node.debugColor = Color.blue;
    if (node.models != null)
    {
    foreach (var model in node.models)
    {
    visibleModel.Add((T)model.Value);
    }
    }
    if (node.childs != null)
    {
    foreach (var child in node.childs)
    {
    TestPlanesAABB(cameraFrustumPlanes, visibleModel, child);
    }
    }
    }
    }
    public void TestPlanesAABB(Plane[] cameraFrustumPlanes, List<uint> visibleModel, Node node = null)
    {
    node = node == null ? root : node;
    //if (cameraFrustumPlanes == null)
    //{
    // cameraFrustumPlanes = new Plane[6];
    // GeometryUtility.CalculateFrustumPlanes(Camera.main, cameraFrustumPlanes);
    //}
    node.ClearDebugColor();
    if (GeometryUtility.TestPlanesAABB(cameraFrustumPlanes, node.GetBounds()))
    {
    node.debugColor = Color.blue;
    if (node.models != null)
    {
    foreach (var model in node.models)
    {
    visibleModel.Add(model.Key);
    }
    }
    if (node.childs != null)
    {
    foreach (var child in node.childs)
    {
    TestPlanesAABB(cameraFrustumPlanes, visibleModel, child);
    }
    }
    }
    }

    #region 自己获得视锥平面 并进行AABB检测
    //一个点和一个法向量确定一个平面
    public static Vector4 GetPlane(Vector3 normal, Vector3 point)
    {
    return new Vector4(normal.x, normal.y, normal.z, -Vector3.Dot(normal, point));
    }
    //三点确定一个平面
    public static Vector4 GetPlane(Vector3 a, Vector3 b, Vector3 c)
    {
    Vector3 normal = Vector3.Normalize(Vector3.Cross(b - a, c - a));
    return GetPlane(normal, a);
    }
    //获取视锥体远平面的四个点
    public static Vector3[] GetCameraFarClipPlanePoint(Camera camera)
    {
    Vector3[] points = new Vector3[4];
    Transform transform = camera.transform;
    float distance = camera.farClipPlane;
    float halfFovRad = Mathf.Deg2Rad * camera.fieldOfView * 0.5f;
    float upLen = distance * Mathf.Tan(halfFovRad);
    float rightLen = upLen * camera.aspect;
    Vector3 farCenterPoint = transform.position + distance * transform.forward;
    Vector3 up = upLen * transform.up;
    Vector3 right = rightLen * transform.right;
    points[0] = farCenterPoint - up - right;//left-bottom
    points[1] = farCenterPoint - up + right;//right-bottom
    points[2] = farCenterPoint + up - right;//left-up
    points[3] = farCenterPoint + up + right;//right-up
    return points;
    }
    //获取视锥体的六个平面
    public static Vector4[] GetFrustumPlane(Camera camera)
    {
    Vector4[] planes = new Vector4[6];
    Transform transform = camera.transform;
    Vector3 cameraPosition = transform.position;
    Vector3[] points = GetCameraFarClipPlanePoint(camera);
    //顺时针
    planes[0] = GetPlane(cameraPosition, points[0], points[2]);//left
    planes[1] = GetPlane(cameraPosition, points[3], points[1]);//right
    planes[2] = GetPlane(cameraPosition, points[1], points[0]);//bottom
    planes[3] = GetPlane(cameraPosition, points[2], points[3]);//up
    planes[4] = GetPlane(-transform.forward, transform.position + transform.forward * camera.nearClipPlane);//near
    planes[5] = GetPlane(transform.forward, transform.position + transform.forward * camera.farClipPlane);//far
    return planes;
    }

    public bool TestAABB(Vector4[] cameraFrustumPlanes, Bounds bounds)
    {
    Vector3[] boundVextex = new Vector3[8];
    boundVextex[0] = bounds.center + bounds.extents;
    boundVextex[1] = bounds.center - bounds.extents;
    boundVextex[2] = bounds.center + new Vector3(bounds.extents.x, bounds.extents.y, -bounds.extents.z);
    boundVextex[3] = bounds.center + new Vector3(bounds.extents.x, -bounds.extents.y, bounds.extents.z);
    boundVextex[4] = bounds.center + new Vector3(-bounds.extents.x, bounds.extents.y, bounds.extents.z);
    boundVextex[5] = bounds.center + new Vector3(-bounds.extents.x, -bounds.extents.y, bounds.extents.z);
    boundVextex[6] = bounds.center + new Vector3(-bounds.extents.x, bounds.extents.y, -bounds.extents.z);
    boundVextex[7] = bounds.center + new Vector3(bounds.extents.x, -bounds.extents.y, -bounds.extents.z);

    // 循环8个包围盒顶点
    for (int i = 0; i < 6; i++)
    {
    for (int j = 0; j < 8; j++)
    {
    Vector4 plane = cameraFrustumPlanes[i];
    Vector3 boundPosition = boundVextex[j];
    if (Vector3.Dot(new Vector3(plane.x, plane.y, plane.z), boundPosition) + plane.w <= 0)
    break;
    if (j == 7)
    return false;
    }
    }
    return true;
    }
    #endregion
    }
    }
  • 树结点

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    114
    115
    116
    117
    118
    119
    120
    121
    122
    123
    124
    125
    126
    127
    128
    129
    130
    131
    132
    133
    134
    135
    136
    137
    138
    139
    140
    141
    142
    143
    144
    145
    146
    147
    148
    149
    150
    151
    152
    153
    154
    155
    156
    157
    158
    159
    160
    161
    162
    163
    164
    165
    166
    167
    168
    169
    170
    171
    172
    173
    174
    175
    176
    177
    178
    179
    180
    181
    182
    183
    184
    185
    186
    187
    188
    189
    190
    191
    192
    193
    194
    195
    196
    197
    198
    199
    200
    201
    202
    203
    204
    205
    206
    207
    208
    209
    210
    211
    212
    213
    214
    215
    216
    217
    218
    219
    220
    221
    222
    223
    224
    225
    226
    227
    228
    229
    230
    231
    232
    233
    234
    235
    236
    237
    238
    239
    240
    241
    242
    243
    244
    245
    246
    247
    248
    249
    250
    251
    252
    253
    254
    255
    256
    257
    258
    259
    260
    261
    262
    263
    264
    265
    using System;
    using System.Collections.Generic;
    using UnityEngine;

    namespace Gameplay
    {
    public class Node
    {
    public float width;
    public float length;
    public float height;
    public Vector3 center;
    public Node parent;
    public Tree tree;
    public List<Node> childs;
    public int depth;
    public Color debugColor;
    public Dictionary<uint, IRect> models;

    private TreeType treeType;

    public Node() { }

    public Node(TreeType treeType, float length, float width, float height, Vector3 center , int depth, Tree tree, Node parent = null)
    {
    Set(treeType, length, width, height, center, depth, tree, parent);
    }

    public Node Set(TreeType treeType, float length, float width, float height, Vector3 center, int depth, Tree tree, Node parent = null)
    {
    this.treeType = treeType;
    this.length = length;
    this.width = width;
    this.height = height;
    this.center = center;
    this.parent = parent;
    this.depth = depth;
    this.tree = tree;
    return this;
    }

    public void UpdatePosition(Vector3 offset, Dictionary<uint,IRect> removes)
    {
    center += offset;
    if (childs != null)
    {
    foreach (var child in childs)
    {
    child.UpdatePosition(offset, removes);
    }
    }
    if (models != null)
    {
    // 再校验一下model是不是在当前结点中,不在 移除重新加入
    List<uint> list = ListPool.Get<uint>();
    foreach (var model in models.Values)
    {
    if (!CheckModelInNode(model))
    {
    list.Add(model.id);
    }
    }
    foreach (var id in list)
    {
    if (!removes.ContainsKey(id))
    {
    removes.Add(id, models[id]);
    }
    models.Remove(id);
    }
    ListPool.Release(list);
    }
    }

    /// <summary>
    /// 模型是否在结点内
    /// </summary>
    private bool CheckModelInNode(IRect model)
    {
    float halfWidth = model.width * 0.5f;
    //float halfHeight = model.height * 0.5f;
    float halfLen = model.length * 0.5f;

    float minX = model.center.x - halfWidth;
    float maxX = model.center.x + halfWidth;
    //float minY = model.center.y - halfHeight;
    //float maxY = model.center.y + halfHeight;
    float minZ = model.center.z - halfLen;
    float maxZ = model.center.z + halfLen;

    halfWidth = width * 0.5f;
    //halfHeight = height * 0.5f;
    halfLen = length * 0.5f;

    // 不在当前结点内
    if (minX > center.x + halfWidth
    || maxX < center.x - halfWidth
    //|| minY > center.y + halfHeight
    //|| maxY < center.y - halfHeight
    || minZ > center.z + halfLen
    || maxZ < center.z - halfLen)
    {
    return false;
    }
    return true;
    }

    /// <summary>
    /// 分割场景
    /// </summary>
    /// <param name="minLength">最小格子长度</param>
    /// <param name="minWidth">最小格子宽度</param>
    /// <param name="minHeight">最小格子高度</param>
    public void Partition(float minLength, float minWidth, float minHeight, Func<Node, bool> compare = null)
    {
    if (length <= minLength || width <= minWidth || height <= minHeight)
    return;

    if (compare != null && !compare.Invoke(this))
    return;

    if (childs != null)
    childs.Clear();
    else
    childs = ListPool.Get<Node>();
    for (int i = 0; i < (int)treeType; i++)
    {
    childs.Add(null);
    }
    switch (treeType)
    {
    case TreeType.Two:
    float subLen = length > width ? length * 0.5f : length;
    float subWidth = length > width ? width : width * 0.5f;
    float subHeight = height;
    childs[(int)TreeBlitType.L_T_T - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center - new Vector3(width - subWidth, height - subHeight, length - subLen) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.R_T_T - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(width - subWidth, height - subHeight, length - subLen) * 0.5f, depth + 1, tree, this);
    break;
    case TreeType.Four:
    subLen = length * 0.5f;
    subWidth = width * 0.5f;
    subHeight = height;
    childs[(int)TreeBlitType.L_T_T - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(-(width - subWidth), height - subHeight, length - subLen) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.R_T_T - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(width - subWidth, height - subHeight, length - subLen) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.R_B_T - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(width - subWidth, height - subHeight, -(length - subLen)) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.L_B_T - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(-(width - subWidth), height - subHeight, -(length - subLen)) * 0.5f, depth + 1, tree, this);
    break;
    case TreeType.Eight:
    subLen = length * 0.5f;
    subWidth = width * 0.5f;
    subHeight = height * 0.5f;
    childs[(int)TreeBlitType.R_T_T - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(width - subWidth, height - subHeight, length - subLen) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.R_B_T - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(width - subWidth, height - subHeight, -(length - subLen)) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.L_T_T - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(-(width - subWidth), height - subHeight, length - subLen) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.L_B_T - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(-(width - subWidth), height - subHeight, -(length - subLen)) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.R_T_B - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(width - subWidth, -(height - subHeight), length - subLen) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.R_B_B - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(width - subWidth, -(height - subHeight), -(length - subLen)) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.L_T_B - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(-(width - subWidth), -(height - subHeight), length - subLen) * 0.5f, depth + 1, tree, this);
    childs[(int)TreeBlitType.L_B_B - 1] = ClassPool.Get<Node>().Set(treeType, subLen, subWidth, subHeight, center + new Vector3(-(width - subWidth), -(height - subHeight), -(length - subLen)) * 0.5f, depth + 1, tree, this);
    break;
    }

    foreach (var node in childs)
    {
    node.Partition(minLength, minWidth, minHeight, compare);
    }
    }

    public void AddModel(IRect model)
    {
        // Lazily create the id -> model map on first insertion.
        if (models == null)
        {
            models = DictionaryPool.Get<uint, IRect>();
        }

        // Ignore duplicates: each model is stored at most once per node.
        if (!models.ContainsKey(model.id))
        {
            models[model.id] = model;
        }
    }

    public Bounds GetBounds()
    {
        // Axis-aligned bounds of this node: x = width, y = height, z = length.
        Vector3 size = new Vector3(width, height, length);
        return new Bounds(center, size);
    }

    public void RemoveModels()
    {
        // Return this node's model map to the pool, if it exists.
        if (models != null)
        {
            DictionaryPool.Release(models);
            models = null;
        }

        // Recursively clear every child subtree as well.
        if (childs == null)
            return;
        foreach (var child in childs)
        {
            child.RemoveModels();
        }
    }

    public void Cleanup()
    {
        // Recycle this node's model map first.
        if (models != null)
        {
            DictionaryPool.Release(models);
            models = null;
        }

        // Then tear down the subtree: each child cleans itself up before it
        // is returned to the class pool, and finally the child list itself
        // goes back to the list pool.
        if (childs == null)
            return;
        foreach (var child in childs)
        {
            child.Cleanup();
            ClassPool.Release(child);
        }
        ListPool.Release(childs);
        childs = null;
    }

    // Resets this node's (and recursively every child's) debug draw color to
    // the current Gizmos.color. NOTE(review): "clear" here means "take whatever
    // Gizmos.color happens to be" — presumably callers invoke this outside any
    // custom-colored gizmo scope so it reverts to the default; confirm.
    public void ClearDebugColor()
    {
    debugColor = Gizmos.color;
    if (childs != null)
    {
    foreach (var child in childs)
    {
    child.ClearDebugColor();
    }
    }
    }

    // Draws this node's box as 12 wireframe edges in debugColor, restoring
    // the previous gizmo color afterwards, then recurses into children.
    public void OnDrawGizmos()
    {
        // Half-extents along each axis of this node's box.
        Vector3 halfF = Vector3.forward * (length * 0.5f);
        Vector3 halfR = Vector3.right * (width * 0.5f);
        Vector3 halfU = Vector3.up * (height * 0.5f);

        // Eight corners, indexed by a 3-bit mask:
        // bit0 = +/-forward, bit1 = +/-right, bit2 = +/-up.
        Vector3[] corners = new Vector3[8];
        for (int mask = 0; mask < 8; mask++)
        {
            Vector3 p = center;
            p += ((mask & 1) != 0) ? halfF : -halfF;
            p += ((mask & 2) != 0) ? halfR : -halfR;
            p += ((mask & 4) != 0) ? halfU : -halfU;
            corners[mask] = p;
        }

        Color saved = Gizmos.color;
        Gizmos.color = debugColor;

        // An edge connects two corners whose masks differ in exactly one bit;
        // enumerating only the "set the bit" direction yields each edge once.
        for (int a = 0; a < 8; a++)
        {
            for (int bit = 0; bit < 3; bit++)
            {
                int b = a | (1 << bit);
                if (b != a)
                    Gizmos.DrawLine(corners[a], corners[b]);
            }
        }

        Gizmos.color = saved;

        if (childs != null)
        {
            foreach (var child in childs)
            {
                child.OnDrawGizmos();
            }
        }
    }
    }
    }
  • 实际使用流程

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    // 1. 生成树空间,设置大小
    Camera cam = Camera.main;
    Gameplay.Tree quadTree = new Gameplay.Tree(TreeType.Four, cam.farClipPlane, cam.farClipPlane, 2, cam.transform.position);

    // 2. 分割空间
    quadTree.Partition(10, 10, 1);

    // 3. 插入模型
    public struct Grass : IRect
    {
    private Vector3 m_Center;
    private float m_Width;
    private float m_Length;
    private float m_Height;
    private uint m_Id;

    public Vector3 center { get => m_Center; set => m_Center = value; }
    public float width { get => m_Width; set => m_Width = value; }
    public float length { get => m_Length; set => m_Length = value; }
    public float height { get => m_Height; set => m_Height = value; }
    public uint id { get => m_Id; set => m_Id = value; }
    }

    quadTree.RemoveModels();
    for (int i = 0; i < allGrassPos.Count; i++)
    {
    // 将模型加入到四叉树中
    quadTree.InsertTree(new Grass { center = allGrassPos[i], width = 0.2f, length = 0.2f, id = (uint)i });
    }

    // 4. 更新坐标,并进行AABB检测,grassIdSorted返回的是通过检测的结点下的所有模型id
    quadTree.UpdatePosition(cam.transform.position, cam.transform.localEulerAngles);
    grassIdSorted.Clear();
    quadTree.TestPlanesAABB(cameraFrustumPlanes, grassIdSorted);
总结

经测试发现使用四叉树剔除会导致CPU端消耗加大,可以使GPU并行线程数减少2/3,开启与否是一种取舍,对于数量并不是很大的实例绘制不需要开启

Tile管理加速视锥剔除

  1. 不同于四叉树,使用Tile等块划分草地,可以把每块剔除放进GPU执行,四叉树剔除需要递归因此不支持
  2. 思路:先确定草地的范围,再等块划分,使用ComputeShader先剔除格子,然后再对可视的格子的模型做逐一剔除,可以减少GPU线程数

Tile加速剔除

代码
  • 格子裁剪与基础模型裁剪思路一致,格子为一个长方体,裁剪完得到的_TileVisibleIdBuffer里面显示的格子=1

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    #pragma kernel TileCulling

    // View-projection matrix (P * V) of the culling camera.
    float4x4 _VPMatrix;
    // Tiles whose clip-space w (view depth) exceeds this distance are culled.
    float _MaxDrawDistance;

    // tile culling
    // Tile footprint in world units: x = width (along X), y = length (along Z).
    float2 _TileSize;
    // Min-corner world position of each tile.
    StructuredBuffer<float3> _TilePositionBuffer;
    // Output flags: 1 when the tile passes both the distance and frustum tests.
    RWStructuredBuffer<uint> _TileVisibleIdBuffer;

    [numthreads(64, 1, 1)]
    void TileCulling(uint3 id : SV_DispatchThreadID)
    {
        float3 posWS = _TilePositionBuffer[id.x];

        // Distance test at the tile centre. BUGFIX: the point must be promoted
        // to float4 with w = 1 before multiplying by the 4x4 VP matrix; passing
        // a float3 relies on implicit truncation and silently drops the
        // matrix's translation column, making the distance test wrong.
        float3 centerWS = posWS + float3(_TileSize.x * 0.5, 0, _TileSize.y * 0.5);
        float4 positionCS = mul(_VPMatrix, float4(centerWS, 1));

        if (abs(positionCS.w) > _MaxDrawDistance)
        {
            return;
        }

        // Flat tile AABB (zero height): test its 8 corners against clip space.
        float3 boundMin = posWS;
        float3 boundMax = posWS + float3(_TileSize.x, 0, _TileSize.y);

        float4 boundVerts[8];
        boundVerts[0] = float4(boundMin, 1);
        boundVerts[1] = float4(boundMax, 1);
        boundVerts[2] = float4(boundMax.x, boundMax.y, boundMin.z, 1);
        boundVerts[3] = float4(boundMax.x, boundMin.y, boundMax.z, 1);
        boundVerts[4] = float4(boundMax.x, boundMin.y, boundMin.z, 1);
        boundVerts[5] = float4(boundMin.x, boundMax.y, boundMax.z, 1);
        boundVerts[6] = float4(boundMin.x, boundMax.y, boundMin.z, 1);
        boundVerts[7] = float4(boundMin.x, boundMin.y, boundMax.z, 1);

        // Visible as soon as any corner lands inside clip space.
        // IsInClipSpace is provided by the shared culling include.
        // NOTE: a corner-only test is not a full AABB/frustum test — a tile
        // large enough to span the frustum with all corners outside would be
        // wrongly culled; acceptable while tiles are smaller than the frustum.
        bool isInClipSpace = false;
        for (int i = 0; i < 8; i++)
        {
            float4 clipSpace = mul(_VPMatrix, boundVerts[i]);
            if (IsInClipSpace(clipSpace))
            {
                isInClipSpace = true;
                break;
            }
        }

        if (!isInClipSpace)
            return;

        _TileVisibleIdBuffer[id.x] = 1;
    }
  • C#代码 之后会放出继承类InstancedIndirectRender代码

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    114
    115
    116
    117
    118
    119
    120
    121
    122
    123
    124
    125
    126
    127
    128
    129
    130
    131
    132
    133
    134
    135
    136
    137
    138
    139
    140
    141
    142
    143
    144
    145
    146
    147
    148
    149
    150
    151
    152
    153
    154
    155
    156
    157
    158
    159
    160
    161
    162
    163
    164
    165
    166
    167
    168
    169
    170
    171
    172
    173
    174
    175
    176
    177
    178
    179
    180
    181
    182
    183
    184
    185
    186
    187
    188
    189
    190
    191
    192
    193
    194
    195
    196
    197
    198
    199
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Text;
    using UnityEngine;
    using UnityEngine.Profiling;
    using UnityEngine.Rendering;

    // Instanced-indirect renderer with a GPU tile-culling pre-pass: the field
    // is split into equal-size tiles, a compute kernel first culls whole tiles,
    // and only instances inside visible tiles enter per-instance culling.
    class IndirectRendererTile : InstancedIndirectRender
    {
    public const string CS_TileCullingKernelName = "TileCulling";

    // Enable equal-tile GPU culling.
    public bool isUseGPUTileCulling;
    // Tile size in world units: x = width, y = length.
    public Vector2 tileSize = new Vector2(30, 30);

    // Min-corner world position of every tile.
    private ComputeBuffer tilePositionBuffer;
    // Per-tile visibility flags written by the compute kernel (1 = visible).
    private ComputeBuffer tileVisibleIdBuffer;
    // After tile culling: ids (indices into instances) of every model that
    // sits inside a visible tile.
    private ComputeBuffer otherVisibleIdBuffer;

    private List<Vector3> tiles;
    private List<uint>[] modelsInTile;
    private uint[] tileVisibles;
    private List<uint> visibleList;

    // Debug-only snapshot of the latest tile visibility, consumed by gizmos.
    private uint[] debugTileVisible;

    // Lazily builds the tile grid over the instance bounds plus the buffers
    // used by the tile-culling kernel (runs once while buffers are null).
    protected override void UpdateBuffers()
    {
    base.UpdateBuffers();

    if (isUseGPUTileCulling && (tilePositionBuffer == null || tileVisibleIdBuffer == null))
    {
    int tileCountX = Mathf.CeilToInt((boundMax.x - boundMin.x) / tileSize.x);
    int tileCountZ = Mathf.CeilToInt((boundMax.z - boundMin.z) / tileSize.y);

    tiles = ListPool.Get<Vector3>();
    visibleList = ListPool.Get<uint>();

    // Bucket every instance into the tile containing its XZ position.
    modelsInTile = new List<uint>[tileCountX * tileCountZ];
    for (int i = 0; i < modelsInTile.Length; i++)
    {
    modelsInTile[i] = ListPool.Get<uint>();
    }
    for (int i = 0; i < instances.Count; i++)
    {
    Vector3 pos = instances[i].GetPosition();

    //find cellID
    int xID = Mathf.Min(tileCountX - 1, Mathf.FloorToInt(Mathf.InverseLerp(boundMin.x, boundMax.x, pos.x) * tileCountX)); //use min to force within 0~[cellCountX-1]
    int zID = Mathf.Min(tileCountZ - 1, Mathf.FloorToInt(Mathf.InverseLerp(boundMin.z, boundMax.z, pos.z) * tileCountZ)); //use min to force within 0~[cellCountZ-1]

    modelsInTile[zID + xID * tileCountZ].Add((uint)i);
    }

    // Tile positions are emitted in the same x-major order as the buckets
    // above (index = z + x * tileCountZ), so the two arrays line up.
    for (int i = 0; i < tileCountX; i++)
    {
    for (int j = 0; j < tileCountZ; j++)
    {
    tiles.Add(new Vector3(boundMin.x + tileSize.x * i, 0, boundMin.z + tileSize.y * j));
    }
    }

    tilePositionBuffer = new ComputeBuffer(tiles.Count, sizeof(float) * 3);
    tilePositionBuffer.SetData(tiles);

    tileVisibles = new uint[tiles.Count];
    debugTileVisible = new uint[tiles.Count];
    tileVisibleIdBuffer = new ComputeBuffer(tiles.Count, sizeof(uint));
    }
    }
    // Dispatches the tile-culling kernel, reads the flags back, and gathers the
    // ids of all instances inside visible tiles before the base render pass.
    // NOTE(review): GetData immediately after Dispatch is a synchronous GPU
    // readback and stalls the CPU for the frame — consider AsyncGPUReadback if
    // this shows up in profiling.
    protected override void UpdateRender(CommandBuffer cmd)
    {
    if (isUseGPUTileCulling && tiles.Count > 0)
    {
    Profiler.BeginSample("Tile Culling");

    Matrix4x4 v = cullingCamera.worldToCameraMatrix;
    Matrix4x4 p = cullingCamera.projectionMatrix;
    indirectComputerShader.SetMatrix("_VPMatrix", p * v);
    indirectComputerShader.SetFloat("_MaxDrawDistance", drawDistance);
    indirectComputerShader.SetVector("_TileSize", tileSize);

    int kernel = indirectComputerShader.FindKernel(CS_TileCullingKernelName);
    indirectComputerShader.SetBuffer(kernel, "_TilePositionBuffer", tilePositionBuffer);
    // tileVisibles is all zeros here (flags are reset below after use), so
    // this upload clears the GPU-side flags before the dispatch.
    tileVisibleIdBuffer.SetData(tileVisibles);
    indirectComputerShader.SetBuffer(kernel, "_TileVisibleIdBuffer", tileVisibleIdBuffer);

    indirectComputerShader.Dispatch(kernel, Mathf.CeilToInt(tiles.Count / 64f), 1, 1);

    tileVisibleIdBuffer.GetData(tileVisibles);

    // Collect instance ids from every visible tile and zero the flag so the
    // array is clean for the next frame's upload.
    visibleList.Clear();
    for (int i = 0; i < tileVisibles.Length; i++)
    {
    debugTileVisible[i] = tileVisibles[i];
    if (tileVisibles[i] == 1)
    {
    foreach (uint id in modelsInTile[i])
    {
    visibleList.Add(id);
    }
    tileVisibles[i] = 0;
    }
    }

    if (visibleList.Count > 0)
    {
    indirectComputerShader.EnableKeyword("EnableTileCulling");
    // Recreate the buffer each frame since the survivor count changes.
    if (otherVisibleIdBuffer != null)
    otherVisibleIdBuffer.Release();
    otherVisibleIdBuffer = new ComputeBuffer(visibleList.Count, sizeof(uint));
    otherVisibleIdBuffer.SetData(visibleList);
    // NOTE(review): kernel index 0 is presumably the base per-instance
    // culling kernel — confirm against InstancedIndirectRender.
    indirectComputerShader.SetBuffer(0, "_OtherVisibleIdBuffer", otherVisibleIdBuffer);
    }

    Profiler.EndSample();
    }
    else
    {
    indirectComputerShader.DisableKeyword("EnableTileCulling");
    }

    base.UpdateRender(cmd);
    }
    // With tile culling active, only the tile survivors need per-instance
    // culling threads (64 threads per group, matching the kernel).
    protected override int GetDispatchCount()
    {
    if (isUseGPUTileCulling)
    return Mathf.CeilToInt(visibleList.Count / 64f);
    else
    return base.GetDispatchCount();
    }

    // Releases all GPU buffers and returns pooled collections.
    protected override void ReleaseBuffers()
    {
    base.ReleaseBuffers();

    if (tilePositionBuffer != null)
    {
    tilePositionBuffer.Release();
    tilePositionBuffer = null;
    }
    if (tileVisibleIdBuffer != null)
    {
    tileVisibleIdBuffer.Release();
    tileVisibleIdBuffer = null;
    }
    if (otherVisibleIdBuffer != null)
    {
    otherVisibleIdBuffer.Release();
    otherVisibleIdBuffer = null;
    }
    if (tiles != null)
    {
    ListPool.Release(tiles);
    tiles = null;
    }
    if (modelsInTile != null)
    {
    foreach (var list in modelsInTile)
    {
    ListPool.Release(list);
    }
    modelsInTile = null;
    }
    if (visibleList != null)
    {
    ListPool.Release(visibleList);
    visibleList = null;
    }
    }

    // Draws each tile's footprint, red when it passed the last culling pass.
    // NOTE(review): debugTileVisible survives ReleaseBuffers while tiles does
    // not — if gizmos run after release, tiles[i] would throw; verify ordering.
    public override void OnDrawGizmosSelected()
    {
    base.OnDrawGizmosSelected();

    if (debugTileVisible != null)
    {
    for (int i = 0; i < debugTileVisible.Length; i++)
    {
    Color oldColor = Gizmos.color;
    Gizmos.color = debugTileVisible[i] == 1 ? Color.red : oldColor;

    Gizmos.DrawLine(tiles[i], tiles[i] + new Vector3(tileSize.x, 0));
    Gizmos.DrawLine(tiles[i], tiles[i] + new Vector3(0, 0, tileSize.y));
    Gizmos.DrawLine(tiles[i] + new Vector3(tileSize.x, 0, tileSize.y), tiles[i] + new Vector3(tileSize.x, 0));
    Gizmos.DrawLine(tiles[i] + new Vector3(tileSize.x, 0, tileSize.y), tiles[i] + new Vector3(0, 0, tileSize.y));

    Gizmos.color = oldColor;
    }
    }
    }
    }

总结

与四叉树剔除类似,主要目的是为了减少剔除次数,但放在GPU工作还是放在CPU工作,也需要取舍,如果不使用四叉树,全部剔除操作放在GPU,那么可以使用Tile加速剔除提升性能

Hierarchical Z-Buffering遮挡剔除

【Unity】使用Compute Shader实现Hi-z遮挡剔除(Occlusion Culling)
Compute Shader 进阶应用:结合Hi-Z 剔除海量草渲染

  • 我们要实现如下的效果:
    HiZ遮挡剔除
  1. 怎么做遮挡剔除?
    使用相机深度信息判断当前模型是否被完全遮挡,是则剔除

  2. 怎么获得深度信息?
    通过相机深度图_CameraDepthTexture

  3. 怎么判断是否被完全遮挡?
    判断模型在屏幕上所有像素的深度都大于相机深度图

  4. 怎么获取模型在屏幕上所有像素?
    根据物体的包围盒确定所在的屏幕坐标,但逐像素对比性能消耗很大

  5. 怎么进行深度信息对比?
    逻辑上是 包围盒覆盖的像素点 挨个都挡住物体,就裁剪物体。但这样需要对比很多个像素性能很差,所以提出了Hi-Z概念。就是把深度图创建出多个mipmaps,mip0 就是 原始深度图信息,mip1 就是1/4 mip0面积大小,4个mip0像素 取最远离相机的那个值写入一个mip1像素,mip2同理不断创建更低精度的图。这样一个物体包围盒 如果是在mip0图上 占据16x16像素。就不用对比256次了,只需要 找到mip4 上一个像素就可以了,因为这一个像素记录的是这16x16像素最远离相机的深度 如果它都挡住了物体那么 那么其他的像素更靠近相机 肯定就能确定整个物体都被挡住了

  6. 怎么获得深度图mipmaps?
    通过Graphics.CopyTexture中有一个参数是将原图片拷贝到目标图片的哪一级mip

  7. 怎么获得逐级递减的深度图信息?
    核心代码如下:
    shader中每次获得采样invSize一半的深度图

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    // Downsamples the depth buffer by one mip level: samples the four source
    // texels under this destination texel and keeps the one farthest from the
    // camera, so every Hi-Z mip stores a conservative (farthest) depth.
    // inUV: destination-texel UV; invSize: 1 / destination mip resolution.
    float HZBReduce(float2 inUV, float2 invSize)
    {
        float4 depth;
        float2 uv0 = inUV + float2(-0.25f, -0.25f) * invSize;
        float2 uv1 = inUV + float2(0.25f, -0.25f) * invSize;
        float2 uv2 = inUV + float2(-0.25f, 0.25f) * invSize;
        float2 uv3 = inUV + float2(0.25f, 0.25f) * invSize;

        // BUGFIX: SAMPLE_TEXTURE2D returns float4; take .r explicitly instead
        // of relying on implicit float4 -> float truncation (a warning on DXC
        // and an error under strict compilation). The depth RT is single-channel.
        depth.x = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv0).r;
        depth.y = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv1).r;
        depth.z = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv2).r;
        depth.w = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv3).r;

        // With reversed Z (1 at the near plane) the farthest depth is the
        // minimum; with conventional Z it is the maximum.
        #if defined(UNITY_REVERSED_Z)
        return min(min(depth.x, depth.y), min(depth.z, depth.w));
        #else
        return max(max(depth.x, depth.y), max(depth.z, depth.w));
        #endif
    }

    RenderFeature中一层层往下获取mipmap,对边长<=8像素的mip层用处不大,手动截止

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    //Copy Depth To Buffer
    cmd.Blit(null, depthZBufferIdentifier, material, 0);
    // Build the Hi-Z pyramid: each iteration halves the edge size, runs the
    // reduction pass (material pass 1) from the previous level into a temp RT,
    // then copies the temp into mip (index + 1) of the pyramid texture.
    // The loop stops once the edge would drop to <= 8 pixels — such tiny mips
    // add little culling value.
    int index = 0;
    while (size > 8)
    {
    int temporariesId = GetTemporariesTextureId(index);
    int prevId = GetTemporariesTextureId(index - 1);
    size >>= 1;
    size = Mathf.Max(size, 1);
    cmd.GetTemporaryRT(temporariesId, size, size, 0, FilterMode.Point, RenderTextureFormat.RHalf, RenderTextureReadWrite.Linear);
    if (index == 0)
    {
    // First level reduces directly from the copied depth buffer.
    cmd.SetGlobalTexture(HZShaderLibrary.HZMainTexName, depthZBufferIdentifier);
    cmd.Blit(depthZBufferIdentifier, temporariesId, material, 1);
    }
    else
    {
    // Subsequent levels reduce from the previous temporary RT.
    cmd.SetGlobalTexture(HZShaderLibrary.HZMainTexName, prevId);
    cmd.Blit(prevId, temporariesId, material, 1);
    }
    cmd.CopyTexture(temporariesId, 0, 0, depthZBufferIdentifier, 0, index + 1);
    if (index > 0)
    {
    // The previous temporary has been consumed and can be released.
    cmd.ReleaseTemporaryRT(prevId);
    }
    index++;
    }
  8. 代码中怎么进行HiZ剔除?

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    // HLSLSupport.cginc is required for the UNITY_REVERSED_Z define used below.
    #include "HLSLSupport.cginc"

    // Hi-Z Culling
    // Returns true when the object's clip-space bounding rectangle is NOT fully
    // occluded according to the Hi-Z depth pyramid. Inputs are the object's
    // clip-space (NDC, [-1,1]) bounding extents; clipMinZ is its nearest depth.
    inline bool IsVisibleAfterOcclusionCulling(float clipMinX, float clipMaxX, float clipMinY, float clipMaxY, float clipMinZ)
    {
    // [-1 , 1] to [0 , 1]
    float2 minXY = float2(clipMinX, clipMinY) * 0.5 + 0.5;
    float2 maxXY = float2(clipMaxX, clipMaxY) * 0.5 + 0.5;

    // Choose the mip whose texel roughly covers the screen-space rect, so a
    // handful of samples conservatively bounds the whole footprint.
    int2 size = (maxXY - minXY) * _HiZTextureSize.xx;
    float mip = ceil(log2(max(size.x, size.y)));
    mip = clamp(mip, 0, 10);

    // find the max depth
    // Hi-Z approach that allows for more samples.
    // https://www.gamedev.net/blogs/entry/2249535-hierarchical-z-buffer-occlusion-culling-updated-07152011/
    // sampleCount = (xSamples + 1) * (xSamples + 1)
    const int xSamples = 2;
    const int ySamples = 2;
    float widthSS = (maxXY.x - minXY.x);
    float heightSS = (maxXY.y - minXY.y);
    float stepX = widthSS / xSamples;
    float stepY = heightSS / ySamples;

    // Sample a (xSamples+1) x (ySamples+1) grid over the rect and keep the
    // minimum, i.e. the farthest stored depth under reversed Z.
    float HIZdepth = 1;
    float yPos = minXY.y;
    for (int y = 0; y <= ySamples; ++y)
    {
    float xPos = minXY.x;
    for (int x = 0; x <= xSamples; ++x)
    {
    const float2 nCoords0 = float2(xPos, yPos);
    HIZdepth = min(HIZdepth, _HiZTextureTex.SampleLevel(sampler_HiZTextureTex, nCoords0, mip).r);
    xPos += stepX;
    }
    yPos += stepY;
    }

    // Visible when the object's nearest depth is not behind the farthest
    // occluder depth found above. NOTE(review): the (1.0 - clipMinZ) remap in
    // the reversed-Z branch assumes clipMinZ was computed in conventional
    // orientation by the caller — verify against the culling kernel.
    #if UNITY_REVERSED_Z
    return (1.0 - clipMinZ) >= HIZdepth;
    #else
    return clipMinZ >= HIZdepth;
    #endif
    }
  9. 在使用HiZ深度图时,渲染模型需要在HiZ深度图绘制之后进行,因此需要自己控制实例渲染,在RenderFeature中先生成深度图,再进行渲染
    一个Pass用来生成深度图,另一个Pass用来渲染,下面为了方便使用 我们构建一个通用的instance渲染流程

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    // Enqueues the Hi-Z depth-pyramid pass and then the indirect-draw pass;
    // the draw pass must run after the depth pyramid has been generated.
    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        // Without the Hi-Z shader no depth pyramid can be built — skip both passes.
        if (shader == null)
        {
            return;
        }

        m_ZBufferPass.Setup(shader);
        renderer.EnqueuePass(m_ZBufferPass);

        // The indirect pass is only enqueued when its own setup succeeds.
        bool drawPassReady = m_DrawIndirectPass.Setup();
        if (drawPassReady)
        {
            renderer.EnqueuePass(m_DrawIndirectPass);
        }
    }

通用shader以及库

  • 通用库用于快速获取世界坐标,世界法线,IndirectShaderLibrary.hlsl

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    #ifndef INDIRECTSHADERLIBRARY_INCLUDE
    #define INDIRECTSHADERLIBRARY_INCLUDE

    // Shared helpers for instanced-indirect shaders: fetch an instance's TRS
    // matrix and derive object/world positions and world normals from it.
    //Matrix4x4 v = Camera.main.worldToCameraMatrix;
    //Matrix4x4 p = Camera.main.projectionMatrix;

    // NOTE(review): StructuredBuffer declarations are resource bindings, not
    // constant-buffer members — placing them inside CBUFFER_START is misleading
    // and may interfere with SRP Batcher compatibility; consider moving them out.
    CBUFFER_START(UnityPerMaterial)
    // Usually identity: {1,0,0,0}{0,1,0,0}{0,0,1,0}{0,0,0,1}
    float4x4 _LocalToWorld;
    // Per-instance TRS data used to compute positions (original note: position = xyz, scale = w).
    StructuredBuffer<float4x4> _Positions;
    // Ids of all instances that survived culling; the survivor count drives the indirect draw call.
    StructuredBuffer<uint> _VisibleInstanceIds;
    CBUFFER_END


    // TRS matrix of the instanceID-th *visible* instance (indirection through
    // the culled-id list).
    float4x4 GetInstanceMatrix(uint instanceID)
    {
    return _Positions[_VisibleInstanceIds[instanceID]];
    }

    // Translation component (fourth column) of the instance matrix.
    float3 GetInstanceMatrixPosition(uint instanceID)
    {
    float4x4 trsMat = GetInstanceMatrix(instanceID);
    return float3(trsMat[0][3], trsMat[1][3], trsMat[2][3]);
    }

    // Vertex position transformed by the per-instance TRS (still in the
    // renderer's local space, before _LocalToWorld).
    float4 GetInstancePositionOS(uint instanceID,float4 positionOS)
    {
    float4x4 trsMat = GetInstanceMatrix(instanceID);
    float4 localPosition = mul(trsMat,float4(positionOS.xyz,1)); //float4( data.xyz + positionOS.xyz * data.w ,1);
    return localPosition;
    }

    // World-space vertex position: instance TRS, then the renderer transform.
    float3 GetInstancePositionWS(uint instanceID,float4 positionOS)
    {
    float4 localPosition = GetInstancePositionOS(instanceID,positionOS);
    float3 worldPosition = mul(_LocalToWorld, localPosition).xyz;
    return worldPosition;
    }

    // World-space normal via the rotation parts of both matrices.
    // NOTE(review): using the matrix directly (not inverse-transpose) is only
    // correct for uniform scaling — confirm instances never scale non-uniformly.
    float3 GetInstanceNormalWS(uint instanceID,float3 normalOS)
    {
    float4x4 rtsMat = GetInstanceMatrix(instanceID);
    return mul((float3x3)_LocalToWorld,mul((float3x3)rtsMat,normalOS));
    }

    #endif
  • 顶点着色器定义, uint instanceID : SV_InstanceID 接收实例id
    Varyings Vert(Attributes i, uint instanceID : SV_InstanceID)
    {
    float3 positionWS = GetInstancePositionWS(instanceID, i.positionOS);
    float3 normalWS = GetInstanceNormalWS(instanceID, i.normalOS);
    }

总结

构建这个instance流程为之后做大片草,树等渲染打好基础

草绘制

基础

菜鸡都能学会的Unity草地shader
利用GPU实现无尽草地的实时渲染
UnityURP-MobileDrawMeshInstancedIndirectExample
基于GPU Instance的草地渲染学习笔记

  • 早期项目的草一般都用一种简单的星型草
    每个草相当于一个实例,一个渲染大片草地的方案往往需要满足以下条件:
  1. 单个草的多边形不能过多,最好一棵草只用一个quad来表示
  2. 从不同的角度观察,草都必须显得密集
  3. 草的排布不能过于规则,否则会不自然
    因此出现了星型结构。
  • 利用Geometry Shader在GPU绘制草地
    对比于星型草效果可以更加真实,绘制计算都放在GPU里,性能消耗更少,但是只能针对顶点统一处理,很难做特殊化如中间镂空一块,这样需要模型中间镂空
    思路:在一个模型上每个顶点上,新建草顶点(三角形)

  • GPUInstance渲染草体,可以一个DC渲染成片草地
    对比于Geometry Shader,这种方式制作更方便,更容易管理,性能消耗低,使用时只需要传入需要绘制草的坐标即可
    可以很方便配合实现地形画刷等编辑器工具。
    下面针对UnityURP-MobileDrawMeshInstancedIndirectExample项目进行步骤拆解,重新实现一遍

GPUInstance绘制草地

基于GPU Instance的草地渲染学习笔记这个里讲的已经很详细了,我按照自己思路再过一遍
上面已经跟着实现了一套通用的instance渲染流程,里面也包括了视锥裁剪,遮挡剔除等部分,现在要绘制实际草地是需要实现对应shader即可

  1. 草模型,为3个顶点的三角形片,做成广告牌
  2. 处理草的颜色,处理草与地面衔接的渐变色,受光情况,处理主光源以及附加光源
  3. 加入风场
  4. 加入草的交互效果,被其他模型挤压

过程描述

  1. 制作草的广告牌,之前我们在[屏幕炫光,更好的广告牌算法]章节中计算过:
    拿到模型中心点的世界坐标,转换至视图空间,加上模型空间偏移,再转换至裁剪空间
    由于是广告牌,我们加上Cull Back 将后面剔除节省一些消耗

    1
    2
    3
    4
    // Billboard: bring the instance pivot into view space, then offset by the
    // quad's local XY so the quad always faces the camera.
    float4 pivotWS = float4(GetInstanceMatrixPosition(instanceID), 1);
    float4 pivotVS = mul(UNITY_MATRIX_V, pivotWS);
    // BUGFIX: the offset is a direction, so its w must be 0. Adding w = 1 to
    // pivotVS (whose w is already 1) produced w = 2 and a wrong clip-space
    // depth after projection.
    float4 positionVS = pivotVS + float4(i.positionOS.xy, 0, 0);
    o.positionHS = mul(UNITY_MATRIX_P, positionVS);
  2. 处理草的颜色,在前面[多光源]章节中已经做过,为了达到草与地面的渐变,我们需要一个草的基本色_BaseColor,以及地面颜色_GroundColor,使用模型空间坐标Y轴来做插值判断

    1
    2
    3
    4
    5
    6
    7
    // Fetch the main light; when main-light shadows are enabled the shadow
    // coordinate must be supplied so the returned light carries shadow
    // attenuation for this fragment's world position.
    Light mainLight;
    #if _MAIN_LIGHT_SHADOWS
    mainLight = GetMainLight(TransformWorldToShadowCoord(positionWS));
    #else
    mainLight = GetMainLight();
    #endif

草地交互效果

代码

效果展示

总结

草的燃烧效果

基础

代码

效果展示

总结

配合Terrain制作自己的草刷

基础

代码

效果

总结

使用Terrain制作大世界,拆分地形块并分块加载

卡通水

基础

菜鸡都能学会的Unity卡通水shader

代码

总结

浅水

基础

代码

效果展示

总结

雪效果实现

链接

Unity3D游戏开发中100+效果的实现和源码大全 - 收藏起来肯定用得着
URP HLSL入门学习
高品质后处理:十种图像模糊算法的总结与实现