通用渲染管线(URP)是Unity官方基于可编程渲染管线(SRP)实现的渲染管线。URP提供了对美术友好的工作流程,能够快速且简单地创建跨平台的图形效果,目标平台覆盖从移动端到高端游戏主机和PC。SRP的内容可以参见:Unity渲染管线介绍。
Unity将渲染管线的代码分为了几个包,分别如下:
- com.unity.render-pipelines.core 此包包含了一些公共的可重用代码,URP和HDRP都会使用此包。
- com.unity.render-pipelines.high-definition-config HDRP配置包。
- com.unity.render-pipelines.high-definition HDRP的核心包。
- com.unity.render-pipelines.universal URP的核心包。
应用阶段
从名字我们可以看出,这个阶段是由我们的应用主导的,因此通常由CPU负责实现。换句话说,我们这些开发者具有这个阶段的绝对控制权。 在这阶段中,开发者有3个主要任务:
- 我们要准备好场景数据,例如摄像机的位置、视锥体、场景中包含了哪些模型、使用了哪些光源等等;
- 为了提升渲染性能,我们往往需要做一个粗粒度剔除(culling)工作,把那些不可见的物体剔除出去,这样就不需要再移交给几何阶段进行处理;
- 最后我们要设置好每个模型的渲染状态,这些渲染状态包括但不限于它们使用的材质(漫反射颜色、高光反射颜色)、使用的纹理、使用的Shader等。
这一阶段最重要的输出是渲染所需的几何信息,即渲染图元(rendering primitives)。通俗来讲,渲染图元可以是点、线、三角面等。这些渲染图元将会被传递给下一个阶段——几何阶段。
URP渲染管线
RenderPipelineManager
RenderPipelineManager主要管理管线的创建和切换,以及一些渲染循环中的事件。
渲染入口是RenderPipelineManager.DoRenderLoop_Internal,此函数是由Unity Native Code调用的。
1 | // Unity Native Code每帧调用 |
渲染循环事件:
1 | // 开始渲染 |
RenderPipeline
RenderPipeline的作用主要是提供了Render抽象函数(渲染的入口)以及提供渲染循环事件函数并调用到RenderPipelineManager中的对应事件。渲染循环事件函数提供给子类调用,避免每个子类写重复的代码。
核心函数:
1 | // 此函数由RenderPipelineManager.DoRenderLoop_Internal调用而来 |
UniversalRenderPipeline
UniversalRenderPipeline继承自RenderPipeline,并实现了Render函数,当然此类的核心函数就是Render。
核心函数:
1 | protected override void Render(ScriptableRenderContext renderContext, List<Camera> cameras) |
UniversalRenderPipeline.RenderCameraStack,渲染Base相机,以及Base相机的相机栈,此函数调用UniversalRenderPipeline.RenderSingleCamera函数来渲染单个相机:
1 | static void RenderCameraStack(ScriptableRenderContext context, Camera baseCamera) |
UniversalRenderPipeline.UpdateVolumeFramework,此函数用于更新后处理参数,主要是计算多个Volume之间插值后的最终参数值:
1 | static void UpdateVolumeFramework(Camera camera, UniversalAdditionalCameraData additionalCameraData) |
UniversalRenderPipeline.InitializeCameraData,初始化相机设置(包括相机的基础参数和渲染目标描述器),大部分设置继承自Base相机的设置:
static void InitializeCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, bool resolveFinalTarget, out CameraData cameraData)
{
    using var scope = new ProfilingScope(null, Profiling.Pipeline.initializeCameraData);

    // Start from a blank CameraData, then fill in the settings shared by every camera
    // in the stack (Base and Overlay alike), followed by this camera's own settings.
    cameraData = new CameraData();
    InitializeStackedCameraData(camera, additionalCameraData, ref cameraData);
    InitializeAdditionalCameraData(camera, additionalCameraData, resolveFinalTarget, ref cameraData);

    // Resolve the MSAA sample count for the render target descriptor: the camera must
    // allow MSAA, the pipeline asset must request more than one sample, and the
    // active renderer must support it.
    var activeRenderer = additionalCameraData?.scriptableRenderer;
    bool msaaSupported = activeRenderer != null && activeRenderer.supportedRenderingFeatures.msaa;
    int sampleCount = 1;
    if (camera.allowMSAA && asset.msaaSampleCount > 1 && msaaSupported)
        sampleCount = (camera.targetTexture != null) ? camera.targetTexture.antiAliasing : asset.msaaSampleCount;

    // Keep an alpha channel when the project preserves framebuffer alpha.
    bool wantsAlpha = Graphics.preserveFramebufferAlpha;

    // Build the descriptor used to create this camera's render target.
    cameraData.cameraTargetDescriptor = CreateRenderTextureDescriptor(camera, cameraData.renderScale,
        cameraData.isHdrEnabled, sampleCount, wantsAlpha, cameraData.requiresOpaqueTexture);
}
UniversalRenderPipeline.InitializeStackedCameraData,初始化相机栈中相机的公共设置参数,特定相机特有的参数设置参见InitializeAdditionalCameraData函数:
static void InitializeStackedCameraData(Camera baseCamera, UniversalAdditionalCameraData baseAdditionalCameraData, ref CameraData cameraData)
{
    using var scope = new ProfilingScope(null, Profiling.Pipeline.initializeStackedCameraData);

    // Pull targetTexture and cameraType straight from the camera.
    var settings = asset;
    cameraData.targetTexture = baseCamera.targetTexture;
    cameraData.cameraType = baseCamera.cameraType;
    bool isSceneViewCamera = cameraData.isSceneViewCamera;

    ///////////////////////////////////////////////////////////////////
    // Environment and post-processing settings                       /
    ///////////////////////////////////////////////////////////////////
    if (!isSceneViewCamera && baseAdditionalCameraData != null)
    {
        cameraData.volumeLayerMask = baseAdditionalCameraData.volumeLayerMask;
        // Fall back to the camera's own transform when no volume trigger is assigned.
        cameraData.volumeTrigger = baseAdditionalCameraData.volumeTrigger == null ? baseCamera.transform : baseAdditionalCameraData.volumeTrigger;
        // Replaces NaN/Inf values produced by shaders with black pixels.
        cameraData.isStopNaNEnabled = baseAdditionalCameraData.stopNaN && SystemInfo.graphicsShaderLevel >= 35;
        // Applies an 8-bit dither to prevent color banding.
        cameraData.isDitheringEnabled = baseAdditionalCameraData.dithering;
        // Anti-aliasing mode and quality.
        cameraData.antialiasing = baseAdditionalCameraData.antialiasing;
        cameraData.antialiasingQuality = baseAdditionalCameraData.antialiasingQuality;
    }
    else
    {
        // Scene-view cameras and cameras without additional data use the defaults.
        cameraData.volumeLayerMask = 1; // "Default"
        cameraData.volumeTrigger = null;
        cameraData.isStopNaNEnabled = false;
        cameraData.isDitheringEnabled = false;
        cameraData.antialiasing = AntialiasingMode.None;
        cameraData.antialiasingQuality = AntialiasingQuality.High;
    }

    ///////////////////////////////////////////////////////////////////
    // Settings that control the camera output                        /
    ///////////////////////////////////////////////////////////////////
    // HDR is enabled only when both the camera and the pipeline asset allow it.
    cameraData.isHdrEnabled = baseCamera.allowHDR && settings.supportsHDR;

    // Camera output size.
    Rect viewportRect = baseCamera.rect;
    cameraData.pixelRect = baseCamera.pixelRect;
    cameraData.pixelWidth = baseCamera.pixelWidth;
    cameraData.pixelHeight = baseCamera.pixelHeight;
    cameraData.aspectRatio = (float)cameraData.pixelWidth / (float)cameraData.pixelHeight;
    cameraData.isDefaultViewport = (!(Math.Abs(viewportRect.x) > 0.0f || Math.Abs(viewportRect.y) > 0.0f ||
        Math.Abs(viewportRect.width) < 1.0f || Math.Abs(viewportRect.height) < 1.0f));

    // Render scale: values within the threshold of 1.0 snap back to exactly 1.0.
    const float kRenderScaleThreshold = 0.05f;
    cameraData.renderScale = (Mathf.Abs(1.0f - settings.renderScale) < kRenderScaleThreshold) ? 1.0f : settings.renderScale;

    // Pick the upscaling filter used for the final blit.
    cameraData.upscalingFilter = ResolveUpscalingFilterSelection(new Vector2(cameraData.pixelWidth, cameraData.pixelHeight), cameraData.renderScale, settings.upscalingFilter);

    // A render scale above 1 means the final image is downscaled; below 1 it is
    // upscaled. FSR (a super-resolution filter) treats even a 100% render scale as
    // an upscaling operation.
    if (cameraData.renderScale > 1.0f)
        cameraData.imageScalingMode = ImageScalingMode.Downscaling;
    else if ((cameraData.renderScale < 1.0f) || (cameraData.upscalingFilter == ImageUpscalingFilter.FSR))
        cameraData.imageScalingMode = ImageScalingMode.Upscaling;
    else
        cameraData.imageScalingMode = ImageScalingMode.None;

    // FSR sharpening parameters.
    cameraData.fsrOverrideSharpness = settings.fsrOverrideSharpness;
    cameraData.fsrSharpness = settings.fsrSharpness;

    // Opaque sorting rules: front-to-back sorting can be skipped when the GPU has
    // hidden surface removal, or when the camera explicitly opts out of distance sorting.
    var frontToBackFlags = SortingCriteria.CommonOpaque;
    var noDistanceSortFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
    bool hasHSRGPU = SystemInfo.hasHiddenSurfaceRemovalOnGPU;
    bool skipFrontToBack = (baseCamera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || baseCamera.opaqueSortMode == OpaqueSortMode.NoDistanceSort;
    cameraData.defaultOpaqueSortFlags = skipFrontToBack ? noDistanceSortFlags : frontToBackFlags;

    cameraData.captureActions = CameraCaptureBridge.GetCaptureActions(baseCamera);
}
UniversalRenderPipeline.InitializeAdditionalCameraData, 初始化相机栈中相机特有参数:
1 | static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, bool resolveFinalTarget, ref CameraData cameraData) |
相机数据初始化完后,就调用UniversalRenderPipeline.RenderSingleCamera函数进行单个相机的渲染:
1 | static void RenderSingleCamera(ScriptableRenderContext context, CameraData cameraData, bool anyPostProcessingEnabled) |
在渲染单个相机之前,需要对渲染数据进行初始化,UniversalRenderPipeline.InitializeRenderingData:
static void InitializeRenderingData(UniversalRenderPipelineAsset settings, ref CameraData cameraData, ref CullingResults cullResults,
    bool anyPostProcessingEnabled, out RenderingData renderingData)
{
    using var scope = new ProfilingScope(null, Profiling.Pipeline.initializeRenderingData);

    // Lights that remain visible after camera culling.
    var visibleLights = cullResults.visibleLights;

    // The main light is the sun assigned in the Lighting window, or otherwise the
    // brightest directional light; -1 when there is none.
    int mainLightIndex = GetMainLightIndex(settings, visibleLights);

    bool mainLightCastShadows = false;
    bool additionalLightsCastShadows = false;
    if (cameraData.maxShadowDistance > 0.0f)
    {
        // Does the main light cast shadows?
        mainLightCastShadows = (mainLightIndex != -1 && visibleLights[mainLightIndex].light != null &&
            visibleLights[mainLightIndex].light.shadows != LightShadows.None);

        // Additional lights only cast shadows when rendered per-pixel; additional
        // directional lights are not supported, so only spot/point lights count.
        if (settings.additionalLightsRenderingMode == LightRenderingMode.PerPixel)
        {
            for (int i = 0; i < visibleLights.Length; ++i)
            {
                if (i == mainLightIndex)
                    continue;

                bool isPunctual = visibleLights[i].lightType == LightType.Spot || visibleLights[i].lightType == LightType.Point;
                Light light = visibleLights[i].light;
                if (isPunctual && light != null && light.shadows != LightShadows.None)
                {
                    additionalLightsCastShadows = true;
                    break;
                }
            }
        }
    }

    renderingData.cullResults = cullResults;
    renderingData.cameraData = cameraData;

    // Fill in the light, shadow and post-processing sub-structs.
    InitializeLightData(settings, visibleLights, mainLightIndex, out renderingData.lightData);
    InitializeShadowData(settings, visibleLights, mainLightCastShadows, additionalLightsCastShadows && !renderingData.lightData.shadeAdditionalLightsPerVertex, out renderingData.shadowData);
    InitializePostProcessingData(settings, out renderingData.postProcessingData);

    // Remaining per-frame flags.
    renderingData.supportsDynamicBatching = settings.supportsDynamicBatching;
    renderingData.perObjectData = GetPerObjectLightFlags(renderingData.lightData.additionalLightsCount);
    renderingData.postProcessingEnabled = anyPostProcessingEnabled;

    CheckAndApplyDebugSettings(ref renderingData);
}
URP渲染器
一个渲染管线中可以包含多个渲染器,不同的相机可以使用不同的渲染器进行渲染。
ScriptableRenderer
ScriptableRenderer是所有渲染器的基类,提供了基础的功能,比如:设置相机矩阵和Shader中用到的一些变量,执行渲染器中的RendererFeature和Pass等。
1. 首先清除color, depth缓冲区,ScriptableRenderer.Clear:
1 | internal void Clear(CameraRenderType cameraType) |
2. 执行剔除前调用函数,ScriptableRenderer.OnPreCullRenderPasses
1 | internal void OnPreCullRenderPasses(in CameraData cameraData) |
3. 构建相机剔除参数ScriptableRenderer.SetupCullingParameters,每个子类具体实现
4. 构建各种渲染Pass, ScriptableRenderer.Setup,每个子类具体实现
5. 执行渲染,ScriptableRenderer.Execute
1 | public void Execute(ScriptableRenderContext context, ref RenderingData renderingData) |
UniversalRenderer
UniversalRenderer是ScriptableRenderer的子类,URP的标准渲染器。 它主要在复写的Setup函数中添加了各种的渲染Pass到m_ActiveRenderPassQueue中,核心的代码在基类ScriptableRenderer中。
核心成员:
- ForwardLights m_ForwardLights; // 前向渲染光照
- DeferredLights m_DeferredLights; // 延迟渲染光照
核心Pass:
- DepthOnlyPass 深度Pass
- MainLightShadowCasterPass 主灯光阴影投射Pass
- DrawObjectsPass 绘制对象Pass
- DrawSkyboxPass 绘制天空盒Pass
URP中的Pass
Pass是执行一个具体的渲染任务。
ScriptableRenderPass
ScriptableRenderPass是渲染Pass的基础类,提供一些Pass都应具备的属性,一些公共的方法和Pass的生命周期函数,具体如下:
Pass的共有属性:
- renderPassEvent 渲染Pass事件(BeforeRendering, BeforeRenderingGbuffer, BeforeRenderingOpaques, AfterRenderingOpaques, BeforeRenderingTransparents, AfterRenderingTransparents, BeforeRenderingPostProcessing和AfterRendering等 )。
- colorAttachment 颜色缓冲区纹理。
- depthAttachment 深度缓冲区纹理。
- input Pass的输入需求(ScriptableRenderPassInput.Depth,Normal,Color和Motion)。
- clearFlag 需要清除的内容(Color、Depth和Stencil)。
- clearColor 清除的值是什么。
- renderTargetWidth/renderTargetHeight 渲染目标的宽度和高度。
- depthOnly 是仅深度?
- isLastPass 是最后一个Pass。
- renderPassQueueIndex 这帧中Pass的索引,就是在ScriptableRenderer.m_ActiveRenderPassQueue排序后的索引
公共的方法:
- ConfigureInput 配置此Pass的输入需求。
- ConfigureInputAttachments 配置输入附件纹理
- ConfigureTarget 配置渲染目标(colorAttachment和depthAttachment)
- ConfigureClear 配置清除标志
- Blit 拷贝纹理
- CreateDrawingSettings 创建绘制设置(使用的什么Tag的Shader, 排序规则是什么等。)
生命周期函数:
- OnCameraSetup 渲染一个相机前调用,所有Pass执行之前调用。
- Configure 执行一个Pass前调用。
- Execute 核心执行函数。
- OnCameraCleanup 渲染一个相机完成后调用。
- OnFinishCameraStackRendering 整个相机栈渲染完成后调用。
核心Pass
DepthOnlyPass 深度Pass
核心函数:
// Setup is called from the renderer's Setup, before OnCameraSetup. It is not defined
// on the ScriptableRenderPass base class and not every pass has one; a custom renderer
// calls a pass's Setup explicitly from its own Setup.
public void Setup(
    RenderTextureDescriptor baseDescriptor,
    RenderTargetHandle depthAttachmentHandle)
{
    this.depthAttachmentHandle = depthAttachmentHandle;

    // Turn the camera descriptor into a pure depth descriptor without MSAA.
    baseDescriptor.colorFormat = RenderTextureFormat.Depth;
    baseDescriptor.depthBufferBits = UniversalRenderer.k_DepthStencilBufferBits;
    baseDescriptor.msaaSamples = 1;
    descriptor = baseDescriptor;

    this.allocateDepth = true;
    this.shaderTagId = k_ShaderTagId;
}
// Called before any pass executes for this camera: allocates the temporary depth RT
// (when owned by this pass) and configures the render target and clear state.
public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
{
    if (this.allocateDepth)
        cmd.GetTemporaryRT(depthAttachmentHandle.id, descriptor, FilterMode.Point);

    var cameraTargetDesc = renderingData.cameraData.cameraTargetDescriptor;

    // With depth priming on a Base camera (or one that clears depth), render straight
    // into the camera's depth target; otherwise use this pass's own depth attachment.
    bool useCameraDepthTarget = renderingData.cameraData.renderer.useDepthPriming &&
        (renderingData.cameraData.renderType == CameraRenderType.Base || renderingData.cameraData.clearDepth);

    if (useCameraDepthTarget)
    {
        ConfigureTarget(renderingData.cameraData.renderer.cameraDepthTarget, descriptor.depthStencilFormat, cameraTargetDesc.width, cameraTargetDesc.height, 1, true);
    }
    else
    {
        var ownTarget = new RenderTargetIdentifier(depthAttachmentHandle.Identifier(), 0, CubemapFace.Unknown, -1);
        ConfigureTarget(ownTarget, descriptor.depthStencilFormat, cameraTargetDesc.width, cameraTargetDesc.height, 1, true);
    }

    // Only depth needs clearing for a depth-only pass.
    ConfigureClear(ClearFlag.Depth, Color.black);
}
// Executes the pass: draws every opaque renderer using its "DepthOnly" shader pass.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    // Grab a command buffer to record the rendering work.
    CommandBuffer cmd = CommandBufferPool.Get();
    using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.DepthPrepass)))
    {
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();

        // Filter the objects to draw; render them with the shader pass tagged "DepthOnly".
        var sortingFlags = renderingData.cameraData.defaultOpaqueSortFlags;
        var drawingSettings = CreateDrawingSettings(this.shaderTagId, ref renderingData, sortingFlags);
        drawingSettings.perObjectData = PerObjectData.None;

        context.DrawRenderers(renderingData.cullResults, ref drawingSettings, ref m_FilteringSettings);
    }
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
// Called after the camera has finished rendering: releases the temporary depth RT.
public override void OnCameraCleanup(CommandBuffer cmd)
{
    if (cmd == null)
        throw new ArgumentNullException("cmd");

    // Nothing to release when the handle already points at the camera target.
    if (depthAttachmentHandle == RenderTargetHandle.CameraTarget)
        return;

    if (this.allocateDepth)
        cmd.ReleaseTemporaryRT(depthAttachmentHandle.id);
    depthAttachmentHandle = RenderTargetHandle.CameraTarget;
}
MainLightShadowCasterPass 主灯光阴影投射Pass
核心函数:
// Constructor: allocates per-cascade state and caches the shader property IDs used
// when uploading main-light shadow constants.
public MainLightShadowCasterPass(RenderPassEvent evt)
{
    base.profilingSampler = new ProfilingSampler(nameof(MainLightShadowCasterPass));
    renderPassEvent = evt;

    // Per-cascade shadow matrices, slice data and split distances.
    m_MainLightShadowMatrices = new Matrix4x4[k_MaxCascades + 1];
    m_CascadeSlices = new ShadowSliceData[k_MaxCascades];
    m_CascadeSplitDistances = new Vector4[k_MaxCascades];

    // Shader property IDs for the main-light shadow constant buffer.
    MainLightShadowConstantBuffer._WorldToShadow = Shader.PropertyToID("_MainLightWorldToShadow");
    MainLightShadowConstantBuffer._ShadowParams = Shader.PropertyToID("_MainLightShadowParams");
    MainLightShadowConstantBuffer._CascadeShadowSplitSpheres0 = Shader.PropertyToID("_CascadeShadowSplitSpheres0");
    MainLightShadowConstantBuffer._CascadeShadowSplitSpheres1 = Shader.PropertyToID("_CascadeShadowSplitSpheres1");
    MainLightShadowConstantBuffer._CascadeShadowSplitSpheres2 = Shader.PropertyToID("_CascadeShadowSplitSpheres2");
    MainLightShadowConstantBuffer._CascadeShadowSplitSpheres3 = Shader.PropertyToID("_CascadeShadowSplitSpheres3");
    MainLightShadowConstantBuffer._CascadeShadowSplitSphereRadii = Shader.PropertyToID("_CascadeShadowSplitSphereRadii");
    MainLightShadowConstantBuffer._ShadowOffset0 = Shader.PropertyToID("_MainLightShadowOffset0");
    MainLightShadowConstantBuffer._ShadowOffset1 = Shader.PropertyToID("_MainLightShadowOffset1");
    MainLightShadowConstantBuffer._ShadowOffset2 = Shader.PropertyToID("_MainLightShadowOffset2");
    MainLightShadowConstantBuffer._ShadowOffset3 = Shader.PropertyToID("_MainLightShadowOffset3");
    MainLightShadowConstantBuffer._ShadowmapSize = Shader.PropertyToID("_MainLightShadowmapSize");

    m_MainLightShadowmap.Init("_MainLightShadowmapTexture");
}
// Prepares this frame's shadow rendering state. Every early-out falls back to
// SetupForEmptyRendering, which produces an empty (1x1) shadowmap so shaders still
// have a valid texture bound.
public bool Setup(ref RenderingData renderingData)
{
using var profScope = new ProfilingScope(null, m_ProfilingSetupSampler);
// Main-light shadows not supported: render the empty shadowmap.
if (!renderingData.shadowData.supportsMainLightShadows)
return SetupForEmptyRendering(ref renderingData);
// Reset shadowmap and cascade state from the previous frame.
Clear();
// No main light this frame?
int shadowLightIndex = renderingData.lightData.mainLightIndex;
if (shadowLightIndex == -1)
return SetupForEmptyRendering(ref renderingData);
// Main light exists but has shadows disabled?
VisibleLight shadowLight = renderingData.lightData.visibleLights[shadowLightIndex];
Light light = shadowLight.light;
if (light.shadows == LightShadows.None)
return SetupForEmptyRendering(ref renderingData);
// Only a directional light is expected as the main light; warn otherwise
// (note: this still continues rather than bailing out).
if (shadowLight.lightType != LightType.Directional)
{
Debug.LogWarning("Only directional lights are supported as main light.");
}
// No visible shadow casters within the shadow bounds?
Bounds bounds;
if (!renderingData.cullResults.GetShadowCasterBounds(shadowLightIndex, out bounds))
return SetupForEmptyRendering(ref renderingData);
// Number of shadow cascades configured on the pipeline asset.
m_ShadowCasterCascadesCount = renderingData.shadowData.mainLightShadowCascadesCount;
// Resolution of a single cascade tile, derived from the atlas size and cascade count.
int shadowResolution = ShadowUtils.GetMaxTileResolutionInAtlas(renderingData.shadowData.mainLightShadowmapWidth,
renderingData.shadowData.mainLightShadowmapHeight, m_ShadowCasterCascadesCount);
renderTargetWidth = renderingData.shadowData.mainLightShadowmapWidth;
// With exactly 2 cascades the atlas height is halved (both tiles fit in one row).
renderTargetHeight = (m_ShadowCasterCascadesCount == 2) ?
renderingData.shadowData.mainLightShadowmapHeight >> 1 :
renderingData.shadowData.mainLightShadowmapHeight;
// Compute each cascade's matrices, slice data and split distance.
for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
{
bool success = ShadowUtils.ExtractDirectionalLightMatrix(ref renderingData.cullResults, ref renderingData.shadowData,
shadowLightIndex, cascadeIndex, renderTargetWidth, renderTargetHeight, shadowResolution, light.shadowNearPlane,
out m_CascadeSplitDistances[cascadeIndex], out m_CascadeSlices[cascadeIndex]);
if (!success)
return SetupForEmptyRendering(ref renderingData);
}
// Allocate the shadowmap render texture and cache fade/border parameters.
m_MainLightShadowmapTexture = ShadowUtils.GetTemporaryShadowTexture(renderTargetWidth, renderTargetHeight, k_ShadowmapBufferBits);
m_MaxShadowDistanceSq = renderingData.cameraData.maxShadowDistance * renderingData.cameraData.maxShadowDistance;
m_CascadeBorder = renderingData.shadowData.mainLightShadowCascadeBorder;
m_CreateEmptyShadowmap = false;
useNativeRenderPass = true;
return true;
}
// Binds the main-light shadowmap as the render target and clears it before drawing.
public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
{
    var shadowmapTarget = new RenderTargetIdentifier(m_MainLightShadowmapTexture);
    ConfigureTarget(shadowmapTarget, m_MainLightShadowmapTexture.depthStencilFormat, renderTargetWidth, renderTargetHeight, 1, true);
    ConfigureClear(ClearFlag.All, Color.black);
}
// Pass entry point: renders either the empty placeholder shadowmap or the full
// cascaded shadowmap, depending on what Setup decided.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    if (m_CreateEmptyShadowmap)
        SetEmptyMainLightCascadeShadowmap(ref context);
    else
        RenderMainLightCascadeShadowmap(ref context, ref renderingData.cullResults, ref renderingData.lightData, ref renderingData.shadowData);
}
// Renders the main light's cascaded shadowmap: one slice per cascade, then sets the
// shader keywords and receiver constants.
void RenderMainLightCascadeShadowmap(ref ScriptableRenderContext context, ref CullingResults cullResults, ref LightData lightData, ref ShadowData shadowData)
{
int shadowLightIndex = lightData.mainLightIndex;
if (shadowLightIndex == -1)
return;
// Fetch the main light.
VisibleLight shadowLight = lightData.visibleLights[shadowLightIndex];
CommandBuffer cmd = CommandBufferPool.Get();
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.MainLightShadow)))
{
// Build the shadow drawing settings for this light.
var settings = new ShadowDrawingSettings(cullResults, shadowLightIndex);
settings.useRenderingLayerMaskTest = UniversalRenderPipeline.asset.supportsLightLayers;
// Render each cascade slice with its own split data, bias and matrices.
for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
{
settings.splitData = m_CascadeSlices[cascadeIndex].splitData;
Vector4 shadowBias = ShadowUtils.GetShadowBias(ref shadowLight, shadowLightIndex, ref shadowData, m_CascadeSlices[cascadeIndex].projectionMatrix, m_CascadeSlices[cascadeIndex].resolution);
ShadowUtils.SetupShadowCasterConstantBuffer(cmd, ref shadowLight, shadowBias);
// This is a directional light, not a punctual (spot/point) one.
CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.CastingPunctualLightShadow, false);
ShadowUtils.RenderShadowSlice(cmd, ref context, ref m_CascadeSlices[cascadeIndex],
ref settings, m_CascadeSlices[cascadeIndex].projectionMatrix, m_CascadeSlices[cascadeIndex].viewMatrix);
}
// Enable the main-light shadow keywords (single cascade vs. cascades, soft shadows).
shadowData.isKeywordSoftShadowsEnabled = shadowLight.light.shadows == LightShadows.Soft && shadowData.supportsSoftShadows;
CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadows, shadowData.mainLightShadowCascadesCount == 1);
CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadowCascades, shadowData.mainLightShadowCascadesCount > 1);
CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.SoftShadows, shadowData.isKeywordSoftShadowsEnabled);
// Upload the receiver-side shader constants (shadowmap, matrices, parameters).
SetupMainLightShadowReceiverConstants(cmd, shadowLight, shadowData.supportsSoftShadows);
}
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
// Uploads the shader constants that shadow receivers sample: the shadowmap texture,
// world-to-shadow matrices, fade parameters, cascade split spheres and soft-shadow offsets.
void SetupMainLightShadowReceiverConstants(CommandBuffer cmd, VisibleLight shadowLight, bool supportsSoftShadows)
{
Light light = shadowLight.light;
bool softShadows = shadowLight.light.shadows == LightShadows.Soft && supportsSoftShadows;
// Copy the per-cascade world-to-shadow matrices computed during Setup.
int cascadeCount = m_ShadowCasterCascadesCount;
for (int i = 0; i < cascadeCount; ++i)
m_MainLightShadowMatrices[i] = m_CascadeSlices[i].shadowTransform;
// Unused cascade slots get a no-op matrix that maps everything outside the
// shadowmap depth range (z depends on reversed-Z convention).
Matrix4x4 noOpShadowMatrix = Matrix4x4.zero;
noOpShadowMatrix.m22 = (SystemInfo.usesReversedZBuffer) ? 1.0f : 0.0f;
for (int i = cascadeCount; i <= k_MaxCascades; ++i)
m_MainLightShadowMatrices[i] = noOpShadowMatrix;
// Reciprocal atlas sizes used for texel offsets.
float invShadowAtlasWidth = 1.0f / renderTargetWidth;
float invShadowAtlasHeight = 1.0f / renderTargetHeight;
float invHalfShadowAtlasWidth = 0.5f * invShadowAtlasWidth;
float invHalfShadowAtlasHeight = 0.5f * invShadowAtlasHeight;
float softShadowsProp = softShadows ? 1.0f : 0.0f;
// Scale/bias for the linear distance-based shadow fade.
ShadowUtils.GetScaleAndBiasForLinearDistanceFade(m_MaxShadowDistanceSq, m_CascadeBorder, out float shadowFadeScale, out float shadowFadeBias);
// Shadowmap texture, world-to-shadow matrices and shadow parameters.
cmd.SetGlobalTexture(m_MainLightShadowmap.id, m_MainLightShadowmapTexture);
cmd.SetGlobalMatrixArray(MainLightShadowConstantBuffer._WorldToShadow, m_MainLightShadowMatrices);
cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowParams,
new Vector4(light.shadowStrength, softShadowsProp, shadowFadeScale, shadowFadeBias));
// Cascade split spheres and their squared radii (only needed with multiple cascades).
if (m_ShadowCasterCascadesCount > 1)
{
cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres0,
m_CascadeSplitDistances[0]);
cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres1,
m_CascadeSplitDistances[1]);
cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres2,
m_CascadeSplitDistances[2]);
cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres3,
m_CascadeSplitDistances[3]);
cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSphereRadii, new Vector4(
m_CascadeSplitDistances[0].w * m_CascadeSplitDistances[0].w,
m_CascadeSplitDistances[1].w * m_CascadeSplitDistances[1].w,
m_CascadeSplitDistances[2].w * m_CascadeSplitDistances[2].w,
m_CascadeSplitDistances[3].w * m_CascadeSplitDistances[3].w));
}
// Half-texel sample offsets and atlas size, used by the soft-shadow filter.
if (supportsSoftShadows)
{
cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowOffset0,
new Vector4(-invHalfShadowAtlasWidth, -invHalfShadowAtlasHeight, 0.0f, 0.0f));
cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowOffset1,
new Vector4(invHalfShadowAtlasWidth, -invHalfShadowAtlasHeight, 0.0f, 0.0f));
cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowOffset2,
new Vector4(-invHalfShadowAtlasWidth, invHalfShadowAtlasHeight, 0.0f, 0.0f));
cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowOffset3,
new Vector4(invHalfShadowAtlasWidth, invHalfShadowAtlasHeight, 0.0f, 0.0f));
cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowmapSize, new Vector4(invShadowAtlasWidth,
invShadowAtlasHeight,
renderTargetWidth, renderTargetHeight));
}
}
DrawObjectsPass 绘制对象Pass
1 | // 构造函数 |
DrawSkyboxPass 绘制天空盒Pass
1 | public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData) |