-
Notifications
You must be signed in to change notification settings - Fork 870
Expand file tree
/
Copy pathFinalBlitPass.cs
More file actions
379 lines (322 loc) · 20.2 KB
/
FinalBlitPass.cs
File metadata and controls
379 lines (322 loc) · 20.2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
using System;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Experimental.Rendering;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Copy the given color target to the current camera target
///
/// You can use this pass to copy the result of rendering to
/// the camera target. The pass takes the screen viewport into
/// consideration.
/// </summary>
public partial class FinalBlitPass : ScriptableRenderPass
{
    // Cached shader property ID for the global "_CameraDepthTexture".
    static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture");
#if URP_COMPATIBILITY_MODE
    // Color target to blit from; assigned via Setup() (compatibility, non-render-graph path only).
    RTHandle m_Source;
    // Reusable pass data instance for the compatibility-mode Execute() path.
    private PassData m_PassData;
#endif
    // Use specialized URP fragment shader pass for debug draw support and color space conversion/encoding support.
    // See CoreBlit.shader and BlitHDROverlay.shader
    // Shader pass names resolved in the blit materials; they select nearest vs bilinear sampling.
    static class BlitPassNames
    {
        public const string NearestSampler = "NearestDebugDraw";
        public const string BilinearSampler = "BilinearDebugDraw";
    }
    enum BlitType
    {
        Core = 0, // Core blit
        HDR = 1, // Blit with HDR encoding and overlay UI compositing
        Count = 2
    }
    // A blit material together with its resolved shader pass indices (-1 when a pass was not found).
    struct BlitMaterialData
    {
        public Material material;
        public int nearestSamplerPass;
        public int bilinearSamplerPass;
    }
    // Indexed by (int)BlitType: [Core] = blitMaterial, [HDR] = blitHDRMaterial.
    BlitMaterialData[] m_BlitMaterialData;
    /// <summary>
    /// Creates a new <c>FinalBlitPass</c> instance.
    /// </summary>
    /// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
    /// <param name="blitMaterial">The <c>Material</c> to use for the final blit.</param>
    /// <param name="blitHDRMaterial">The <c>Material</c> to use for the final blit when HDR output is active.</param>
    /// <seealso cref="RenderPassEvent"/>
    public FinalBlitPass(RenderPassEvent evt, Material blitMaterial, Material blitHDRMaterial)
    {
        profilingSampler = ProfilingSampler.Get(URPProfileId.BlitFinalToBackBuffer);
#if URP_COMPATIBILITY_MODE
        base.useNativeRenderPass = false;
        m_PassData = new PassData();
#endif
        renderPassEvent = evt;
        // Find sampler passes by name; a null material or a missing pass yields -1.
        const int blitTypeCount = (int)BlitType.Count;
        m_BlitMaterialData = new BlitMaterialData[blitTypeCount];
        for (int i = 0; i < blitTypeCount; ++i)
        {
            m_BlitMaterialData[i].material = i == (int)BlitType.Core ? blitMaterial : blitHDRMaterial;
            m_BlitMaterialData[i].nearestSamplerPass = m_BlitMaterialData[i].material?.FindPass(BlitPassNames.NearestSampler) ?? -1;
            m_BlitMaterialData[i].bilinearSamplerPass = m_BlitMaterialData[i].material?.FindPass(BlitPassNames.BilinearSampler) ?? -1;
        }
    }
    /// <summary>
    /// Cleans up resources used by the pass. The pass owns no disposable resources, so this is a no-op.
    /// </summary>
    public void Dispose()
    {
    }
    /// <summary>
    /// Configure the pass. Deprecated overload kept only to fail loudly at runtime.
    /// </summary>
    /// <param name="baseDescriptor">Unused.</param>
    /// <param name="colorHandle">Unused.</param>
    [Obsolete("Use RTHandles for colorHandle. #from(2022.1) #breakingFrom(2023.1)", true)]
    public void Setup(RenderTextureDescriptor baseDescriptor, RenderTargetHandle colorHandle)
    {
        throw new NotSupportedException("Setup with RenderTargetHandle has been deprecated. Use it with RTHandles instead.");
    }
    /// <summary>
    /// Configure the pass.
    /// </summary>
    /// <param name="baseDescriptor">Unused; kept for API compatibility.</param>
    /// <param name="colorHandle">The source color target to blit from. Only stored when compatibility mode is compiled in.</param>
    public void Setup(RenderTextureDescriptor baseDescriptor, RTHandle colorHandle)
    {
#if URP_COMPATIBILITY_MODE
        m_Source = colorHandle;
#endif
    }
    // Configures the given blit material for HDR display output: uploads the luminance parameters,
    // sets the gamut/encoding state and toggles the HDR overlay-UI compositing keyword.
    static void SetupHDROutput(ColorGamut hdrDisplayColorGamut, Material material, HDROutputUtils.Operation hdrOperation, Vector4 hdrOutputParameters, bool rendersOverlayUI)
    {
        material.SetVector(ShaderPropertyId.hdrOutputLuminanceParams, hdrOutputParameters);
        HDROutputUtils.ConfigureHDROutput(material, hdrDisplayColorGamut, hdrOperation);
        CoreUtils.SetKeyword(material, ShaderKeywordStrings.HDROverlay, rendersOverlayUI);
    }
#if URP_COMPATIBILITY_MODE
    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsoleteFrom2023_3)]
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        UniversalCameraData cameraData = renderingData.frameData.Get<UniversalCameraData>();
        DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
        bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(cameraData.resolveFinalTarget);
        // When the debug handler captures the final image, retarget this pass at the debug screen instead of the camera target.
        if (resolveToDebugScreen)
        {
            // Disable obsolete warning for internal usage
            #pragma warning disable CS0618
            ConfigureTarget(debugHandler.DebugScreenColorHandle, debugHandler.DebugScreenDepthHandle);
            #pragma warning restore CS0618
        }
    }
    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsoleteFrom2023_3)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        ContextContainer frameData = renderingData.frameData;
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        bool outputsToHDR = renderingData.cameraData.isHDROutputActive;
        // Alpha output is never enabled on the compatibility path.
        bool outputsAlpha = false;
        InitPassData(cameraData, ref m_PassData, outputsToHDR ? BlitType.HDR : BlitType.Core, outputsAlpha, false);
        if (m_PassData.blitMaterialData.material == null)
        {
            // NOTE(review): {0} is formatted from the BlitMaterialData struct (its default ToString),
            // not from a material name — confirm this produces the intended message.
            Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", m_PassData.blitMaterialData, GetType().Name);
            return;
        }
        var cameraTarget = RenderingUtils.GetCameraTargetIdentifier(ref renderingData);
        DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
        bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(cameraData.resolveFinalTarget);
        // Get RTHandle alias to use RTHandle apis
        RTHandleStaticHelpers.SetRTHandleStaticWrapper(cameraTarget);
        var cameraTargetHandle = RTHandleStaticHelpers.s_RTHandleWrapper;
        var cmd = renderingData.commandBuffer;
        // If the stored source is the renderer's front buffer, blit from the current color target instead.
        if (m_Source == cameraData.renderer.GetCameraColorFrontBuffer(cmd))
        {
            m_Source = renderingData.cameraData.renderer.cameraColorTargetHandle;
        }
        using (new ProfilingScope(cmd, profilingSampler))
        {
            // Reset the material keyword state before reconfiguring it below.
            m_PassData.blitMaterialData.material.enabledKeywords = null;
            debugHandler?.UpdateShaderGlobalPropertiesForFinalValidationPass(cmd, cameraData, !resolveToDebugScreen);
            cmd.SetKeyword(ShaderGlobalKeywords.LinearToSRGBConversion,
                cameraData.requireSrgbConversion);
            if (outputsToHDR)
            {
                VolumeStack stack = VolumeManager.instance.stack;
                Tonemapping tonemapping = stack.GetComponent<Tonemapping>();
                Vector4 hdrOutputLuminanceParams;
                UniversalRenderPipeline.GetHDROutputLuminanceParameters(cameraData.hdrDisplayInformation, cameraData.hdrDisplayColorGamut, tonemapping, out hdrOutputLuminanceParams);
                HDROutputUtils.Operation hdrOperation = HDROutputUtils.Operation.None;
                // If the HDRDebugView is on, we don't want the encoding
                if (debugHandler == null || !debugHandler.HDRDebugViewIsActive(cameraData.resolveFinalTarget))
                    hdrOperation |= HDROutputUtils.Operation.ColorEncoding;
                // Color conversion may have happened in the Uber post process through color grading, so we don't want to reapply it
                if (!cameraData.postProcessEnabled)
                    hdrOperation |= HDROutputUtils.Operation.ColorConversion;
                SetupHDROutput(cameraData.hdrDisplayColorGamut, m_PassData.blitMaterialData.material, hdrOperation, hdrOutputLuminanceParams, cameraData.rendersOverlayUI);
            }
            if (resolveToDebugScreen)
            {
                // Blit to the debugger texture instead of the camera target
                int shaderPassIndex = m_Source.rt?.filterMode == FilterMode.Bilinear ? m_PassData.blitMaterialData.bilinearSamplerPass : m_PassData.blitMaterialData.nearestSamplerPass;
                Vector2 viewportScale = m_Source.useScaling ? new Vector2(m_Source.rtHandleProperties.rtHandleScale.x, m_Source.rtHandleProperties.rtHandleScale.y) : Vector2.one;
                Blitter.BlitTexture(cmd, m_Source, viewportScale, m_PassData.blitMaterialData.material, shaderPassIndex);
                cameraData.renderer.ConfigureCameraTarget(debugHandler.DebugScreenColorHandle, debugHandler.DebugScreenDepthHandle);
            }
            // TODO RENDERGRAPH: See https://jira.unity3d.com/projects/URP/issues/URP-1737
            // This branch of the if statement must be removed for render graph and the new command list with a novel way of using Blitter with fill mode
            else if (GL.wireframe && cameraData.isSceneViewCamera)
            {
                // This set render target is necessary so we change the LOAD state to DontCare.
                cmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget,
                    RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, // color
                    RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare); // depth
                cmd.Blit(m_Source.nameID, cameraTargetHandle.nameID);
            }
            else
            {
                // TODO: Final blit pass should always blit to backbuffer. The first time we do we don't need to Load contents to tile.
                // We need to keep in the pipeline of first render pass to each render target to properly set load/store actions.
                // meanwhile we set to load so split screen case works.
                var loadAction = RenderBufferLoadAction.DontCare;
                if (!cameraData.isSceneViewCamera && !cameraData.isDefaultViewport)
                    loadAction = RenderBufferLoadAction.Load;
#if ENABLE_VR && ENABLE_XR_MODULE
                if (cameraData.xr.enabled)
                    loadAction = RenderBufferLoadAction.Load;
#endif
                CoreUtils.SetRenderTarget(renderingData.commandBuffer, cameraTargetHandle.nameID, loadAction, RenderBufferStoreAction.Store, ClearFlag.None, Color.clear);
                // The final blit can't be easily avoided for the logo screen when using HDR, manually correct the scale bias when using nrp with render graph
                Vector4 scaleBias = RenderingUtils.GetFinalBlitScaleBias(m_Source, cameraTargetHandle, cameraData);
                ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer), m_PassData, m_Source, cameraTargetHandle, cameraData, scaleBias);
                cameraData.renderer.ConfigureCameraTarget(cameraTargetHandle, cameraTargetHandle);
            }
        }
    }
#endif
    // Shared blit implementation used by both the compatibility and render-graph paths:
    // sets up viewport and offscreen-UI params, forces wireframe off, selects the sampler
    // pass from the source filter mode and issues the blit.
    private static void ExecutePass(RasterCommandBuffer cmd, PassData data, RTHandle source, RTHandle destination, UniversalCameraData cameraData, Vector4 scaleBias)
    {
        bool isRenderToBackBufferTarget = !cameraData.isSceneViewCamera;
#if ENABLE_VR && ENABLE_XR_MODULE
        // In XR, compare against the XR render target to decide if we render to the back buffer.
        if (cameraData.xr.enabled)
            isRenderToBackBufferTarget = new RenderTargetIdentifier(destination.nameID, 0, CubemapFace.Unknown, -1) == new RenderTargetIdentifier(cameraData.xr.renderTarget, 0, CubemapFace.Unknown, -1);
#endif
        var pixelRect = data.useFullScreenViewport ? new Rect(0f, 0f, Screen.width, Screen.height) : cameraData.pixelRect;
#if URP_COMPATIBILITY_MODE
        RenderingUtils.SetupOffscreenUIViewportParams(data.blitMaterialData.material, ref pixelRect, GraphicsSettings.GetRenderPipelineSettings<RenderGraphSettings>().enableRenderCompatibilityMode ? false : isRenderToBackBufferTarget);
#else
        RenderingUtils.SetupOffscreenUIViewportParams(data.blitMaterialData.material, ref pixelRect, isRenderToBackBufferTarget);
#endif
        if (isRenderToBackBufferTarget)
            cmd.SetViewport(pixelRect);
        // turn off any global wireframe & "scene view wireframe shader hijack" settings for doing blits:
        // we never want them to show up as wireframe
        cmd.SetWireframe(false);
        CoreUtils.SetKeyword(data.blitMaterialData.material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, data.enableAlphaOutput);
        // Pick the shader pass matching the source texture's filter mode.
        int shaderPassIndex = source.rt?.filterMode == FilterMode.Bilinear ? data.blitMaterialData.bilinearSamplerPass : data.blitMaterialData.nearestSamplerPass;
        Blitter.BlitTexture(cmd, source, scaleBias, data.blitMaterialData.material, shaderPassIndex);
    }
    // Data captured at record time and consumed by the render function.
    private class PassData
    {
        internal TextureHandle source;
        internal TextureHandle destination;
        // Shader property ID the source texture is bound to (ShaderPropertyId.sourceTex).
        internal int sourceID;
        // w < 0 signals that HDR output is not active (see Render()).
        internal Vector4 hdrOutputLuminanceParams;
        internal bool requireSrgbConversion;
        internal bool enableAlphaOutput;
        internal BlitMaterialData blitMaterialData;
        internal UniversalCameraData cameraData;
        internal bool useFullScreenViewport;
    }
    /// <summary>
    /// Initialize the shared pass data.
    /// </summary>
    /// <param name="cameraData">Camera data for the current frame.</param>
    /// <param name="passData">The pass data instance to fill in.</param>
    /// <param name="blitType">Selects the Core or HDR blit material.</param>
    /// <param name="enableAlphaOutput">Whether the blit shader should output alpha.</param>
    /// <param name="useFullScreenViewport">Whether to use the full screen instead of the camera's pixel rect.</param>
    private void InitPassData(UniversalCameraData cameraData, ref PassData passData, BlitType blitType, bool enableAlphaOutput, bool useFullScreenViewport)
    {
        passData.cameraData = cameraData;
        passData.requireSrgbConversion = cameraData.requireSrgbConversion;
        passData.enableAlphaOutput = enableAlphaOutput;
        passData.useFullScreenViewport = useFullScreenViewport;
        passData.blitMaterialData = m_BlitMaterialData[(int)blitType];
    }
    // Render-graph entry point: records the final blit from src to dest as a raster render pass,
    // optionally compositing the HDR overlay UI texture.
    internal void Render(RenderGraph renderGraph, ContextContainer frameData, UniversalCameraData cameraData, in TextureHandle src, in TextureHandle dest, TextureHandle overlayUITexture, bool useFullScreenViewport = false)
    {
        using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
        {
            // NOTE(review): resourceData is not read below — kept as-is.
            UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
            // Only the UniversalRenderer guarantees that global textures will be available at this point
            bool isUniversalRenderer = (cameraData.renderer as UniversalRenderer) != null;
            if (cameraData.requiresDepthTexture && isUniversalRenderer)
                builder.UseGlobalTexture(s_CameraDepthTextureID);
            bool outputsToHDR = cameraData.isHDROutputActive;
            bool outputsAlpha = cameraData.isAlphaOutputEnabled;
            InitPassData(cameraData, ref passData, outputsToHDR ? BlitType.HDR : BlitType.Core, outputsAlpha, useFullScreenViewport);
            passData.sourceID = ShaderPropertyId.sourceTex;
            passData.source = src;
            builder.UseTexture(src, AccessFlags.Read);
            passData.destination = dest;
            // Default flag for non-XR common case
            AccessFlags targetAccessFlag = AccessFlags.Write;
#if ENABLE_VR && ENABLE_XR_MODULE
            // This is a screen-space pass, make sure foveated rendering is disabled for non-uniform renders
            bool passSupportsFoveation = !XRSystem.foveatedRenderingCaps.HasFlag(FoveatedRenderingCaps.NonUniformRaster);
            builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && passSupportsFoveation);
            builder.SetExtendedFeatureFlags(ExtendedFeatureFlags.MultiviewRenderRegionsCompatible);
            // Optimization: In XR, we don't have split screen use case.
            // The access flag can be set to WriteAll if there is a full screen blit and no alpha blending,
            // so engine will set loadOperation to DontCare down to the pipe.
            if (cameraData.xr.enabled && cameraData.isDefaultViewport && !outputsAlpha)
                targetAccessFlag = AccessFlags.WriteAll;
#endif
            builder.SetRenderAttachment(dest, 0, targetAccessFlag);
            if (outputsToHDR && overlayUITexture.IsValid())
            {
                VolumeStack stack = VolumeManager.instance.stack;
                Tonemapping tonemapping = stack.GetComponent<Tonemapping>();
                UniversalRenderPipeline.GetHDROutputLuminanceParameters(passData.cameraData.hdrDisplayInformation, passData.cameraData.hdrDisplayColorGamut, tonemapping, out passData.hdrOutputLuminanceParams);
                builder.UseTexture(overlayUITexture, AccessFlags.Read);
            }
            else
            {
                // Negative w tells the render function below that HDR output is inactive.
                passData.hdrOutputLuminanceParams = new Vector4(-1.0f, -1.0f, -1.0f, -1.0f);
            }
            builder.AllowGlobalStateModification(true);
            builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
            {
                // Reset keyword state before reconfiguring the material for this blit.
                data.blitMaterialData.material.enabledKeywords = null;
                context.cmd.SetKeyword(ShaderGlobalKeywords.LinearToSRGBConversion, data.requireSrgbConversion);
                data.blitMaterialData.material.SetTexture(data.sourceID, data.source);
                DebugHandler debugHandler = GetActiveDebugHandler(data.cameraData);
                bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(data.cameraData.resolveFinalTarget);
                // TODO RENDERGRAPH: this should ideally be shared in ExecutePass to avoid code duplication
                if (data.hdrOutputLuminanceParams.w >= 0)
                {
                    HDROutputUtils.Operation hdrOperation = HDROutputUtils.Operation.None;
                    // If the HDRDebugView is on, we don't want the encoding
                    if (debugHandler == null || !debugHandler.HDRDebugViewIsActive(data.cameraData.resolveFinalTarget))
                        hdrOperation |= HDROutputUtils.Operation.ColorEncoding;
                    // Color conversion may have happened in the Uber post process through color grading, so we don't want to reapply it
                    if (!data.cameraData.postProcessEnabled)
                        hdrOperation |= HDROutputUtils.Operation.ColorConversion;
                    SetupHDROutput(data.cameraData.hdrDisplayColorGamut, data.blitMaterialData.material, hdrOperation, data.hdrOutputLuminanceParams, data.cameraData.rendersOverlayUI);
                }
                if (resolveToDebugScreen)
                {
                    // Blit to the debug screen texture instead of the camera target.
                    RTHandle sourceTex = data.source;
                    Vector2 viewportScale = sourceTex.useScaling ? new Vector2(sourceTex.rtHandleProperties.rtHandleScale.x, sourceTex.rtHandleProperties.rtHandleScale.y) : Vector2.one;
                    int shaderPassIndex = sourceTex.rt?.filterMode == FilterMode.Bilinear ? data.blitMaterialData.bilinearSamplerPass : data.blitMaterialData.nearestSamplerPass;
                    Blitter.BlitTexture(context.cmd, sourceTex, viewportScale, data.blitMaterialData.material, shaderPassIndex);
                }
                else
                {
                    Vector4 scaleBias = RenderingUtils.GetFinalBlitScaleBias(context, in data.source, in data.destination);
                    ExecutePass(context.cmd, data, data.source, data.destination, data.cameraData, scaleBias);
                }
            });
        }
    }
}
}