Initial project commit

This commit is contained in:
2026-01-08 16:50:20 +00:00
commit f0c5a8b267
29596 changed files with 4861782 additions and 0 deletions

View File

@@ -0,0 +1,360 @@
using System;
using System.Collections.Generic;
using Unity.Collections;
namespace UnityEngine.Rendering.Universal
{
// Computes, for one frame, how the shadow maps of all shadowed additional (punctual) lights are packed
// into the additional lights shadow atlas: which shadow slice requests are kept, how they are prioritized,
// and the square atlas area (offset + resolution) allocated to each kept slice.
internal struct AdditionalLightsShadowAtlasLayout
{
// One shadow slice request (a spot light uses 1 slice, a point light 6 - see
// ShadowUtils.GetPunctualLightShadowSlicesCount) and, once the layout is computed,
// the square atlas area allocated to it.
internal struct ShadowResolutionRequest
{
public ushort visibleLightIndex;
public ushort perLightShadowSliceIndex;
public ushort requestedResolution;
public ushort offsetX; // x coordinate of the square area allocated in the atlas for this shadow map
public ushort offsetY; // y coordinate of the square area allocated in the atlas for this shadow map
public ushort allocatedResolution; // width of the square area allocated in the atlas for this shadow map
// Boolean properties are packed into a single ushort of flags to keep the struct small.
[Flags]
private enum SettingsOptions : ushort
{
None = 0,
SoftShadow = (1 << 0),
PointLightShadow = (1 << 1),
All = 0xFFFF
}
private SettingsOptions m_ShadowProperties;
// True when this slice uses soft (filtered) shadows; wraps the SoftShadow flag bit.
public bool softShadow
{
get => m_ShadowProperties.HasFlag(SettingsOptions.SoftShadow); // otherwise it's hard-shadow (no filtering)
set
{
if (value)
m_ShadowProperties |= SettingsOptions.SoftShadow;
else
m_ShadowProperties &= ~SettingsOptions.SoftShadow;
}
}
// True when the casting light is a point light; wraps the PointLightShadow flag bit.
public bool pointLightShadow
{
get => m_ShadowProperties.HasFlag(SettingsOptions.PointLightShadow); // otherwise it's spot light shadow (1 shadow slice instead of 6)
set
{
if (value)
m_ShadowProperties |= SettingsOptions.PointLightShadow;
else
m_ShadowProperties &= ~SettingsOptions.PointLightShadow;
}
}
}
// Static fields used to avoid GC allocs of intermediate computations
// (shared across frames; see ClearStaticCaches for teardown).
static List<RectInt> s_UnusedAtlasSquareAreas; // This list tracks space available in the atlas
static List<ShadowResolutionRequest> s_ShadowResolutionRequests; // intermediate array used to compute the final resolution of each shadow slice rendered in the frame
static float[] s_VisibleLightIndexToCameraSquareDistance; // stores for each shadowed additional light its (squared) distance to camera ; used to sub-sort shadow requests according to how close their casting light is
static Func<ShadowResolutionRequest, ShadowResolutionRequest, int> s_CompareShadowResolutionRequest;
static ShadowResolutionRequest[] s_SortedShadowResolutionRequests;
// Per-frame results; the NativeArrays below are Allocator.Temp, so this layout is only valid
// for the frame in which the constructor ran.
NativeArray<ShadowResolutionRequest> m_SortedShadowResolutionRequests;
NativeArray<int> m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex; // for each visible light, store the index of its first shadow slice in m_SortedShadowResolutionRequests (for quicker access)
int m_TotalShadowSlicesCount;
int m_TotalShadowResolutionRequestCount;
bool m_TooManyShadowMaps;
int m_ShadowSlicesScaleFactor;
int m_AtlasSize;
// Builds the atlas layout: collects one request per shadow slice of each valid shadow-casting
// additional light, sorts the requests by priority, drops the lowest-priority requests whose
// resolution would be too small to look good, then greedily packs the remainder into the atlas,
// halving all resolutions (shadowSlicesScaleFactor *= 2) until everything fits or the minimal
// resolution is hit (m_TooManyShadowMaps).
public AdditionalLightsShadowAtlasLayout(UniversalLightData lightData, UniversalShadowData shadowData, UniversalCameraData cameraData)
{
bool useStructuredBuffer = RenderingUtils.useStructuredBuffer;
NativeArray<VisibleLight> visibleLights = lightData.visibleLights;
int numberOfVisibleLights = visibleLights.Length;
// Lazily (re)create the static scratch buffers.
if (s_UnusedAtlasSquareAreas == null)
s_UnusedAtlasSquareAreas = new List<RectInt>();
if (s_ShadowResolutionRequests == null)
s_ShadowResolutionRequests = new List<ShadowResolutionRequest>();
if (s_VisibleLightIndexToCameraSquareDistance == null || s_VisibleLightIndexToCameraSquareDistance.Length < numberOfVisibleLights)
s_VisibleLightIndexToCameraSquareDistance = new float[numberOfVisibleLights];
if (s_CompareShadowResolutionRequest == null)
s_CompareShadowResolutionRequest = CreateCompareShadowResolutionRequesPredicate();
if (!useStructuredBuffer)
{
int newCapacity = UniversalRenderPipeline.maxVisibleAdditionalLights;
if (s_UnusedAtlasSquareAreas.Capacity < newCapacity)
s_UnusedAtlasSquareAreas.Capacity = newCapacity;
if (s_ShadowResolutionRequests.Count < numberOfVisibleLights)
{
s_ShadowResolutionRequests.Capacity = numberOfVisibleLights;
// NOTE(review): the "+ 1" grows the list one element past numberOfVisibleLights; looks
// harmless since the collection loop below also grows the list on demand - confirm intent.
int diff = numberOfVisibleLights - s_ShadowResolutionRequests.Count + 1;
for (int i = 0; i < diff; i++)
s_ShadowResolutionRequests.Add(new ShadowResolutionRequest());
}
}
s_UnusedAtlasSquareAreas.Clear();
// Pass 1: collect one ShadowResolutionRequest per shadow slice of each valid shadow-casting light.
ushort totalShadowResolutionRequestsCount = 0; // Number of shadow slices that we would need for all shadowed additional (punctual) lights in the scene. We might have to ignore some of those requests if they do not fit in the shadow atlas.
for (int visibleLightIndex = 0; visibleLightIndex < visibleLights.Length; ++visibleLightIndex)
{
// Skip main directional light as it is not packed into the shadow atlas
if (visibleLightIndex == lightData.mainLightIndex)
{
s_VisibleLightIndexToCameraSquareDistance[visibleLightIndex] = float.MaxValue;
continue;
}
ref VisibleLight vl = ref visibleLights.UnsafeElementAt(visibleLightIndex);
Light light = vl.light;
LightType lightType = vl.lightType;
LightShadows lightShadows = light.shadows;
float shadowStrength = light.shadowStrength;
// Non-shadow-casting lights get a MaxValue distance so the comparer sorts them last.
if (!ShadowUtils.IsValidShadowCastingLight(lightData, visibleLightIndex, lightType, lightShadows, shadowStrength))
{
s_VisibleLightIndexToCameraSquareDistance[visibleLightIndex] = float.MaxValue;
continue;
}
bool softShadows = (lightShadows == LightShadows.Soft);
bool pointLightShadow = (lightType == LightType.Point);
ushort visibleLightIndexUshort = (ushort)visibleLightIndex;
ushort requestedResolution = (ushort)shadowData.resolution[visibleLightIndex];
int shadowSlicesCountForThisLight = ShadowUtils.GetPunctualLightShadowSlicesCount(lightType);
for (ushort perLightShadowSliceIndex = 0; perLightShadowSliceIndex < shadowSlicesCountForThisLight; ++perLightShadowSliceIndex)
{
if (totalShadowResolutionRequestsCount >= s_ShadowResolutionRequests.Count)
s_ShadowResolutionRequests.Add(new ShadowResolutionRequest());
ShadowResolutionRequest request = s_ShadowResolutionRequests[totalShadowResolutionRequestsCount];
request.visibleLightIndex = visibleLightIndexUshort;
request.perLightShadowSliceIndex = perLightShadowSliceIndex;
request.requestedResolution = requestedResolution;
request.softShadow = softShadows;
request.pointLightShadow = pointLightShadow;
s_ShadowResolutionRequests[totalShadowResolutionRequestsCount] = request;
totalShadowResolutionRequestsCount++;
}
// mark this light as casting shadows
s_VisibleLightIndexToCameraSquareDistance[visibleLightIndex] = (cameraData.worldSpaceCameraPos - light.transform.position).sqrMagnitude;
}
// Pass 2: copy the requests into the sort buffer and sort them by priority (see the comparer below).
if (s_SortedShadowResolutionRequests == null || s_SortedShadowResolutionRequests.Length < totalShadowResolutionRequestsCount)
s_SortedShadowResolutionRequests = new ShadowResolutionRequest[totalShadowResolutionRequestsCount];
for (int i = 0; i < totalShadowResolutionRequestsCount; ++i)
s_SortedShadowResolutionRequests[i] = s_ShadowResolutionRequests[i];
using (new ProfilingScope(Sorting.s_QuickSortSampler))
{
Sorting.QuickSort(s_SortedShadowResolutionRequests, 0, totalShadowResolutionRequestsCount - 1, s_CompareShadowResolutionRequest);
}
m_SortedShadowResolutionRequests = new NativeArray<ShadowResolutionRequest>(s_SortedShadowResolutionRequests, Allocator.Temp);
// To avoid visual artifacts when there is not enough place in the atlas, we remove shadow slices that would be allocated a too small resolution.
// When not using structured buffers, m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix.Length maps to _AdditionalLightsWorldToShadow in Shadows.hlsl
// In that case we have to limit its size because uniform buffers cannot be higher than 64kb for some platforms.
int totalShadowSlicesCount = useStructuredBuffer ? totalShadowResolutionRequestsCount : Math.Min(totalShadowResolutionRequestsCount, UniversalRenderPipeline.maxVisibleAdditionalLights); // Number of shadow slices that we will actually be able to fit in the shadow atlas without causing visual artifacts.
int atlasSize = shadowData.additionalLightsShadowmapWidth;
// Find biggest end index in m_SortedShadowResolutionRequests array, under which all shadow requests can be allocated a big enough shadow atlas slot, to not cause rendering artifacts
bool allShadowsAfterStartIndexHaveEnoughResolution = false;
int estimatedScaleFactor = 1;
while (!allShadowsAfterStartIndexHaveEnoughResolution && totalShadowSlicesCount > 0)
{
ShadowResolutionRequest request = m_SortedShadowResolutionRequests[totalShadowSlicesCount - 1];
estimatedScaleFactor = EstimateScaleFactorNeededToFitAllShadowsInAtlas(m_SortedShadowResolutionRequests, totalShadowSlicesCount, atlasSize);
// check if resolution of the least priority shadow slice request would be acceptable
if (request.requestedResolution >= estimatedScaleFactor * ShadowUtils.MinimalPunctualLightShadowResolution(request.softShadow))
allShadowsAfterStartIndexHaveEnoughResolution = true;
else // Skip shadow requests for this light ; their resolution is too small to look any good
totalShadowSlicesCount -= ShadowUtils.GetPunctualLightShadowSlicesCount(request.pointLightShadow ? LightType.Point : LightType.Spot);
}
for (int sortedArrayIndex = totalShadowSlicesCount; sortedArrayIndex < m_SortedShadowResolutionRequests.Length; ++sortedArrayIndex)
m_SortedShadowResolutionRequests[sortedArrayIndex] = default; // Reset entries that we cannot fit in the atlas
m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex = new NativeArray<int>(visibleLights.Length, Allocator.Temp);
// Reset the reverse lookup array
for (int visibleLightIndex = 0; visibleLightIndex < m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex.Length; ++visibleLightIndex)
m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex[visibleLightIndex] = -1;
// Update the reverse lookup array (starting from the end of the array, in order to use index of slice#0 in case a same visibleLight has several shadowSlices)
for (int sortedArrayIndex = totalShadowSlicesCount - 1; sortedArrayIndex >= 0; --sortedArrayIndex)
{
int visibleLightIndex = s_SortedShadowResolutionRequests[sortedArrayIndex].visibleLightIndex;
m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex[visibleLightIndex] = sortedArrayIndex;
}
// Assigns to each of the first totalShadowSlicesCount items in m_SortedShadowResolutionRequests a location in the shadow atlas based on requested resolutions.
// If necessary, scales down shadow maps active in the frame, to make all of them fit in the atlas.
bool allShadowSlicesFitInAtlas = false;
bool tooManyShadows = false;
int shadowSlicesScaleFactor = estimatedScaleFactor;
while (!allShadowSlicesFitInAtlas && !tooManyShadows)
{
// Start each attempt with the whole atlas as a single free square.
s_UnusedAtlasSquareAreas.Clear();
s_UnusedAtlasSquareAreas.Add(new RectInt(0, 0, atlasSize, atlasSize));
allShadowSlicesFitInAtlas = true;
for (int shadowRequestIndex = 0; shadowRequestIndex < totalShadowSlicesCount; ++shadowRequestIndex)
{
var resolution = m_SortedShadowResolutionRequests[shadowRequestIndex].requestedResolution / shadowSlicesScaleFactor;
if (resolution < ShadowUtils.MinimalPunctualLightShadowResolution(m_SortedShadowResolutionRequests[shadowRequestIndex].softShadow))
{
tooManyShadows = true;
break;
}
bool foundSpaceInAtlas = false;
// Try to find free space in the atlas
for (int unusedAtlasSquareAreaIndex = 0; unusedAtlasSquareAreaIndex < s_UnusedAtlasSquareAreas.Count; ++unusedAtlasSquareAreaIndex)
{
RectInt atlasArea = s_UnusedAtlasSquareAreas[unusedAtlasSquareAreaIndex];
int atlasAreaWidth = atlasArea.width;
if (atlasAreaWidth < resolution)
continue;
int atlasAreaHeight = atlasArea.height;
int atlasAreaX = atlasArea.x;
int atlasAreaY = atlasArea.y;
// we can use this atlas area for the shadow request
ref ShadowResolutionRequest shadowRequest = ref m_SortedShadowResolutionRequests.UnsafeElementAtMutable(shadowRequestIndex);
shadowRequest.offsetX = (ushort)atlasAreaX;
shadowRequest.offsetY = (ushort)atlasAreaY;
shadowRequest.allocatedResolution = (ushort) resolution;
// this atlas space is not available anymore, so remove it from the list
s_UnusedAtlasSquareAreas.RemoveAt(unusedAtlasSquareAreaIndex);
// make sure to split space so that the rest of this square area can be used
int remainingShadowRequestsCount = totalShadowSlicesCount - shadowRequestIndex - 1; // (no need to add more than that)
int newSquareAreasCount = 0;
int newSquareAreaWidth = resolution; // we split the area in squares of same size
int newSquareAreaHeight = resolution;
int newSquareAreaX = atlasAreaX;
int newSquareAreaY = atlasAreaY;
while (newSquareAreasCount < remainingShadowRequestsCount)
{
newSquareAreaX += newSquareAreaWidth;
if (newSquareAreaX + newSquareAreaWidth > (atlasAreaX + atlasAreaWidth))
{
newSquareAreaX = atlasAreaX;
newSquareAreaY += newSquareAreaHeight;
if (newSquareAreaY + newSquareAreaHeight > (atlasAreaY + atlasAreaHeight))
break;
}
// replace the space we removed previously by new smaller squares (inserting them in this order ensures shadow maps will be packed at the side of the atlas, without gaps)
s_UnusedAtlasSquareAreas.Insert(unusedAtlasSquareAreaIndex + newSquareAreasCount, new RectInt(newSquareAreaX, newSquareAreaY, newSquareAreaWidth, newSquareAreaHeight));
++newSquareAreasCount;
}
foundSpaceInAtlas = true;
break;
}
if (!foundSpaceInAtlas)
{
allShadowSlicesFitInAtlas = false;
break;
}
}
// Could not fit at the current scale: halve every resolution and retry the packing.
if (!allShadowSlicesFitInAtlas && !tooManyShadows)
shadowSlicesScaleFactor *= 2;
}
m_TooManyShadowMaps = tooManyShadows;
m_ShadowSlicesScaleFactor = shadowSlicesScaleFactor;
m_TotalShadowSlicesCount = totalShadowSlicesCount;
m_TotalShadowResolutionRequestCount = totalShadowResolutionRequestsCount;
m_AtlasSize = atlasSize;
}
// Number of shadow slices actually packed into the atlas this frame.
public int GetTotalShadowSlicesCount() => m_TotalShadowSlicesCount;
// Number of shadow slices that were requested (before any were dropped).
public int GetTotalShadowResolutionRequestCount() => m_TotalShadowResolutionRequestCount;
// True when the packing loop gave up because some slice fell below the minimal usable resolution.
public bool HasTooManyShadowMaps() => m_TooManyShadowMaps;
// Final power-of-two divisor applied to every requested resolution to make the slices fit.
public int GetShadowSlicesScaleFactor() => m_ShadowSlicesScaleFactor;
// Width (== height) of the square shadow atlas, in texels.
public int GetAtlasSize() => m_AtlasSize;
// True when at least one shadow slice of this light was kept in the atlas.
public bool HasSpaceForLight(int originalVisibleLightIndex)
{
return m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex[originalVisibleLightIndex] != -1;
}
// Returns the request at the given index in priority-sorted order.
public ShadowResolutionRequest GetSortedShadowResolutionRequest(int sortedShadowResolutionRequestIndex)
{
return m_SortedShadowResolutionRequests[sortedShadowResolutionRequestIndex];
}
// Returns the request for a given light and per-light slice index, using the reverse lookup
// built in the constructor (slices of one light are contiguous in the sorted array).
public ShadowResolutionRequest GetSliceShadowResolutionRequest(int originalVisibleLightIndex, int sliceIndex)
{
int sortedShadowResolutionRequestIndex = m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex[originalVisibleLightIndex];
return m_SortedShadowResolutionRequests[sortedShadowResolutionRequestIndex + sliceIndex];
}
// Releases the static scratch buffers so they can be garbage-collected.
public static void ClearStaticCaches()
{
s_UnusedAtlasSquareAreas = null;
s_ShadowResolutionRequests = null;
s_VisibleLightIndexToCameraSquareDistance = null;
s_CompareShadowResolutionRequest = null;
s_SortedShadowResolutionRequests = null;
}
// Returns the smallest power-of-two factor by which all requested resolutions in
// [0, endIndex) must be divided so that their total texel count fits in the atlas.
// This is only an estimate: it ignores packing constraints (square subdivision),
// so the packing loop above may still have to scale further.
static int EstimateScaleFactorNeededToFitAllShadowsInAtlas(in NativeArray<ShadowResolutionRequest> shadowResolutionRequests, int endIndex, int atlasSize)
{
long totalTexelsInShadowAtlas = atlasSize * atlasSize;
long totalTexelsInShadowRequests = 0;
for (int shadowRequestIndex = 0; shadowRequestIndex < endIndex; ++shadowRequestIndex)
totalTexelsInShadowRequests += shadowResolutionRequests[shadowRequestIndex].requestedResolution * shadowResolutionRequests[shadowRequestIndex].requestedResolution;
int estimatedScaleFactor = 1;
while (totalTexelsInShadowRequests > totalTexelsInShadowAtlas * estimatedScaleFactor * estimatedScaleFactor)
estimatedScaleFactor *= 2;
return estimatedScaleFactor;
}
// Sort array in decreasing requestedResolution order,
// sub-sorting in "HardShadow > SoftShadow",
// i.e place last requests that will be removed in priority to make room for the others,
// because their resolution is too small to produce good-looking shadows ; or because they take relatively more space in the atlas )
// sub-sub-sorting in light distance to camera
// then grouping in increasing visibleIndex (and sub-sorting each group in ShadowSliceIndex order)
// NOTE(review): this comparer never returns 0 - two equal requests compare as "greater" in both
// directions. Assumed acceptable for Sorting.QuickSort; verify before reusing it with a sort
// algorithm that requires a consistent (antisymmetric) comparison.
static Func<ShadowResolutionRequest, ShadowResolutionRequest, int> CreateCompareShadowResolutionRequesPredicate()
{
return (ShadowResolutionRequest curr, ShadowResolutionRequest other) =>
{
return (((curr.requestedResolution > other.requestedResolution)
|| (curr.requestedResolution == other.requestedResolution && !curr.softShadow && other.softShadow)
|| (curr.requestedResolution == other.requestedResolution && curr.softShadow == other.softShadow && s_VisibleLightIndexToCameraSquareDistance[curr.visibleLightIndex] < s_VisibleLightIndexToCameraSquareDistance[other.visibleLightIndex])
|| (curr.requestedResolution == other.requestedResolution && curr.softShadow == other.softShadow && s_VisibleLightIndexToCameraSquareDistance[curr.visibleLightIndex] == s_VisibleLightIndexToCameraSquareDistance[other.visibleLightIndex] && curr.visibleLightIndex < other.visibleLightIndex)
|| (curr.requestedResolution == other.requestedResolution && curr.softShadow == other.softShadow && s_VisibleLightIndexToCameraSquareDistance[curr.visibleLightIndex] == s_VisibleLightIndexToCameraSquareDistance[other.visibleLightIndex] && curr.visibleLightIndex == other.visibleLightIndex && curr.perLightShadowSliceIndex < other.perLightShadowSliceIndex)))
? -1 : 1;
};
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6fa1a543f4721444d94e0b3200fa22a8
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f46c1a0c3e3a98848a0efbf4e1fd5675
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,74 @@
using System;
using System.Collections.Generic;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Let customizable actions inject commands to capture the camera output.
///
/// You can use this pass to inject capture commands into a command buffer
/// with the goal of having camera capture happening in external code.
/// </summary>
internal class CapturePass : ScriptableRenderPass
{
    // Camera color target resolved at execution time (compatibility, non-render-graph path only).
    RTHandle m_CameraColorHandle;

    public CapturePass(RenderPassEvent evt)
    {
        base.profilingSampler = new ProfilingSampler("Capture Camera output");
        renderPassEvent = evt;
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        CommandBuffer cmd = renderingData.commandBuffer;
        m_CameraColorHandle = renderingData.cameraData.renderer.GetCameraColorBackBuffer(cmd);
        using (new ProfilingScope(cmd, profilingSampler))
        {
            // Hand the camera color target to every registered capture action.
            RenderTargetIdentifier colorTarget = m_CameraColorHandle.nameID;
            var actions = renderingData.cameraData.captureActions;
            actions.Reset();
            while (actions.MoveNext())
                actions.Current(colorTarget, cmd);
        }
    }

    // Data carried into the render-graph pass: the camera color texture and the
    // enumerator of user capture callbacks to invoke on it.
    private class UnsafePassData
    {
        internal TextureHandle source;
        public IEnumerator<Action<RenderTargetIdentifier, CommandBuffer>> captureActions;
    }

    // This function needs to add an unsafe render pass to Render Graph because a raster render pass, which is typically
    // used for rendering with Render Graph, cannot perform the texture readback operations performed with the command
    // buffer in CameraTextureProvider. Unsafe passes can do certain operations that raster render passes cannot do and
    // have access to the full command buffer API.
    public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData)
    {
        UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        using (var builder = renderGraph.AddUnsafePass<UnsafePassData>(passName, out var passData, profilingSampler))
        {
            // Setup up the pass data with cameraColor, which has the correct orientation and position in a built player
            passData.source = resourceData.cameraColor;
            passData.captureActions = cameraData.captureActions;

            // The pass has no render-graph-visible output, so it must not be culled;
            // it reads (and hands out) the camera color texture.
            builder.AllowPassCulling(false);
            builder.UseTexture(resourceData.cameraColor);
            builder.SetRenderFunc((UnsafePassData data, UnsafeGraphContext unsafeContext) =>
            {
                CommandBuffer nativeCmd = CommandBufferHelpers.GetNativeCommandBuffer(unsafeContext.cmd);
                var actions = data.captureActions;
                actions.Reset();
                while (actions.MoveNext())
                    actions.Current(data.source, nativeCmd);
            });
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0f5a9bd148c9a4a3198fdd2365b6e514
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,347 @@
using System;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal.Internal
{
// Note: this pass can't be done at the same time as post-processing as it needs to be done in
// advance in case we're doing on-tile color grading.
/// <summary>
/// Renders a color grading LUT texture.
/// </summary>
public class ColorGradingLutPass : ScriptableRenderPass
{
readonly Material m_LutBuilderLdr;
readonly Material m_LutBuilderHdr;
internal readonly GraphicsFormat m_HdrLutFormat;
internal readonly GraphicsFormat m_LdrLutFormat;
PassData m_PassData;
RTHandle m_InternalLut;
bool m_AllowColorGradingACESHDR = true;
/// <summary>
/// Creates a new <c>ColorGradingLutPass</c> instance.
/// </summary>
/// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
/// <param name="data">The <c>PostProcessData</c> resources to use.</param>
/// <seealso cref="RenderPassEvent"/>
/// <seealso cref="PostProcessData"/>
public ColorGradingLutPass(RenderPassEvent evt, PostProcessData data)
{
profilingSampler = new ProfilingSampler("Blit Color LUT");
renderPassEvent = evt;
overrideCameraTarget = true;
// Wraps a shader in an engine material; logs an error and returns null when the shader
// reference is missing, so the pass can fail gracefully instead of throwing here.
Material Load(Shader shader)
{
if (shader == null)
{
Debug.LogError($"Missing shader. ColorGradingLutPass render pass will not execute. Check for missing reference in the renderer resources.");
return null;
}
return CoreUtils.CreateEngineMaterial(shader);
}
m_LutBuilderLdr = Load(data.shaders.lutBuilderLdrPS);
m_LutBuilderHdr = Load(data.shaders.lutBuilderHdrPS);
// Warm up lut format as IsFormatSupported adds GC pressure...
// UUM-41070: We require `Linear | Render` but with the deprecated FormatUsage this was checking `Blend`
// For now, we keep checking for `Blend` until the performance hit of doing the correct checks is evaluated
const GraphicsFormatUsage kFlags = GraphicsFormatUsage.Blend;
// HDR LUT format selection, best to worst: FP16 -> R11G11B10 float -> 8-bit fallback.
if (SystemInfo.IsFormatSupported(GraphicsFormat.R16G16B16A16_SFloat, kFlags))
m_HdrLutFormat = GraphicsFormat.R16G16B16A16_SFloat;
else if (SystemInfo.IsFormatSupported(GraphicsFormat.B10G11R11_UFloatPack32, kFlags))
// Precision can be too low, if FP16 primary renderTarget is requested by the user.
// But it's better than falling back to R8G8B8A8_UNorm in the worst case.
m_HdrLutFormat = GraphicsFormat.B10G11R11_UFloatPack32;
else
// Obviously using this for log lut encoding is a very bad idea for precision but we
// need it for compatibility reasons and avoid black screens on platforms that don't
// support floating point formats. Expect banding and posterization artifact if this
// ends up being used.
m_HdrLutFormat = GraphicsFormat.R8G8B8A8_UNorm;
m_LdrLutFormat = GraphicsFormat.R8G8B8A8_UNorm;
base.useNativeRenderPass = false;
// Disable ACES tonemapping in the HDR LUT on Adreno 3xx GLES3 devices
// (presumably a device/driver workaround - confirm against the original bug report).
if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3 && Graphics.minOpenGLESVersion <= OpenGLESVersion.OpenGLES30 && SystemInfo.graphicsDeviceName.StartsWith("Adreno (TM) 3"))
m_AllowColorGradingACESHDR = false;
m_PassData = new PassData();
}
/// <summary>
/// Sets up the pass.
/// </summary>
/// <param name="internalLut">The RTHandle to use to render to.</param>
/// <seealso cref="RTHandle"/>
public void Setup(in RTHandle internalLut)
{
// Stored for use by the compatibility (non-render-graph) Execute path.
m_InternalLut = internalLut;
}
/// <summary>
/// Get a descriptor and filter mode for the required texture for this pass
/// </summary>
/// <param name="postProcessingData">The pass will use settings from <c>PostProcessingData</c> for the pass.</param>
/// <param name="descriptor">The <c>RenderTextureDescriptor</c> used by the pass.</param>
/// <param name="filterMode">The <c>FilterMode</c> used by the pass.</param>
public void ConfigureDescriptor(in PostProcessingData postProcessingData, out RenderTextureDescriptor descriptor, out FilterMode filterMode)
{
// Forwards to the UniversalPostProcessingData overload.
ConfigureDescriptor(postProcessingData.universalPostProcessingData, out descriptor, out filterMode);
}
/// <summary>
/// Get a descriptor and filter mode for the required texture for this pass
/// </summary>
/// <param name="postProcessingData">The pass will use settings from <c>PostProcessingData</c> for the pass.</param>
/// <param name="descriptor">The <c>RenderTextureDescriptor</c> used by the pass.</param>
/// <param name="filterMode">The <c>FilterMode</c> used by the pass.</param>
public void ConfigureDescriptor(in UniversalPostProcessingData postProcessingData, out RenderTextureDescriptor descriptor, out FilterMode filterMode)
{
    // The LUT is a strip of lutSize slices laid side by side: (lutSize * lutSize) x lutSize texels.
    int height = postProcessingData.lutSize;
    int width = height * height;
    bool isHdrGrading = postProcessingData.gradingMode == ColorGradingMode.HighDynamicRange;
    GraphicsFormat lutFormat = isHdrGrading ? m_HdrLutFormat : m_LdrLutFormat;
    descriptor = new RenderTextureDescriptor(width, height, lutFormat, 0)
    {
        vrUsage = VRTextureUsage.None // We only need one for both eyes in VR
    };
    filterMode = FilterMode.Bilinear;
}
/// <inheritdoc/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
// Compatibility (non-render-graph) path: copies the frame state into the reusable
// m_PassData, targets the LUT set via Setup(), then runs the shared ExecutePass.
ContextContainer frameData = renderingData.frameData;
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
UniversalPostProcessingData postProcessingData = frameData.Get<UniversalPostProcessingData>();
m_PassData.cameraData = cameraData;
m_PassData.postProcessingData = postProcessingData;
m_PassData.lutBuilderLdr = m_LutBuilderLdr;
m_PassData.lutBuilderHdr = m_LutBuilderHdr;
m_PassData.allowColorGradingACESHDR = m_AllowColorGradingACESHDR;
#if ENABLE_VR && ENABLE_XR_MODULE
// Foveation must not apply while rendering the full-rate LUT.
if (renderingData.cameraData.xr.supportsFoveatedRendering)
renderingData.commandBuffer.SetFoveatedRenderingMode(FoveatedRenderingMode.Disabled);
#endif
CoreUtils.SetRenderTarget(renderingData.commandBuffer, m_InternalLut, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, ClearFlag.None, Color.clear);
ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer), m_PassData, m_InternalLut);
}
// State handed to ExecutePass / the render-graph render function; populated either by
// Execute (compatibility path, reusing m_PassData) or by Render (render-graph path).
private class PassData
{
internal UniversalCameraData cameraData;
internal UniversalPostProcessingData postProcessingData;
internal Material lutBuilderLdr;       // LDR LUT-builder material
internal Material lutBuilderHdr;       // HDR LUT-builder material
internal bool allowColorGradingACESHDR; // false on blacklisted devices (see constructor)
internal TextureHandle internalLut;    // render-graph path only
}
// Bakes the full color-grading chain into the LUT render target: fetches every grading-related
// volume component from the active volume stack, uploads its parameters to the LDR or HDR
// LUT-builder material, selects tonemapping keywords (HDR grading only), then blits one quad.
private static void ExecutePass(RasterCommandBuffer cmd, PassData passData, RTHandle internalLutTarget)
{
var lutBuilderLdr = passData.lutBuilderLdr;
var lutBuilderHdr = passData.lutBuilderHdr;
var allowColorGradingACESHDR = passData.allowColorGradingACESHDR;
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.ColorGradingLUT)))
{
// Fetch all color grading settings
var stack = VolumeManager.instance.stack;
var channelMixer = stack.GetComponent<ChannelMixer>();
var colorAdjustments = stack.GetComponent<ColorAdjustments>();
var curves = stack.GetComponent<ColorCurves>();
var liftGammaGain = stack.GetComponent<LiftGammaGain>();
var shadowsMidtonesHighlights = stack.GetComponent<ShadowsMidtonesHighlights>();
var splitToning = stack.GetComponent<SplitToning>();
var tonemapping = stack.GetComponent<Tonemapping>();
var whiteBalance = stack.GetComponent<WhiteBalance>();
bool hdr = passData.postProcessingData.gradingMode == ColorGradingMode.HighDynamicRange;
// Prepare texture & material
var material = hdr ? lutBuilderHdr : lutBuilderLdr;
// Prepare data
// UI-facing ranges are remapped to shader-facing values here (e.g. hue shift in degrees
// to [0,1] turns, saturation/contrast from percent offsets to multipliers).
var lmsColorBalance = ColorUtils.ColorBalanceToLMSCoeffs(whiteBalance.temperature.value, whiteBalance.tint.value);
var hueSatCon = new Vector4(colorAdjustments.hueShift.value / 360f, colorAdjustments.saturation.value / 100f + 1f, colorAdjustments.contrast.value / 100f + 1f, 0f);
var channelMixerR = new Vector4(channelMixer.redOutRedIn.value / 100f, channelMixer.redOutGreenIn.value / 100f, channelMixer.redOutBlueIn.value / 100f, 0f);
var channelMixerG = new Vector4(channelMixer.greenOutRedIn.value / 100f, channelMixer.greenOutGreenIn.value / 100f, channelMixer.greenOutBlueIn.value / 100f, 0f);
var channelMixerB = new Vector4(channelMixer.blueOutRedIn.value / 100f, channelMixer.blueOutGreenIn.value / 100f, channelMixer.blueOutBlueIn.value / 100f, 0f);
var shadowsHighlightsLimits = new Vector4(
shadowsMidtonesHighlights.shadowsStart.value,
shadowsMidtonesHighlights.shadowsEnd.value,
shadowsMidtonesHighlights.highlightsStart.value,
shadowsMidtonesHighlights.highlightsEnd.value
);
var (shadows, midtones, highlights) = ColorUtils.PrepareShadowsMidtonesHighlights(
shadowsMidtonesHighlights.shadows.value,
shadowsMidtonesHighlights.midtones.value,
shadowsMidtonesHighlights.highlights.value
);
var (lift, gamma, gain) = ColorUtils.PrepareLiftGammaGain(
liftGammaGain.lift.value,
liftGammaGain.gamma.value,
liftGammaGain.gain.value
);
var (splitShadows, splitHighlights) = ColorUtils.PrepareSplitToning(
splitToning.shadows.value,
splitToning.highlights.value,
splitToning.balance.value
);
// _Lut_Params: (height, half-texel U, half-texel V, height scale) - mirrors the
// lutWidth = lutHeight^2 strip layout used by ConfigureDescriptor.
int lutHeight = passData.postProcessingData.lutSize;
int lutWidth = lutHeight * lutHeight;
var lutParameters = new Vector4(lutHeight, 0.5f / lutWidth, 0.5f / lutHeight,
lutHeight / (lutHeight - 1f));
// Fill in constants
material.SetVector(ShaderConstants._Lut_Params, lutParameters);
material.SetVector(ShaderConstants._ColorBalance, lmsColorBalance);
material.SetVector(ShaderConstants._ColorFilter, colorAdjustments.colorFilter.value.linear);
material.SetVector(ShaderConstants._ChannelMixerRed, channelMixerR);
material.SetVector(ShaderConstants._ChannelMixerGreen, channelMixerG);
material.SetVector(ShaderConstants._ChannelMixerBlue, channelMixerB);
material.SetVector(ShaderConstants._HueSatCon, hueSatCon);
material.SetVector(ShaderConstants._Lift, lift);
material.SetVector(ShaderConstants._Gamma, gamma);
material.SetVector(ShaderConstants._Gain, gain);
material.SetVector(ShaderConstants._Shadows, shadows);
material.SetVector(ShaderConstants._Midtones, midtones);
material.SetVector(ShaderConstants._Highlights, highlights);
material.SetVector(ShaderConstants._ShaHiLimits, shadowsHighlightsLimits);
material.SetVector(ShaderConstants._SplitShadows, splitShadows);
material.SetVector(ShaderConstants._SplitHighlights, splitHighlights);
// YRGB curves
material.SetTexture(ShaderConstants._CurveMaster, curves.master.value.GetTexture());
material.SetTexture(ShaderConstants._CurveRed, curves.red.value.GetTexture());
material.SetTexture(ShaderConstants._CurveGreen, curves.green.value.GetTexture());
material.SetTexture(ShaderConstants._CurveBlue, curves.blue.value.GetTexture());
// Secondary curves
material.SetTexture(ShaderConstants._CurveHueVsHue, curves.hueVsHue.value.GetTexture());
material.SetTexture(ShaderConstants._CurveHueVsSat, curves.hueVsSat.value.GetTexture());
material.SetTexture(ShaderConstants._CurveLumVsSat, curves.lumVsSat.value.GetTexture());
material.SetTexture(ShaderConstants._CurveSatVsSat, curves.satVsSat.value.GetTexture());
// Tonemapping (baked into the lut for HDR)
if (hdr)
{
// Clear any previously-enabled keywords before selecting the tonemapper.
material.shaderKeywords = null;
switch (tonemapping.mode.value)
{
case TonemappingMode.Neutral: material.EnableKeyword(ShaderKeywordStrings.TonemapNeutral); break;
// ACES falls back to Neutral on devices where ACES HDR grading is disallowed (see constructor).
case TonemappingMode.ACES: material.EnableKeyword(allowColorGradingACESHDR ? ShaderKeywordStrings.TonemapACES : ShaderKeywordStrings.TonemapNeutral); break;
default: break; // None
}
// HDR output is active
if (passData.cameraData.isHDROutputActive)
{
Vector4 hdrOutputLuminanceParams;
Vector4 hdrOutputGradingParams;
UniversalRenderPipeline.GetHDROutputLuminanceParameters(passData.cameraData.hdrDisplayInformation, passData.cameraData.hdrDisplayColorGamut, tonemapping, out hdrOutputLuminanceParams);
UniversalRenderPipeline.GetHDROutputGradingParameters(tonemapping, out hdrOutputGradingParams);
material.SetVector(ShaderPropertyId.hdrOutputLuminanceParams, hdrOutputLuminanceParams);
material.SetVector(ShaderPropertyId.hdrOutputGradingParams, hdrOutputGradingParams);
HDROutputUtils.ConfigureHDROutput(material, passData.cameraData.hdrDisplayColorGamut, HDROutputUtils.Operation.ColorConversion);
}
}
// The LUT is a single 2D target: render it outside XR single-pass, then restore.
passData.cameraData.xr.StopSinglePass(cmd);
// Render the lut.
Blitter.BlitTexture(cmd, internalLutTarget, Vector2.one, material, 0);
passData.cameraData.xr.StartSinglePass(cmd);
}
}
/// <summary>
/// Records the color-grading LUT building pass into the render graph.
/// Creates the internal grading LUT texture, binds it as the color attachment
/// and schedules <c>ExecutePass</c> to rasterize the LUT into it.
/// </summary>
/// <param name="renderGraph">The render graph used to record the pass.</param>
/// <param name="frameData">Frame data container; camera and post-processing data are read from it.</param>
/// <param name="internalColorLut">Receives the handle of the "_InternalGradingLut" texture created by this pass.</param>
internal void Render(RenderGraph renderGraph, ContextContainer frameData, out TextureHandle internalColorLut)
{
    UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
    UniversalPostProcessingData postProcessingData = frameData.Get<UniversalPostProcessingData>();

    using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
    {
        // Descriptor (size/format) depends on HDR grading mode; the LUT is written as color attachment 0.
        this.ConfigureDescriptor(in postProcessingData, out var lutDesc, out var filterMode);
        internalColorLut = UniversalRenderer.CreateRenderGraphTexture(renderGraph, lutDesc, "_InternalGradingLut", true, filterMode);

        passData.cameraData = cameraData;
        passData.postProcessingData = postProcessingData;
        passData.internalLut = internalColorLut;
        builder.SetRenderAttachment(internalColorLut, 0, AccessFlags.WriteAll);
        passData.lutBuilderLdr = m_LutBuilderLdr;
        passData.lutBuilderHdr = m_LutBuilderHdr;
        passData.allowColorGradingACESHDR = m_AllowColorGradingACESHDR;

        // TODO RENDERGRAPH: culling? force culling off for testing
        builder.AllowPassCulling(false);

        builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
        {
            ExecutePass(context.cmd, data, data.internalLut);
        });
    }
}
/// <summary>
/// Cleans up resources used by the pass.
/// </summary>
public void Cleanup()
{
    // Release both LUT builder materials created for this pass.
    foreach (var lutMaterial in new[] { m_LutBuilderLdr, m_LutBuilderHdr })
        CoreUtils.Destroy(lutMaterial);
}
// Precomputed shader ids to save some CPU cycles (mostly affects mobile)
// Cached Shader.PropertyToID handles for every material property this pass sets.
static class ShaderConstants
{
    public static readonly int _Lut_Params = Shader.PropertyToID("_Lut_Params");

    // Color adjustments / white balance
    public static readonly int _ColorBalance = Shader.PropertyToID("_ColorBalance");
    public static readonly int _ColorFilter = Shader.PropertyToID("_ColorFilter");

    // Channel mixer
    public static readonly int _ChannelMixerRed = Shader.PropertyToID("_ChannelMixerRed");
    public static readonly int _ChannelMixerGreen = Shader.PropertyToID("_ChannelMixerGreen");
    public static readonly int _ChannelMixerBlue = Shader.PropertyToID("_ChannelMixerBlue");

    public static readonly int _HueSatCon = Shader.PropertyToID("_HueSatCon");

    // Lift / gamma / gain
    public static readonly int _Lift = Shader.PropertyToID("_Lift");
    public static readonly int _Gamma = Shader.PropertyToID("_Gamma");
    public static readonly int _Gain = Shader.PropertyToID("_Gain");

    // Shadows / midtones / highlights
    public static readonly int _Shadows = Shader.PropertyToID("_Shadows");
    public static readonly int _Midtones = Shader.PropertyToID("_Midtones");
    public static readonly int _Highlights = Shader.PropertyToID("_Highlights");
    public static readonly int _ShaHiLimits = Shader.PropertyToID("_ShaHiLimits");

    // Split toning
    public static readonly int _SplitShadows = Shader.PropertyToID("_SplitShadows");
    public static readonly int _SplitHighlights = Shader.PropertyToID("_SplitHighlights");

    // YRGB curves
    public static readonly int _CurveMaster = Shader.PropertyToID("_CurveMaster");
    public static readonly int _CurveRed = Shader.PropertyToID("_CurveRed");
    public static readonly int _CurveGreen = Shader.PropertyToID("_CurveGreen");
    public static readonly int _CurveBlue = Shader.PropertyToID("_CurveBlue");

    // Secondary curves
    public static readonly int _CurveHueVsHue = Shader.PropertyToID("_CurveHueVsHue");
    public static readonly int _CurveHueVsSat = Shader.PropertyToID("_CurveHueVsSat");
    public static readonly int _CurveLumVsSat = Shader.PropertyToID("_CurveLumVsSat");
    public static readonly int _CurveSatVsSat = Shader.PropertyToID("_CurveSatVsSat");
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4ab9de92acc9d124bbc8f9b8e240d9c4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,241 @@
using System;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Experimental.Rendering;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Copy the given color buffer to the given destination color buffer.
///
/// You can use this pass to copy a color buffer to the destination,
/// so you can use it later in rendering. For example, you can copy
/// the opaque texture to use it for distortion effects.
/// </summary>
public class CopyColorPass : ScriptableRenderPass
{
    int m_SampleOffsetShaderHandle;    // Cached property id for "_SampleOffset" (used by the 4x box filter path).
    Material m_SamplingMaterial;       // Material used for the _4xBox downsampling path.
    Downsampling m_DownsamplingMethod; // Active downsampling mode; set via Setup() or the RenderGraph entry points.
    Material m_CopyColorMaterial;      // Material used for the plain-copy and bilinear downsampling paths (may be null).

    // Compatibility-mode (non-RenderGraph) source and destination targets.
    private RTHandle source { get; set; }
    private RTHandle destination { get; set; }

    // Reused per-pass data instance to avoid per-frame allocations in Compatibility Mode.
    private PassData m_PassData;

    /// <summary>
    /// Creates a new <c>CopyColorPass</c> instance.
    /// </summary>
    /// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
    /// <param name="samplingMaterial">The <c>Material</c> to use for downsampling quarter-resolution image with box filtering.</param>
    /// <param name="copyColorMaterial">The <c>Material</c> to use for other downsampling options.</param>
    /// <param name="customPassName">An optional custom profiling name to disambiguate multiple copy passes.</param>
    /// <seealso cref="RenderPassEvent"/>
    /// <seealso cref="Downsampling"/>
    public CopyColorPass(RenderPassEvent evt, Material samplingMaterial, Material copyColorMaterial = null, string customPassName = null)
    {
        profilingSampler = customPassName != null ? new ProfilingSampler(customPassName) : ProfilingSampler.Get(URPProfileId.CopyColor);
        m_PassData = new PassData();

        m_SamplingMaterial = samplingMaterial;
        m_CopyColorMaterial = copyColorMaterial;
        m_SampleOffsetShaderHandle = Shader.PropertyToID("_SampleOffset");
        renderPassEvent = evt;
        m_DownsamplingMethod = Downsampling.None;
        base.useNativeRenderPass = false;
    }

    /// <summary>
    /// Get a descriptor and filter mode for the required texture for this pass.
    /// Forces single-sampled, depth-less output and shrinks the size by 2x or 4x
    /// depending on the downsampling method.
    /// </summary>
    /// <param name="downsamplingMethod"></param>
    /// <param name="descriptor"></param>
    /// <param name="filterMode"></param>
    /// <seealso cref="Downsampling"/>
    /// <seealso cref="RenderTextureDescriptor"/>
    /// <seealso cref="FilterMode"/>
    public static void ConfigureDescriptor(Downsampling downsamplingMethod, ref RenderTextureDescriptor descriptor, out FilterMode filterMode)
    {
        descriptor.msaaSamples = 1;
        descriptor.depthStencilFormat = GraphicsFormat.None;
        if (downsamplingMethod == Downsampling._2xBilinear)
        {
            descriptor.width = Mathf.Max(1, descriptor.width / 2);
            descriptor.height = Mathf.Max(1, descriptor.height / 2);
        }
        else if (downsamplingMethod == Downsampling._4xBox || downsamplingMethod == Downsampling._4xBilinear)
        {
            descriptor.width = Mathf.Max(1, descriptor.width / 4);
            descriptor.height = Mathf.Max(1, descriptor.height / 4);
        }

        // Point filtering for a 1:1 copy, bilinear for any downsampled variant.
        filterMode = downsamplingMethod == Downsampling.None ? FilterMode.Point : FilterMode.Bilinear;
    }

    /// <summary>
    /// Configure the pass with the source and destination to execute on.
    /// </summary>
    /// <param name="source">Source render target.</param>
    /// <param name="destination">Destination render target.</param>
    /// <param name="downsampling">The downsampling method to use.</param>
    [Obsolete("Use RTHandles for source and destination.", true)]
    public void Setup(RenderTargetIdentifier source, RenderTargetHandle destination, Downsampling downsampling)
    {
        throw new NotSupportedException("Setup with RenderTargetIdentifier has been deprecated. Use it with RTHandles instead.");
    }

    /// <summary>
    /// Configure the pass with the source and destination to execute on.
    /// </summary>
    /// <param name="source">Source render target.</param>
    /// <param name="destination">Destination render target.</param>
    /// <param name="downsampling">The downsampling method to use.</param>
    public void Setup(RTHandle source, RTHandle destination, Downsampling downsampling)
    {
        this.source = source;
        this.destination = destination;
        m_DownsamplingMethod = downsampling;
    }

    /// <inheritdoc />
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        // Expose the copy result globally under the destination handle's name (e.g. "_CameraOpaqueTexture").
        cmd.SetGlobalTexture(destination.name, destination.nameID);
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        m_PassData.samplingMaterial = m_SamplingMaterial;
        m_PassData.copyColorMaterial = m_CopyColorMaterial;
        m_PassData.downsamplingMethod = m_DownsamplingMethod;
        m_PassData.sampleOffsetShaderHandle = m_SampleOffsetShaderHandle;

        var cmd = renderingData.commandBuffer;

        // TODO RENDERGRAPH: Do we need a similar check in the RenderGraph path?
        //It is possible that the given color target is now the frontbuffer
        if (source == renderingData.cameraData.renderer.GetCameraColorFrontBuffer(cmd))
        {
            source = renderingData.cameraData.renderer.cameraColorTargetHandle;
        }

#if ENABLE_VR && ENABLE_XR_MODULE
        // Foveation must be off while blitting a full-frame copy.
        if (renderingData.cameraData.xr.supportsFoveatedRendering)
            cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Disabled);
#endif

        ScriptableRenderer.SetRenderTarget(cmd, destination, k_CameraTarget, clearFlag, clearColor);
        ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(cmd), m_PassData, source, renderingData.cameraData.xr.enabled);
    }

    // Performs the actual copy/downsample blit. Shared by the Compatibility Mode and RenderGraph paths.
    private static void ExecutePass(RasterCommandBuffer cmd, PassData passData, RTHandle source, bool useDrawProceduralBlit)
    {
        var samplingMaterial = passData.samplingMaterial;
        var copyColorMaterial = passData.copyColorMaterial;
        var downsamplingMethod = passData.downsamplingMethod;
        var sampleOffsetShaderHandle = passData.sampleOffsetShaderHandle;

        // NOTE(review): only samplingMaterial is null-checked here, while the None/_2xBilinear/_4xBilinear
        // paths use copyColorMaterial (whose constructor default is null) — confirm callers always provide it.
        if (samplingMaterial == null)
        {
            Debug.LogErrorFormat(
                "Missing {0}. Copy Color render pass will not execute. Check for missing reference in the renderer resources.",
                samplingMaterial);
            return;
        }

        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.CopyColor)))
        {
            // Account for RTHandle dynamic scaling when sampling the source.
            Vector2 viewportScale = source.useScaling ? new Vector2(source.rtHandleProperties.rtHandleScale.x, source.rtHandleProperties.rtHandleScale.y) : Vector2.one;
            switch (downsamplingMethod)
            {
                case Downsampling.None:
                    Blitter.BlitTexture(cmd, source, viewportScale, copyColorMaterial, 0);
                    break;
                case Downsampling._2xBilinear:
                    Blitter.BlitTexture(cmd, source, viewportScale, copyColorMaterial, 1);
                    break;
                case Downsampling._4xBox:
                    samplingMaterial.SetFloat(sampleOffsetShaderHandle, 2);
                    Blitter.BlitTexture(cmd, source, viewportScale, samplingMaterial, 0);
                    break;
                case Downsampling._4xBilinear:
                    // Same bilinear shader pass as 2x; the 4x reduction comes from the smaller destination size.
                    Blitter.BlitTexture(cmd, source, viewportScale, copyColorMaterial, 1);
                    break;
            }
        }
    }

    // Data captured at record time and consumed inside the render function.
    private class PassData
    {
        internal TextureHandle source;
        internal TextureHandle destination;
        // internal RenderingData renderingData;
        internal bool useProceduralBlit;
        internal Material samplingMaterial;
        internal Material copyColorMaterial;
        internal Downsampling downsamplingMethod;
        internal int sampleOffsetShaderHandle;
    }

    // RenderGraph entry point: allocates a "_CameraOpaqueTexture" destination sized per the
    // downsampling method and records the copy pass. Returns the destination handle.
    internal TextureHandle Render(RenderGraph renderGraph, ContextContainer frameData, out TextureHandle destination, in TextureHandle source, Downsampling downsampling)
    {
        m_DownsamplingMethod = downsampling;

        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();

        RenderTextureDescriptor descriptor = cameraData.cameraTargetDescriptor;
        ConfigureDescriptor(downsampling, ref descriptor, out var filterMode);
        destination = UniversalRenderer.CreateRenderGraphTexture(renderGraph, descriptor, "_CameraOpaqueTexture", true, filterMode);

        RenderInternal(renderGraph, destination, source, cameraData.xr.enabled);

        return destination;
    }

    // This will not create a new texture, but will reuse an existing one as destination.
    // Typical use case is a persistent texture imported to the render graph. For example history textures.
    // Note that the amount of downsampling is determined by the destination size.
    // Therefore, the downsampling param controls only the algorithm (shader) used for the downsampling, not size.
    internal void RenderToExistingTexture(RenderGraph renderGraph, ContextContainer frameData, in TextureHandle destination, in TextureHandle source, Downsampling downsampling = Downsampling.None)
    {
        m_DownsamplingMethod = downsampling;

        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();

        RenderInternal(renderGraph, destination, source, cameraData.xr.enabled);
    }

    // Records the raster pass shared by both RenderGraph entry points above.
    private void RenderInternal(RenderGraph renderGraph, in TextureHandle destination, in TextureHandle source, bool useProceduralBlit)
    {
        using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
        {
            passData.destination = destination;
            builder.SetRenderAttachment(destination, 0, AccessFlags.WriteAll);
            passData.source = source;
            builder.UseTexture(source, AccessFlags.Read);
            passData.useProceduralBlit = useProceduralBlit;

            passData.samplingMaterial = m_SamplingMaterial;
            passData.copyColorMaterial = m_CopyColorMaterial;
            passData.downsamplingMethod = m_DownsamplingMethod;
            passData.sampleOffsetShaderHandle = m_SampleOffsetShaderHandle;

            // Publish the result as the global "_CameraOpaqueTexture" once the pass has run.
            if (destination.IsValid())
                builder.SetGlobalTextureAfterPass(destination, Shader.PropertyToID("_CameraOpaqueTexture"));

            // TODO RENDERGRAPH: culling? force culling off for testing
            builder.AllowPassCulling(false);

            builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
            {
                ExecutePass(context.cmd, data, data.source, data.useProceduralBlit);
            });
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7e18083ec47e7446fac286a4ecd439fc
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,339 @@
using System;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Copy the given depth buffer into the given destination depth buffer.
///
/// You can use this pass to copy a depth buffer to a destination,
/// so you can use it later in rendering. If the source texture has MSAA
/// enabled, the pass uses a custom MSAA resolve. If the source texture
/// does not have MSAA enabled, the pass uses a Blit or a Copy Texture
/// operation, depending on what the current platform supports.
/// </summary>
public class CopyDepthPass : ScriptableRenderPass
{
    // Compatibility-mode (non-RenderGraph) source and destination targets.
    private RTHandle source { get; set; }
    private RTHandle destination { get; set; }

    // TODO RENDERGRAPH: The Render method overwrites this property with -1 before doing anything else. It should only be used in Compatibility Mode!
    internal int MsaaSamples { get; set; }

    // In some cases (Scene view, XR and etc.) we actually want to output to depth buffer
    // So this variable needs to be set to true to enable the correct copy shader semantic
    internal bool CopyToDepth { get; set; }

    // In XR CopyDepth, we need a special workaround to handle dummy color issue in RenderGraph.
    internal bool CopyToDepthXR { get; set; }

    // We need to know if we're copying to the backbuffer in order to handle y-flip correctly
    internal bool CopyToBackbuffer { get; set; }

    Material m_CopyDepthMaterial; // Material created from the copy-depth shader; owned by this pass (see Dispose).

    internal bool m_CopyResolvedDepth; // True when the source depth is already MSAA-resolved.
    internal bool m_ShouldClear;       // Clear the destination before copying (Compatibility Mode only).
    private PassData m_PassData;       // Reused per-pass data instance to avoid per-frame allocations.

    /// <summary>
    /// Shader resource ids used to communicate with the shader implementation
    /// </summary>
    static class ShaderConstants
    {
        public static readonly int _CameraDepthAttachment = Shader.PropertyToID("_CameraDepthAttachment");
        public static readonly int _CameraDepthTexture = Shader.PropertyToID("_CameraDepthTexture");
        public static readonly int _ZWriteShaderHandle = Shader.PropertyToID("_ZWrite");
    }

    /// <summary>
    /// Creates a new <c>CopyDepthPass</c> instance.
    /// </summary>
    /// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
    /// <param name="copyDepthShader">The <c>Shader</c> to use for copying the depth.</param>
    /// <param name="shouldClear">Controls whether it should do a clear before copying the depth.</param>
    /// <param name="copyToDepth">Controls whether it should do a copy to a depth format target.</param>
    /// <param name="copyResolvedDepth">Set to true if the source depth is MSAA resolved.</param>
    /// <param name="customPassName">An optional custom profiling name to disambiguate multiple copy passes.</param>
    /// <seealso cref="RenderPassEvent"/>
    public CopyDepthPass(RenderPassEvent evt, Shader copyDepthShader, bool shouldClear = false, bool copyToDepth = false, bool copyResolvedDepth = false, string customPassName = null)
    {
        profilingSampler = customPassName != null ? new ProfilingSampler(customPassName) : ProfilingSampler.Get(URPProfileId.CopyDepth);
        m_PassData = new PassData();
        CopyToDepth = copyToDepth;
        m_CopyDepthMaterial = copyDepthShader != null ? CoreUtils.CreateEngineMaterial(copyDepthShader) : null;
        renderPassEvent = evt;
        m_CopyResolvedDepth = copyResolvedDepth;
        m_ShouldClear = shouldClear;
        CopyToDepthXR = false;
        CopyToBackbuffer = false;
    }

    /// <summary>
    /// Configure the pass with the source and destination to execute on.
    /// </summary>
    /// <param name="source">Source Render Target</param>
    /// <param name="destination">Destination Render Target</param>
    public void Setup(RTHandle source, RTHandle destination)
    {
        this.source = source;
        this.destination = destination;
        // -1 signals ExecutePass to read the sample count from the source RT itself.
        this.MsaaSamples = -1;
    }

    /// <summary>
    /// Cleans up resources used by the pass.
    /// </summary>
    public void Dispose()
    {
        CoreUtils.Destroy(m_CopyDepthMaterial);
    }

    /// <inheritdoc />
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        // Disable obsolete warning for internal usage
        #pragma warning disable CS0618
#if UNITY_EDITOR
        // This is a temporary workaround for Editor as not setting any depth here
        // would lead to overwriting depth in certain scenarios (reproducible while running DX11 tests)
        if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Direct3D11)
            ConfigureTarget(destination, destination);
        else
#endif
        ConfigureTarget(destination);
        if (m_ShouldClear)
            ConfigureClear(ClearFlag.All, Color.black);
        #pragma warning restore CS0618
    }

    // Data captured at record time and consumed inside the render function.
    private class PassData
    {
        internal TextureHandle source;
        internal UniversalCameraData cameraData;
        internal Material copyDepthMaterial;
        internal int msaaSamples;        // -1 = read sample count from the source RT (RenderGraph path)
        internal bool copyResolvedDepth;
        internal bool copyToDepth;       // write real depth vs. depth-as-color
        internal bool isDstBackbuffer;   // destination is the backbuffer (affects y-flip and viewport)
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        var cameraData = renderingData.frameData.Get<UniversalCameraData>();
        m_PassData.copyDepthMaterial = m_CopyDepthMaterial;
        m_PassData.msaaSamples = MsaaSamples;
        m_PassData.copyResolvedDepth = m_CopyResolvedDepth;
        m_PassData.copyToDepth = CopyToDepth || CopyToDepthXR;
        m_PassData.isDstBackbuffer = CopyToBackbuffer || CopyToDepthXR;
        m_PassData.cameraData = cameraData;
        var cmd = renderingData.commandBuffer;
        // Make the source depth available to the copy shader.
        cmd.SetGlobalTexture(ShaderConstants._CameraDepthAttachment, source.nameID);

#if ENABLE_VR && ENABLE_XR_MODULE
        if (m_PassData.cameraData.xr.enabled)
        {
            // Foveation must be off while performing a full-frame depth copy.
            if (m_PassData.cameraData.xr.supportsFoveatedRendering)
                cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Disabled);
        }
#endif

        ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(cmd), m_PassData, this.source);
    }

    // Performs the actual depth copy blit: selects the MSAA resolve keyword, computes the
    // y-flip scale/bias and blits the source depth with the copy-depth material.
    private static void ExecutePass(RasterCommandBuffer cmd, PassData passData, RTHandle source)
    {
        var copyDepthMaterial = passData.copyDepthMaterial;
        var msaaSamples = passData.msaaSamples;
        var copyResolvedDepth = passData.copyResolvedDepth;
        var copyToDepth = passData.copyToDepth;

        if (copyDepthMaterial == null)
        {
            Debug.LogErrorFormat("Missing {0}. Copy Depth render pass will not execute. Check for missing reference in the renderer resources.", copyDepthMaterial);
            return;
        }
        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.CopyDepth)))
        {
            int cameraSamples = 0;

            // When depth resolve is supported and requested, or multisampled texture is not supported, force camera samples to 1
            if (copyResolvedDepth || SystemInfo.supportsMultisampledTextures == 0)
            {
                cameraSamples = 1;
            }
            else if (msaaSamples == -1) // RG path
            {
                cameraSamples = source.rt.antiAliasing;
            }
            else
            {
                cameraSamples = msaaSamples;
            }

            // Enable exactly one (or none) of the DepthMsaa keywords to match the source sample count.
            switch (cameraSamples)
            {
                case 8:
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa2, false);
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa4, false);
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa8, true);
                    break;

                case 4:
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa2, false);
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa4, true);
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa8, false);
                    break;

                case 2:
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa2, true);
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa4, false);
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa8, false);
                    break;

                // MSAA disabled, auto resolve supported, resolve texture requested, or ms textures not supported
                default:
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa2, false);
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa4, false);
                    cmd.SetKeyword(ShaderGlobalKeywords.DepthMsaa8, false);
                    break;
            }

            cmd.SetKeyword(ShaderGlobalKeywords._OUTPUT_DEPTH, copyToDepth);

            // We must perform a yflip if we're rendering into the backbuffer and we have a flipped source texture.
            bool yflip = passData.isDstBackbuffer && passData.cameraData.IsHandleYFlipped(source);
            Vector2 viewportScale = source.useScaling ? new Vector2(source.rtHandleProperties.rtHandleScale.x, source.rtHandleProperties.rtHandleScale.y) : Vector2.one;
            Vector4 scaleBias = yflip ? new Vector4(viewportScale.x, -viewportScale.y, 0, viewportScale.y) : new Vector4(viewportScale.x, viewportScale.y, 0, 0);

            // When we render to the backbuffer, we update the viewport to cover the entire screen just in case it hasn't been updated already.
            if (passData.isDstBackbuffer)
                cmd.SetViewport(passData.cameraData.pixelRect);

            copyDepthMaterial.SetTexture(ShaderConstants._CameraDepthAttachment, source);
            // ZWrite is only needed when outputting to a real depth target.
            copyDepthMaterial.SetFloat(ShaderConstants._ZWriteShaderHandle, copyToDepth ? 1.0f : 0.0f);
            Blitter.BlitTexture(cmd, source, scaleBias, copyDepthMaterial, 0);
        }
    }

    /// <inheritdoc/>
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        if (cmd == null)
            throw new ArgumentNullException("cmd");

        // Disable obsolete warning for internal usage
        #pragma warning disable CS0618
        destination = k_CameraTarget;
        #pragma warning restore CS0618
    }

    /// <summary>
    /// Sets up the Copy Depth pass for RenderGraph execution
    /// </summary>
    /// <param name="renderGraph">The current RenderGraph used for recording and execution of a frame.</param>
    /// <param name="frameData">The renderer settings containing rendering data of the current frame.</param>
    /// <param name="destination"><c>TextureHandle</c> of the destination it will copy to.</param>
    /// <param name="source"><c>TextureHandle</c> of the source it will copy from.</param>
    /// <param name="bindAsCameraDepth">If this is true, the destination texture is bound as _CameraDepthTexture after the copy pass</param>
    /// <param name="passName">The pass name used for debug and identifying the pass.</param>
    public void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle destination, TextureHandle source, bool bindAsCameraDepth = false, string passName = "Copy Depth")
    {
        UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        Render(renderGraph, destination, source, resourceData, cameraData, bindAsCameraDepth, passName);
    }

    /// <summary>
    /// Sets up the Copy Depth pass for RenderGraph execution
    /// </summary>
    /// <param name="renderGraph">The current RenderGraph used for recording and execution of a frame.</param>
    /// <param name="destination"><c>TextureHandle</c> of the destination it will copy to.</param>
    /// <param name="source"><c>TextureHandle</c> of the source it will copy from.</param>
    /// <param name="resourceData">URP texture handles for the current frame.</param>
    /// <param name="cameraData">Camera settings for the current frame.</param>
    /// <param name="bindAsCameraDepth">If this is true, the destination texture is bound as _CameraDepthTexture after the copy pass</param>
    /// <param name="passName">The pass name used for debug and identifying the pass.</param>
    public void Render(RenderGraph renderGraph, TextureHandle destination, TextureHandle source, UniversalResourceData resourceData, UniversalCameraData cameraData, bool bindAsCameraDepth = false, string passName = "Copy Depth")
    {
        // TODO RENDERGRAPH: should call the equivalent of Setup() to initialise everything correctly
        MsaaSamples = -1;

        // Having a different pass name than profilingSampler.name is bad practice but this method was public before we cleaned up this naming
        using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
        {
            passData.copyDepthMaterial = m_CopyDepthMaterial;
            passData.msaaSamples = MsaaSamples;
            passData.cameraData = cameraData;
            passData.copyResolvedDepth = m_CopyResolvedDepth;
            passData.copyToDepth = CopyToDepth || CopyToDepthXR;
            passData.isDstBackbuffer = CopyToBackbuffer || CopyToDepthXR;

            if (CopyToDepth)
            {
                // Writes depth using custom depth output
                builder.SetRenderAttachmentDepth(destination, AccessFlags.WriteAll);

#if UNITY_EDITOR
                // binding a dummy color target as a workaround to an OSX issue in Editor scene view (UUM-47698).
                // Also required for preview camera rendering for grid drawn with builtin RP (UUM-55171).
                if (cameraData.isSceneViewCamera || cameraData.isPreviewCamera)
                    builder.SetRenderAttachment(resourceData.activeColorTexture, 0);
#endif
            }
            else if (CopyToDepthXR)
            {
                // Writes depth using custom depth output
                builder.SetRenderAttachmentDepth(destination, AccessFlags.WriteAll);

#if ENABLE_VR && ENABLE_XR_MODULE
                // binding a dummy color target as a workaround to NRP depth only rendering limitation:
                // "Attempting to render to a depth only surface with no dummy color attachment"
                if (cameraData.xr.enabled && cameraData.xr.copyDepth)
                {
                    RenderTargetInfo backBufferDesc = renderGraph.GetRenderTargetInfo(resourceData.backBufferColor);

                    // In the case where MSAA is enabled, we have to bind a different dummy texture
                    // This is to ensure that we don't render black in the resolve result of the color backbuffer
                    // This also makes this pass unmergeable in this case, potentially impacting performance
                    if (backBufferDesc.msaaSamples > 1)
                    {
                        TextureHandle dummyXRRenderTarget = renderGraph.CreateTexture(new TextureDesc(backBufferDesc.width, backBufferDesc.height, false, true)
                        {
                            name = "XR Copy Depth Dummy Render Target",
                            slices = backBufferDesc.volumeDepth,
                            format = backBufferDesc.format,
                            msaaSamples = (MSAASamples)backBufferDesc.msaaSamples,
                            clearBuffer = false,
                            bindTextureMS = backBufferDesc.bindMS
                        });
                        builder.SetRenderAttachment(dummyXRRenderTarget, 0);
                    }
                    else
                        builder.SetRenderAttachment(resourceData.backBufferColor, 0);
                }
#endif
            }
            else
            {
                // Writes depth as "grayscale color" output
                builder.SetRenderAttachment(destination, 0, AccessFlags.WriteAll);
            }

            passData.source = source;
            builder.UseTexture(source, AccessFlags.Read);

            // Optionally publish the result as the global "_CameraDepthTexture" once the pass has run.
            if (bindAsCameraDepth && destination.IsValid())
                builder.SetGlobalTextureAfterPass(destination, ShaderConstants._CameraDepthTexture);

            // ExecutePass toggles global shader keywords (DepthMsaaN, _OUTPUT_DEPTH).
            builder.AllowGlobalStateModification(true);

            builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
            {
                ExecutePass(context.cmd, data, data.source);
            });
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d6268b3babfc1004c82ace3c407f46ef
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,130 @@
using System;
using UnityEngine.Experimental.GlobalIllumination;
using UnityEngine.Profiling;
using Unity.Collections;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Experimental.Rendering;
// cleanup code
// listMinDepth and maxDepth should be stored in a different uniform block?
// Point lights stored as vec4
// RelLightIndices should be stored in ushort instead of uint.
// TODO use Unity.Mathematics
// TODO Check if there is a bitarray structure (with dynamic size) available in Unity
namespace UnityEngine.Rendering.Universal.Internal
{
// Render all tiled-based deferred lights.
internal class DeferredPass : ScriptableRenderPass
{
DeferredLights m_DeferredLights;
public DeferredPass(RenderPassEvent evt, DeferredLights deferredLights)
{
profilingSampler = new ProfilingSampler("Render Deferred Lighting");
base.renderPassEvent = evt;
m_DeferredLights = deferredLights;
}
// ScriptableRenderPass
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescripor)
{
var lightingAttachment = m_DeferredLights.GbufferAttachments[m_DeferredLights.GBufferLightingIndex];
var depthAttachment = m_DeferredLights.DepthAttachmentHandle;
if (m_DeferredLights.UseFramebufferFetch)
{
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
ConfigureInputAttachments(m_DeferredLights.DeferredInputAttachments, m_DeferredLights.DeferredInputIsTransient);
#pragma warning restore CS0618
}
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
// TODO: Cannot currently bind depth texture as read-only!
ConfigureTarget(lightingAttachment, depthAttachment);
#pragma warning restore CS0618
}
// ScriptableRenderPass
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
ContextContainer frameData = renderingData.frameData;
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
UniversalLightData lightData = frameData.Get<UniversalLightData>();
UniversalShadowData shadowData = frameData.Get<UniversalShadowData>();
m_DeferredLights.ExecuteDeferredPass(CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer), cameraData, lightData, shadowData);
}
private class PassData
{
internal UniversalCameraData cameraData;
internal UniversalLightData lightData;
internal UniversalShadowData shadowData;
internal TextureHandle color;
internal TextureHandle depth;
internal TextureHandle[] gbuffer;
internal DeferredLights deferredLights;
}
// Render-graph path: records the deferred lighting raster pass.
// `color`/`depth` are bound as render attachments; the GBuffer planes are bound
// either as sampled textures or as framebuffer-fetch input attachments depending
// on platform support (m_DeferredLights.UseFramebufferFetch).
// NOTE: a previously fetched-but-unused UniversalResourceData local was removed.
internal void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle color, TextureHandle depth, TextureHandle[] gbuffer)
{
    UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
    UniversalLightData lightData = frameData.Get<UniversalLightData>();
    UniversalShadowData shadowData = frameData.Get<UniversalShadowData>();

    using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
    {
        passData.cameraData = cameraData;
        passData.lightData = lightData;
        passData.shadowData = shadowData;

        passData.color = color;
        builder.SetRenderAttachment(color, 0, AccessFlags.Write);
        passData.depth = depth;
        builder.SetRenderAttachmentDepth(depth, AccessFlags.Write);
        passData.deferredLights = m_DeferredLights;

        if (!m_DeferredLights.UseFramebufferFetch)
        {
            // Sample the GBuffer as regular textures; the lighting target itself is skipped.
            for (int i = 0; i < gbuffer.Length; ++i)
            {
                if (i != m_DeferredLights.GBufferLightingIndex)
                    builder.UseTexture(gbuffer[i], AccessFlags.Read);
            }
        }
        else
        {
            // Bind GBuffer planes as framebuffer-fetch input attachments. Input slots
            // are packed tightly, so remap the index to skip the lighting target.
            var idx = 0;
            for (int i = 0; i < gbuffer.Length; ++i)
            {
                if (i != m_DeferredLights.GBufferLightingIndex)
                {
                    builder.SetInputAttachment(gbuffer[i], idx, AccessFlags.Read);
                    idx++;
                }
            }
        }

        // The deferred shading pass sets global shader state (keywords, constants).
        builder.AllowGlobalStateModification(true);

        builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
        {
            data.deferredLights.ExecuteDeferredPass(context.cmd, data.cameraData, data.lightData, data.shadowData);
        });
    }
}
// ScriptableRenderPass
public override void OnCameraCleanup(CommandBuffer cmd)
{
    // Let the deferred lighting implementation release/reset its per-camera state.
    m_DeferredLights.OnCameraCleanup(cmd);
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 1d258bcfb9f91794090920274896e493
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,240 @@
using System;
using System.Collections.Generic;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Render all objects that have a 'DepthNormals' and/or 'DepthNormalsOnly' pass into the given depth and normal buffers.
/// </summary>
public class DepthNormalOnlyPass : ScriptableRenderPass
{
    // Shader passes used to select renderers; reset in OnCameraCleanup (see UUM-36069).
    internal List<ShaderTagId> shaderTagIds { get; set; }
    private RTHandle depthHandle { get; set; }
    private RTHandle normalHandle { get; set; }
    private RTHandle renderingLayersHandle { get; set; }
    internal bool enableRenderingLayers { get; set; } = false;
    internal RenderingLayerUtils.MaskSize renderingLayersMaskSize { get; set; }
    private FilteringSettings m_FilteringSettings;
    private PassData m_PassData;

    // Statics — shared scratch arrays avoid per-frame allocations in OnCameraSetup.
    private static readonly List<ShaderTagId> k_DepthNormals = new List<ShaderTagId> { new ShaderTagId("DepthNormals"), new ShaderTagId("DepthNormalsOnly") };
    private static readonly RTHandle[] k_ColorAttachment1 = new RTHandle[1];
    private static readonly RTHandle[] k_ColorAttachment2 = new RTHandle[2];
    internal static readonly string k_CameraNormalsTextureName = "_CameraNormalsTexture";
    private static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture");
    private static readonly int s_CameraNormalsTextureID = Shader.PropertyToID(k_CameraNormalsTextureName);
    private static readonly int s_CameraRenderingLayersTextureID = Shader.PropertyToID("_CameraRenderingLayersTexture");

    /// <summary>
    /// Creates a new <c>DepthNormalOnlyPass</c> instance.
    /// </summary>
    /// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
    /// <param name="renderQueueRange">The <c>RenderQueueRange</c> to use for creating filtering settings that control what objects get rendered.</param>
    /// <param name="layerMask">The layer mask to use for creating filtering settings that control what objects get rendered.</param>
    /// <seealso cref="RenderPassEvent"/>
    /// <seealso cref="RenderQueueRange"/>
    /// <seealso cref="LayerMask"/>
    public DepthNormalOnlyPass(RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask)
    {
        profilingSampler = ProfilingSampler.Get(URPProfileId.DrawDepthNormalPrepass);
        m_PassData = new PassData();
        m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
        renderPassEvent = evt;
        useNativeRenderPass = false;
        this.shaderTagIds = k_DepthNormals;
    }

    /// <summary>
    /// Finds the format to use for the normals texture.
    /// </summary>
    /// <returns>The GraphicsFormat to use with the Normals texture.</returns>
    public static GraphicsFormat GetGraphicsFormat()
    {
        if (SystemInfo.IsFormatSupported(GraphicsFormat.R8G8B8A8_SNorm, GraphicsFormatUsage.Render))
            return GraphicsFormat.R8G8B8A8_SNorm; // Preferred format
        else if (SystemInfo.IsFormatSupported(GraphicsFormat.R16G16B16A16_SFloat, GraphicsFormatUsage.Render))
            return GraphicsFormat.R16G16B16A16_SFloat; // fallback
        else
            return GraphicsFormat.R32G32B32A32_SFloat; // fallback
    }

    /// <summary>
    /// Configures the pass.
    /// </summary>
    /// <param name="depthHandle">The <c>RTHandle</c> used to render depth to.</param>
    /// <param name="normalHandle">The <c>RTHandle</c> used to render normals.</param>
    /// <seealso cref="RTHandle"/>
    public void Setup(RTHandle depthHandle, RTHandle normalHandle)
    {
        this.depthHandle = depthHandle;
        this.normalHandle = normalHandle;
        enableRenderingLayers = false;
    }

    /// <summary>
    /// Configure the pass
    /// </summary>
    /// <param name="depthHandle">The <c>RTHandle</c> used to render depth to.</param>
    /// <param name="normalHandle">The <c>RTHandle</c> used to render normals.</param>
    /// <param name="decalLayerHandle">The <c>RTHandle</c> used to render decals.</param>
    public void Setup(RTHandle depthHandle, RTHandle normalHandle, RTHandle decalLayerHandle)
    {
        Setup(depthHandle, normalHandle);
        renderingLayersHandle = decalLayerHandle;
        enableRenderingLayers = true;
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        // Pick the color attachment layout: normals only, or normals + rendering layers.
        RTHandle[] colorHandles;
        if (enableRenderingLayers)
        {
            k_ColorAttachment2[0] = normalHandle;
            k_ColorAttachment2[1] = renderingLayersHandle;
            colorHandles = k_ColorAttachment2;
        }
        else
        {
            k_ColorAttachment1[0] = normalHandle;
            colorHandles = k_ColorAttachment1;
        }

        // Disable obsolete warning for internal usage
        #pragma warning disable CS0618
        // With depth priming, render into the camera's own (possibly MSAA) depth target
        // so the primed depth can be reused by subsequent passes.
        if (renderingData.cameraData.renderer.useDepthPriming && (renderingData.cameraData.renderType == CameraRenderType.Base || renderingData.cameraData.clearDepth))
            ConfigureTarget(colorHandles, renderingData.cameraData.renderer.cameraDepthTargetHandle);
        else
            ConfigureTarget(colorHandles, depthHandle);
        ConfigureClear(ClearFlag.All, Color.black);
        #pragma warning restore CS0618
    }

    private static void ExecutePass(RasterCommandBuffer cmd, PassData passData, RendererList rendererList)
    {
        // Enable Rendering Layers
        if (passData.enableRenderingLayers)
            cmd.SetKeyword(ShaderGlobalKeywords.WriteRenderingLayers, true);

        // Draw
        cmd.DrawRendererList(rendererList);

        // Clean up
        if (passData.enableRenderingLayers)
            cmd.SetKeyword(ShaderGlobalKeywords.WriteRenderingLayers, false);
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        ContextContainer frameData = renderingData.frameData;
        UniversalRenderingData universalRenderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();

        m_PassData.enableRenderingLayers = enableRenderingLayers;

        var param = InitRendererListParams(universalRenderingData, cameraData, lightData);
        var rendererList = context.CreateRendererList(ref param);

        var cmd = CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer);
        using (new ProfilingScope(cmd, profilingSampler))
        {
            ExecutePass(cmd, m_PassData, rendererList);
        }
    }

    /// <inheritdoc/>
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        if (cmd == null)
        {
            // Use nameof so the reported parameter name survives renames.
            throw new ArgumentNullException(nameof(cmd));
        }

        normalHandle = null;
        depthHandle = null;
        renderingLayersHandle = null;

        // This needs to be reset as the renderer might change this in runtime (UUM-36069)
        shaderTagIds = k_DepthNormals;
    }

    /// <summary>
    /// Shared pass data
    /// </summary>
    private class PassData
    {
        internal TextureHandle cameraDepthTexture;
        internal TextureHandle cameraNormalsTexture;
        internal bool enableRenderingLayers;
        internal RenderingLayerUtils.MaskSize maskSize;
        internal RendererListHandle rendererList;
    }

    // Builds the renderer-list parameters shared by the compat and render-graph paths.
    private RendererListParams InitRendererListParams(UniversalRenderingData renderingData, UniversalCameraData cameraData, UniversalLightData lightData)
    {
        var sortFlags = cameraData.defaultOpaqueSortFlags;
        var drawSettings = RenderingUtils.CreateDrawingSettings(this.shaderTagIds, renderingData, cameraData, lightData, sortFlags);
        drawSettings.perObjectData = PerObjectData.None;
        return new RendererListParams(renderingData.cullResults, drawSettings, m_FilteringSettings);
    }

    // Render-graph path: records the depth+normals (and optional rendering layers) prepass.
    internal void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle cameraNormalsTexture, TextureHandle cameraDepthTexture, TextureHandle renderingLayersTexture, uint batchLayerMask, bool setGlobalDepth, bool setGlobalTextures)
    {
        UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();

        using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
        {
            passData.cameraNormalsTexture = cameraNormalsTexture;
            builder.SetRenderAttachment(cameraNormalsTexture, 0, AccessFlags.Write);
            passData.cameraDepthTexture = cameraDepthTexture;
            builder.SetRenderAttachmentDepth(cameraDepthTexture, AccessFlags.Write);

            passData.enableRenderingLayers = enableRenderingLayers;
            if (passData.enableRenderingLayers)
            {
                builder.SetRenderAttachment(renderingLayersTexture, 1, AccessFlags.Write);
                passData.maskSize = renderingLayersMaskSize;
            }

            var param = InitRendererListParams(renderingData, cameraData, lightData);
            param.filteringSettings.batchLayerMask = batchLayerMask;
            passData.rendererList = renderGraph.CreateRendererList(param);
            builder.UseRendererList(passData.rendererList);

            if (cameraData.xr.enabled)
                builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && cameraData.xrUniversal.canFoveateIntermediatePasses);

            if (setGlobalTextures)
            {
                builder.SetGlobalTextureAfterPass(cameraNormalsTexture, s_CameraNormalsTextureID);

                if (passData.enableRenderingLayers)
                    builder.SetGlobalTextureAfterPass(renderingLayersTexture, s_CameraRenderingLayersTextureID);
            }

            if (setGlobalDepth)
                builder.SetGlobalTextureAfterPass(cameraDepthTexture, s_CameraDepthTextureID);

            // Required here because of RenderingLayerUtils.SetupProperties
            builder.AllowGlobalStateModification(true);

            builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
            {
                RenderingLayerUtils.SetupProperties(context.cmd, data.maskSize);
                ExecutePass(context.cmd, data, data.rendererList);
            });
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6dfa2b31659ab4047b6bc33be7d8f07e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,154 @@
using System;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Render all objects that have a 'DepthOnly' pass into the given depth buffer.
///
/// You can use this pass to prime a depth buffer for subsequent rendering.
/// Use it as a z-prepass, or use it to generate a depth buffer.
/// </summary>
public class DepthOnlyPass : ScriptableRenderPass
{
    private RTHandle destination { get; set; }
    // NOTE(review): written in Setup but not read in the code visible here — confirm it is still consumed elsewhere.
    private GraphicsFormat depthStencilFormat;
    internal ShaderTagId shaderTagId { get; set; } = k_ShaderTagId;

    private PassData m_PassData;
    FilteringSettings m_FilteringSettings;

    // Statics
    private static readonly ShaderTagId k_ShaderTagId = new ShaderTagId("DepthOnly");
    private static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture");

    /// <summary>
    /// Creates a new <c>DepthOnlyPass</c> instance.
    /// </summary>
    /// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
    /// <param name="renderQueueRange">The <c>RenderQueueRange</c> to use for creating filtering settings that control what objects get rendered.</param>
    /// <param name="layerMask">The layer mask to use for creating filtering settings that control what objects get rendered.</param>
    /// <seealso cref="RenderPassEvent"/>
    /// <seealso cref="RenderQueueRange"/>
    /// <seealso cref="LayerMask"/>
    public DepthOnlyPass(RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask)
    {
        profilingSampler = new ProfilingSampler("Draw Depth Only");
        m_PassData = new PassData();
        m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
        renderPassEvent = evt;
        useNativeRenderPass = false;
        this.shaderTagId = k_ShaderTagId;
    }

    /// <summary>
    /// Configures the pass.
    /// </summary>
    /// <param name="baseDescriptor">The <c>RenderTextureDescriptor</c> used for the depthStencilFormat.</param>
    /// <param name="depthAttachmentHandle">The <c>RTHandle</c> used to render to.</param>
    /// <seealso cref="RenderTextureDescriptor"/>
    /// <seealso cref="RTHandle"/>
    /// <seealso cref="GraphicsFormat"/>
    public void Setup(
        RenderTextureDescriptor baseDescriptor,
        RTHandle depthAttachmentHandle)
    {
        this.destination = depthAttachmentHandle;
        this.depthStencilFormat = baseDescriptor.depthStencilFormat;
    }

    /// <inheritdoc />
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        var desc = renderingData.cameraData.cameraTargetDescriptor;

        // Disable obsolete warning for internal usage
        #pragma warning disable CS0618
        // When depth priming is in use the camera target should not be overridden so the Camera's MSAA depth attachment is used.
        if (renderingData.cameraData.renderer.useDepthPriming && (renderingData.cameraData.renderType == CameraRenderType.Base || renderingData.cameraData.clearDepth))
        {
            ConfigureTarget(renderingData.cameraData.renderer.cameraDepthTargetHandle);
            // Only clear depth here so we don't clear any bound color target. It might be unused by this pass but that doesn't mean we can just clear it. (e.g. in case of overlay cameras + depth priming)
            ConfigureClear(ClearFlag.Depth, Color.black);
        }
        // When not using depth priming the camera target should be set to our non MSAA depth target.
        else
        {
            useNativeRenderPass = true;
            ConfigureTarget(destination);
            ConfigureClear(ClearFlag.All, Color.black);
        }
        #pragma warning restore CS0618
    }

    // Shared draw logic for both the compatibility and render-graph paths.
    private static void ExecutePass(RasterCommandBuffer cmd, RendererList rendererList)
    {
        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.DepthPrepass)))
        {
            cmd.DrawRendererList(rendererList);
        }
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        ContextContainer frameData = renderingData.frameData;
        UniversalRenderingData universalRenderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();

        var param = InitRendererListParams(universalRenderingData, cameraData, lightData);
        RendererList rendererList = context.CreateRendererList(ref param);

        ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer), rendererList);
    }

    // Render-graph pass data: only the renderer list is needed at execute time.
    private class PassData
    {
        internal RendererListHandle rendererList;
    }

    // Builds the renderer-list parameters shared by the compat and render-graph paths.
    private RendererListParams InitRendererListParams(UniversalRenderingData renderingData, UniversalCameraData cameraData, UniversalLightData lightData)
    {
        var sortFlags = cameraData.defaultOpaqueSortFlags;
        var drawSettings = RenderingUtils.CreateDrawingSettings(this.shaderTagId, renderingData, cameraData, lightData, sortFlags);
        drawSettings.perObjectData = PerObjectData.None;
        drawSettings.lodCrossFadeStencilMask = 0; // For stencil-based Lod, we use texture dither instead of stencil testing because we have the same shader variants for cross-fade shadow.
        return new RendererListParams(renderingData.cullResults, drawSettings, m_FilteringSettings);
    }

    // Render-graph path: records the depth-only prepass into cameraDepthTexture.
    internal void Render(RenderGraph renderGraph, ContextContainer frameData, ref TextureHandle cameraDepthTexture, uint batchLayerMask, bool setGlobalDepth)
    {
        UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();

        using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
        {
            var param = InitRendererListParams(renderingData, cameraData, lightData);
            param.filteringSettings.batchLayerMask = batchLayerMask;
            passData.rendererList = renderGraph.CreateRendererList(param);
            builder.UseRendererList(passData.rendererList);

            builder.SetRenderAttachmentDepth(cameraDepthTexture, AccessFlags.Write);

            if (setGlobalDepth)
                builder.SetGlobalTextureAfterPass(cameraDepthTexture, s_CameraDepthTextureID);

            builder.AllowGlobalStateModification(true);

            if (cameraData.xr.enabled)
                builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && cameraData.xrUniversal.canFoveateIntermediatePasses);

            builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
            {
                ExecutePass(context.cmd, data.rendererList);
            });
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2b327960b30da614ca5f44f2fef0137a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,486 @@
using System;
using System.Collections.Generic;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Profiling;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Draw objects into the given color and depth target
///
/// You can use this pass to render objects that have a material and/or shader
/// with the pass names UniversalForward or SRPDefaultUnlit.
/// </summary>
public class DrawObjectsPass : ScriptableRenderPass
{
    FilteringSettings m_FilteringSettings;
    RenderStateBlock m_RenderStateBlock;
    List<ShaderTagId> m_ShaderTagIdList = new List<ShaderTagId>();
    bool m_IsOpaque;

    /// <summary>
    /// Used to indicate if the active target of the pass is the back buffer
    /// </summary>
    public bool m_IsActiveTargetBackBuffer; // TODO: Remove this when we remove non-RG path

    /// <summary>
    /// Used to indicate whether transparent objects should receive shadows or not.
    /// </summary>
    public bool m_ShouldTransparentsReceiveShadows;

    PassData m_PassData;

    static readonly int s_DrawObjectPassDataPropID = Shader.PropertyToID("_DrawObjectPassData");

    /// <summary>
    /// Creates a new <c>DrawObjectsPass</c> instance.
    /// </summary>
    /// <param name="profilerTag">The profiler tag used with the pass.</param>
    /// <param name="shaderTagIds"></param>
    /// <param name="opaque">Marks whether the objects are opaque or transparent.</param>
    /// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
    /// <param name="renderQueueRange">The <c>RenderQueueRange</c> to use for creating filtering settings that control what objects get rendered.</param>
    /// <param name="layerMask">The layer mask to use for creating filtering settings that control what objects get rendered.</param>
    /// <param name="stencilState">The stencil settings to use with this pass.</param>
    /// <param name="stencilReference">The stencil reference value to use with this pass.</param>
    /// <seealso cref="ShaderTagId"/>
    /// <seealso cref="RenderPassEvent"/>
    /// <seealso cref="RenderQueueRange"/>
    /// <seealso cref="LayerMask"/>
    /// <seealso cref="StencilState"/>
    public DrawObjectsPass(string profilerTag, ShaderTagId[] shaderTagIds, bool opaque, RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask, StencilState stencilState, int stencilReference)
    {
        Init(opaque, evt, renderQueueRange, layerMask, stencilState, stencilReference, shaderTagIds);
        profilingSampler = new ProfilingSampler(profilerTag);
    }

    /// <summary>
    /// Creates a new <c>DrawObjectsPass</c> instance.
    /// </summary>
    /// <param name="profilerTag">The profiler tag used with the pass.</param>
    /// <param name="opaque">Marks whether the objects are opaque or transparent.</param>
    /// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
    /// <param name="renderQueueRange">The <c>RenderQueueRange</c> to use for creating filtering settings that control what objects get rendered.</param>
    /// <param name="layerMask">The layer mask to use for creating filtering settings that control what objects get rendered.</param>
    /// <param name="stencilState">The stencil settings to use with this pass.</param>
    /// <param name="stencilReference">The stencil reference value to use with this pass.</param>
    /// <seealso cref="RenderPassEvent"/>
    /// <seealso cref="RenderQueueRange"/>
    /// <seealso cref="LayerMask"/>
    /// <seealso cref="StencilState"/>
    public DrawObjectsPass(string profilerTag, bool opaque, RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask, StencilState stencilState, int stencilReference)
        : this(profilerTag, null, opaque, evt, renderQueueRange, layerMask, stencilState, stencilReference)
    { }

    internal DrawObjectsPass(URPProfileId profileId, bool opaque, RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask, StencilState stencilState, int stencilReference)
    {
        Init(opaque, evt, renderQueueRange, layerMask, stencilState, stencilReference);
        profilingSampler = ProfilingSampler.Get(profileId);
    }

    // Shared constructor body. When shaderTagIds is null the default forward
    // pass names (SRPDefaultUnlit / UniversalForward / UniversalForwardOnly) are used.
    internal void Init(bool opaque, RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask, StencilState stencilState, int stencilReference, ShaderTagId[] shaderTagIds = null)
    {
        if (shaderTagIds == null)
            shaderTagIds = new ShaderTagId[] { new ShaderTagId("SRPDefaultUnlit"), new ShaderTagId("UniversalForward"), new ShaderTagId("UniversalForwardOnly") };

        m_PassData = new PassData();
        foreach (ShaderTagId sid in shaderTagIds)
            m_ShaderTagIdList.Add(sid);
        renderPassEvent = evt;
        m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
        m_RenderStateBlock = new RenderStateBlock(RenderStateMask.Nothing);
        m_IsOpaque = opaque;
        m_ShouldTransparentsReceiveShadows = false;
        m_IsActiveTargetBackBuffer = false;

        if (stencilState.enabled)
        {
            m_RenderStateBlock.stencilReference = stencilReference;
            m_RenderStateBlock.mask = RenderStateMask.Stencil;
            m_RenderStateBlock.stencilState = stencilState;
        }
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        ContextContainer frameData = renderingData.frameData;
        UniversalRenderingData universalRenderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();

        InitPassData(cameraData, ref m_PassData, uint.MaxValue, m_IsActiveTargetBackBuffer);
        InitRendererLists(universalRenderingData, cameraData, lightData, ref m_PassData, context, default(RenderGraph), false);

        using (new ProfilingScope(renderingData.commandBuffer, profilingSampler))
        {
            ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer), m_PassData, m_PassData.rendererList, m_PassData.objectsWithErrorRendererList, m_PassData.cameraData.IsCameraProjectionMatrixFlipped());
        }
    }

    // Shared draw logic for both the compatibility and render-graph paths.
    internal static void ExecutePass(RasterCommandBuffer cmd, PassData data, RendererList rendererList, RendererList objectsWithErrorRendererList, bool yFlip)
    {
        // Global render pass data containing various settings.
        // x,y,z are currently unused
        // w is used for knowing whether the object is opaque(1) or alpha blended(0)
        Vector4 drawObjectPassData = new Vector4(0.0f, 0.0f, 0.0f, (data.isOpaque) ? 1.0f : 0.0f);
        cmd.SetGlobalVector(s_DrawObjectPassDataPropID, drawObjectPassData);

        if (data.cameraData.xr.enabled && data.isActiveTargetBackBuffer)
        {
            cmd.SetViewport(data.cameraData.xr.GetViewport());
        }

        // scaleBias.x = flipSign
        // scaleBias.y = scale
        // scaleBias.z = bias
        // scaleBias.w = unused
        float flipSign = yFlip ? -1.0f : 1.0f;
        Vector4 scaleBias = (flipSign < 0.0f)
            ? new Vector4(flipSign, 1.0f, -1.0f, 1.0f)
            : new Vector4(flipSign, 0.0f, 1.0f, 1.0f);
        cmd.SetGlobalVector(ShaderPropertyId.scaleBiasRt, scaleBias);

        // Set a value that can be used by shaders to identify when AlphaToMask functionality may be active
        // The material shader alpha clipping logic requires this value in order to function correctly in all cases.
        float alphaToMaskAvailable = ((data.cameraData.cameraTargetDescriptor.msaaSamples > 1) && data.isOpaque) ? 1.0f : 0.0f;
        cmd.SetGlobalFloat(ShaderPropertyId.alphaToMaskAvailable, alphaToMaskAvailable);

        var activeDebugHandler = GetActiveDebugHandler(data.cameraData);
        if (activeDebugHandler != null)
        {
            data.debugRendererLists.DrawWithRendererList(cmd);
        }
        else
        {
            cmd.DrawRendererList(rendererList);

            // Render objects that did not match any shader pass with error shader
            RenderingUtils.DrawRendererListObjectsWithError(cmd, ref objectsWithErrorRendererList);
        }
    }

    /// <summary>
    /// Shared pass data
    /// </summary>
    internal class PassData
    {
        internal TextureHandle albedoHdl;
        internal TextureHandle depthHdl;

        internal UniversalCameraData cameraData;
        internal bool isOpaque;
        internal bool shouldTransparentsReceiveShadows;
        internal uint batchLayerMask;
        internal bool isActiveTargetBackBuffer;

        internal RendererListHandle rendererListHdl;
        internal RendererListHandle objectsWithErrorRendererListHdl;
        internal DebugRendererLists debugRendererLists;

        // Required for code sharing purpose between RG and non-RG.
        internal RendererList rendererList;
        internal RendererList objectsWithErrorRendererList;
    }

    /// <summary>
    /// Initialize the shared pass data.
    /// </summary>
    /// <param name="passData"></param>
    internal void InitPassData(UniversalCameraData cameraData, ref PassData passData, uint batchLayerMask, bool isActiveTargetBackBuffer = false)
    {
        passData.cameraData = cameraData;

        passData.isOpaque = m_IsOpaque;
        passData.shouldTransparentsReceiveShadows = m_ShouldTransparentsReceiveShadows;
        passData.batchLayerMask = batchLayerMask;
        passData.isActiveTargetBackBuffer = isActiveTargetBackBuffer;
    }

    // Creates the renderer lists for either the render-graph path (useRenderGraph=true,
    // fills the *Hdl fields) or the compat path (fills the RendererList fields).
    internal void InitRendererLists(UniversalRenderingData renderingData, UniversalCameraData cameraData, UniversalLightData lightData, ref PassData passData, ScriptableRenderContext context, RenderGraph renderGraph, bool useRenderGraph)
    {
        ref Camera camera = ref cameraData.camera;
        var sortFlags = (m_IsOpaque) ? cameraData.defaultOpaqueSortFlags : SortingCriteria.CommonTransparent;
        // With depth priming, opaques are depth-equal tested, so skip depth-based sorting.
        if (cameraData.renderer.useDepthPriming && m_IsOpaque && (cameraData.renderType == CameraRenderType.Base || cameraData.clearDepth))
            sortFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;

        var filterSettings = m_FilteringSettings;
        filterSettings.batchLayerMask = passData.batchLayerMask;
#if UNITY_EDITOR
        // When rendering the preview camera, we want the layer mask to be forced to Everything
        if (cameraData.isPreviewCamera)
        {
            filterSettings.layerMask = -1;
        }
#endif

        DrawingSettings drawSettings = RenderingUtils.CreateDrawingSettings(m_ShaderTagIdList, renderingData, cameraData, lightData, sortFlags);

        // With depth priming, draw opaques with depth-equal test against the primed buffer;
        // otherwise restore the default LessEqual test if it was previously overridden.
        if (cameraData.renderer.useDepthPriming && m_IsOpaque && (cameraData.renderType == CameraRenderType.Base || cameraData.clearDepth))
        {
            m_RenderStateBlock.depthState = new DepthState(false, CompareFunction.Equal);
            m_RenderStateBlock.mask |= RenderStateMask.Depth;
        }
        else if (m_RenderStateBlock.depthState.compareFunction == CompareFunction.Equal)
        {
            m_RenderStateBlock.depthState = new DepthState(true, CompareFunction.LessEqual);
            m_RenderStateBlock.mask |= RenderStateMask.Depth;
        }

        var activeDebugHandler = GetActiveDebugHandler(cameraData);
        if (useRenderGraph)
        {
            if (activeDebugHandler != null)
            {
                passData.debugRendererLists = activeDebugHandler.CreateRendererListsWithDebugRenderState(renderGraph, ref renderingData.cullResults, ref drawSettings, ref filterSettings, ref m_RenderStateBlock);
            }
            else
            {
                RenderingUtils.CreateRendererListWithRenderStateBlock(renderGraph, ref renderingData.cullResults, drawSettings, filterSettings, m_RenderStateBlock, ref passData.rendererListHdl);
                RenderingUtils.CreateRendererListObjectsWithError(renderGraph, ref renderingData.cullResults, camera, filterSettings, sortFlags, ref passData.objectsWithErrorRendererListHdl);
            }
        }
        else
        {
            if (activeDebugHandler != null)
            {
                passData.debugRendererLists = activeDebugHandler.CreateRendererListsWithDebugRenderState(context, ref renderingData.cullResults, ref drawSettings, ref filterSettings, ref m_RenderStateBlock);
            }
            else
            {
                RenderingUtils.CreateRendererListWithRenderStateBlock(context, ref renderingData.cullResults, drawSettings, filterSettings, m_RenderStateBlock, ref passData.rendererList);
                RenderingUtils.CreateRendererListObjectsWithError(context, ref renderingData.cullResults, camera, filterSettings, sortFlags, ref passData.objectsWithErrorRendererList);
            }
        }
    }

    // Render-graph path: records the forward object-drawing raster pass.
    internal void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle colorTarget, TextureHandle depthTarget, TextureHandle mainShadowsTexture, TextureHandle additionalShadowsTexture, uint batchLayerMask = uint.MaxValue)
    {
        UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
        UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();

        using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
        {
            builder.UseAllGlobalTextures(true);

            InitPassData(cameraData, ref passData, batchLayerMask, resourceData.isActiveTargetBackBuffer);

            if (colorTarget.IsValid())
            {
                passData.albedoHdl = colorTarget;
                builder.SetRenderAttachment(colorTarget, 0, AccessFlags.Write);
            }

            if (depthTarget.IsValid())
            {
                passData.depthHdl = depthTarget;
                builder.SetRenderAttachmentDepth(depthTarget, AccessFlags.Write);
            }

            if (mainShadowsTexture.IsValid())
                builder.UseTexture(mainShadowsTexture, AccessFlags.Read);
            if (additionalShadowsTexture.IsValid())
                builder.UseTexture(additionalShadowsTexture, AccessFlags.Read);

            TextureHandle ssaoTexture = resourceData.ssaoTexture;
            if (ssaoTexture.IsValid())
                builder.UseTexture(ssaoTexture, AccessFlags.Read);

            RenderGraphUtils.UseDBufferIfValid(builder, resourceData);

            InitRendererLists(renderingData, cameraData, lightData, ref passData, default(ScriptableRenderContext), renderGraph, true);

            var activeDebugHandler = GetActiveDebugHandler(cameraData);
            if (activeDebugHandler != null)
            {
                passData.debugRendererLists.PrepareRendererListForRasterPass(builder);
            }
            else
            {
                builder.UseRendererList(passData.rendererListHdl);
                builder.UseRendererList(passData.objectsWithErrorRendererListHdl);
            }

            builder.AllowGlobalStateModification(true);

            if (cameraData.xr.enabled)
            {
                bool passSupportsFoveation = cameraData.xrUniversal.canFoveateIntermediatePasses || resourceData.isActiveTargetBackBuffer;
                builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && passSupportsFoveation);
            }

            builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
            {
                // Currently we only need to call this additional pass when the user
                // doesn't want transparent objects to receive shadows
                if (!data.isOpaque && !data.shouldTransparentsReceiveShadows)
                    TransparentSettingsPass.ExecutePass(context.cmd);

                bool yFlip = data.cameraData.IsRenderTargetProjectionMatrixFlipped(data.albedoHdl, data.depthHdl);

                ExecutePass(context.cmd, data, data.rendererListHdl, data.objectsWithErrorRendererListHdl, yFlip);
            });
        }
    }
}
/// <summary>
/// Extension of DrawObjectPass that also output Rendering Layers Texture as second render target.
/// </summary>
internal class DrawObjectsWithRenderingLayersPass : DrawObjectsPass
{
// NOTE(review): "Indentifiers" is a misspelling of "Identifiers"; kept as-is because
// these fields are referenced by members outside this view.
RTHandle[] m_ColorTargetIndentifiers;  // [0] = color attachment, [1] = rendering layers texture
RTHandle m_DepthTargetIndentifiers;    // depth attachment
/// <summary>
/// Creates a new <c>DrawObjectsWithRenderingLayersPass</c> instance.
/// </summary>
/// <param name="profilerTag">The profiler tag used with the pass.</param>
/// <param name="opaque">Marks whether the objects are opaque or transparent.</param>
/// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
/// <param name="renderQueueRange">The <c>RenderQueueRange</c> to use for creating filtering settings that control what objects get rendered.</param>
/// <param name="layerMask">The layer mask to use for creating filtering settings that control what objects get rendered.</param>
/// <param name="stencilState">The stencil settings to use with this pass.</param>
/// <param name="stencilReference">The stencil reference value to use with this pass.</param>
public DrawObjectsWithRenderingLayersPass(URPProfileId profilerTag, bool opaque, RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask, StencilState stencilState, int stencilReference) :
    base(profilerTag, opaque, evt, renderQueueRange, layerMask, stencilState, stencilReference)
{
    // Two color targets: [0] regular color, [1] rendering layers.
    m_ColorTargetIndentifiers = new RTHandle[2];
}
/// <summary>
/// Sets up the pass.
/// </summary>
/// <param name="colorAttachment">Color attachment handle.</param>
/// <param name="renderingLayersTexture">Texture used with rendering layers.</param>
/// <param name="depthAttachment">Depth attachment handle.</param>
/// <exception cref="ArgumentException"></exception>
public void Setup(RTHandle colorAttachment, RTHandle renderingLayersTexture, RTHandle depthAttachment)
{
if (colorAttachment == null)
throw new ArgumentException("Color attachment can not be null", "colorAttachment");
if (renderingLayersTexture == null)
throw new ArgumentException("Rendering layers attachment can not be null", "renderingLayersTexture");
if (depthAttachment == null)
throw new ArgumentException("Depth attachment can not be null", "depthAttachment");
m_ColorTargetIndentifiers[0] = colorAttachment;
m_ColorTargetIndentifiers[1] = renderingLayersTexture;
m_DepthTargetIndentifiers = depthAttachment;
}
/// <inheritdoc/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
{
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
ConfigureTarget(m_ColorTargetIndentifiers, m_DepthTargetIndentifiers);
#pragma warning restore CS0618
}
/// <inheritdoc/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
CommandBuffer cmd = renderingData.commandBuffer;
// Enable Rendering Layers
cmd.SetKeyword(ShaderGlobalKeywords.WriteRenderingLayers, true);
// Execute
base.Execute(context, ref renderingData);
// Clean up
cmd.SetKeyword(ShaderGlobalKeywords.WriteRenderingLayers, false);
}
private class RenderingLayersPassData
{
internal PassData basePassData;
internal RenderingLayerUtils.MaskSize maskSize;
public RenderingLayersPassData()
{
basePassData = new PassData();
}
}
internal void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle colorTarget, TextureHandle renderingLayersTexture, TextureHandle depthTarget, TextureHandle mainShadowsTexture, TextureHandle additionalShadowsTexture, RenderingLayerUtils.MaskSize maskSize, uint batchLayerMask = uint.MaxValue)
{
using (var builder = renderGraph.AddRasterRenderPass<RenderingLayersPassData>(passName, out var passData, profilingSampler))
{
UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
UniversalLightData lightData = frameData.Get<UniversalLightData>();
InitPassData(cameraData, ref passData.basePassData, batchLayerMask);
passData.maskSize = maskSize;
passData.basePassData.albedoHdl = colorTarget;
builder.SetRenderAttachment(colorTarget, 0, AccessFlags.Write);
builder.SetRenderAttachment(renderingLayersTexture, 1, AccessFlags.Write);
passData.basePassData.depthHdl = depthTarget;
builder.SetRenderAttachmentDepth(depthTarget, AccessFlags.Write);
if (mainShadowsTexture.IsValid())
builder.UseTexture(mainShadowsTexture, AccessFlags.Read);
if (additionalShadowsTexture.IsValid())
builder.UseTexture(additionalShadowsTexture, AccessFlags.Read);
UniversalRenderer renderer = cameraData.renderer as UniversalRenderer;
if (renderer != null)
{
TextureHandle ssaoTexture = resourceData.ssaoTexture;
if (ssaoTexture.IsValid())
builder.UseTexture(ssaoTexture, AccessFlags.Read);
RenderGraphUtils.UseDBufferIfValid(builder, resourceData);
}
InitRendererLists(renderingData, cameraData, lightData, ref passData.basePassData, default(ScriptableRenderContext), renderGraph, true);
var activeDebugHandler = GetActiveDebugHandler(cameraData);
if (activeDebugHandler != null)
{
passData.basePassData.debugRendererLists.PrepareRendererListForRasterPass(builder);
}
else
{
builder.UseRendererList(passData.basePassData.rendererListHdl);
builder.UseRendererList(passData.basePassData.objectsWithErrorRendererListHdl);
}
// Required here because of RenderingLayerUtils.SetupProperties
builder.AllowGlobalStateModification(true);
if (cameraData.xr.enabled)
{
bool passSupportsFoveation = cameraData.xrUniversal.canFoveateIntermediatePasses || resourceData.isActiveTargetBackBuffer;
builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && passSupportsFoveation);
}
builder.SetRenderFunc((RenderingLayersPassData data, RasterGraphContext context) =>
{
// Enable Rendering Layers
context.cmd.SetKeyword(ShaderGlobalKeywords.WriteRenderingLayers, true);
RenderingLayerUtils.SetupProperties(context.cmd, data.maskSize);
// Currently we only need to call this additional pass when the user
// doesn't want transparent objects to receive shadows
if (!data.basePassData.isOpaque && !data.basePassData.shouldTransparentsReceiveShadows)
TransparentSettingsPass.ExecutePass(context.cmd);
bool yFlip = data.basePassData.cameraData.IsRenderTargetProjectionMatrixFlipped(data.basePassData.albedoHdl, data.basePassData.depthHdl);
// Execute
ExecutePass(context.cmd, data.basePassData, data.basePassData.rendererListHdl, data.basePassData.objectsWithErrorRendererListHdl, yFlip);
// Clean up
context.cmd.SetKeyword(ShaderGlobalKeywords.WriteRenderingLayers, false);
});
}
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d2138a73081d8c34d901cf7a321f1099
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,264 @@
using System;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Rendering.Universal.Internal;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Draw screen space overlay UI into the given color and depth target
/// </summary>
internal class DrawScreenSpaceUIPass : ScriptableRenderPass
{
PassData m_PassData;
// Offscreen color/depth targets, allocated only when m_RenderOffscreen is true (see Setup).
RTHandle m_ColorTarget;
RTHandle m_DepthTarget;
// Whether to render on an offscreen render texture or on the current active render target
bool m_RenderOffscreen;
// Cached shader property IDs. NOTE(review): not referenced in this file's visible code — confirm usage elsewhere.
static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture");
static readonly int s_CameraOpaqueTextureID = Shader.PropertyToID("_CameraOpaqueTexture");
/// <summary>
/// Creates a new <c>DrawScreenSpaceUIPass</c> instance.
/// </summary>
/// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
/// <param name="renderOffscreen">When true, the UI is rendered into a dedicated offscreen texture instead of the active render target.</param>
/// <seealso cref="RenderPassEvent"/>
public DrawScreenSpaceUIPass(RenderPassEvent evt, bool renderOffscreen)
{
profilingSampler = ProfilingSampler.Get(URPProfileId.DrawScreenSpaceUI);
renderPassEvent = evt;
useNativeRenderPass = false;
m_RenderOffscreen = renderOffscreen;
m_PassData = new PassData();
}
/// <summary>
/// Get a descriptor for the required color texture for this pass.
/// </summary>
/// <param name="descriptor">Camera target descriptor.</param>
/// <param name="cameraWidth">Unscaled pixel width of the camera.</param>
/// <param name="cameraHeight">Unscaled pixel height of the camera.</param>
/// <seealso cref="RenderTextureDescriptor"/>
public static void ConfigureColorDescriptor(ref RenderTextureDescriptor descriptor, int cameraWidth, int cameraHeight)
{
// Color-only target: sRGB 8-bit RGBA, no depth/stencil.
descriptor.graphicsFormat = GraphicsFormat.R8G8B8A8_SRGB;
descriptor.depthStencilFormat = GraphicsFormat.None;
descriptor.width = cameraWidth;
descriptor.height = cameraHeight;
}
/// <summary>
/// Get a descriptor for the required depth texture for this pass.
/// </summary>
/// <param name="descriptor">Camera target descriptor.</param>
/// <param name="depthStencilFormat">Depth stencil format required.</param>
/// <param name="cameraWidth">Unscaled pixel width of the camera.</param>
/// <param name="cameraHeight">Unscaled pixel height of the camera.</param>
/// <seealso cref="RenderTextureDescriptor"/>
public static void ConfigureDepthDescriptor(ref RenderTextureDescriptor descriptor, GraphicsFormat depthStencilFormat, int cameraWidth, int cameraHeight)
{
// Depth-only target: no color format, caller-chosen depth/stencil format.
descriptor.graphicsFormat = GraphicsFormat.None;
descriptor.depthStencilFormat = depthStencilFormat;
descriptor.width = cameraWidth;
descriptor.height = cameraHeight;
}
// Draws the pre-built UI renderer list; passData is currently unused but kept for signature symmetry.
private static void ExecutePass(RasterCommandBuffer commandBuffer, PassData passData, RendererList rendererList)
{
commandBuffer.DrawRendererList(rendererList);
}
// Specific to RG cases which have to go through Unsafe commands
private static void ExecutePass(UnsafeCommandBuffer commandBuffer, UnsafePassData passData, RendererList rendererList)
{
commandBuffer.DrawRendererList(rendererList);
}
// Non-RenderGraph path
public void Dispose()
{
// Releases the offscreen RTHandles if they were allocated in Setup.
m_ColorTarget?.Release();
m_DepthTarget?.Release();
}
/// <summary>
/// Configure the pass with the off-screen destination color texture and depth texture to execute the pass on.
/// </summary>
/// <param name="cameraData">Camera rendering data containing all relevant render target information.</param>
/// <param name="depthStencilFormat">Depth stencil format required for depth/stencil effects.</param>
public void Setup(UniversalCameraData cameraData, GraphicsFormat depthStencilFormat)
{
if (m_RenderOffscreen)
{
RenderTextureDescriptor colorDescriptor = cameraData.cameraTargetDescriptor;
ConfigureColorDescriptor(ref colorDescriptor, cameraData.pixelWidth, cameraData.pixelHeight);
RenderingUtils.ReAllocateHandleIfNeeded(ref m_ColorTarget, colorDescriptor, name: "_OverlayUITexture");
RenderTextureDescriptor depthDescriptor = cameraData.cameraTargetDescriptor;
ConfigureDepthDescriptor(ref depthDescriptor, depthStencilFormat, cameraData.pixelWidth, cameraData.pixelHeight);
RenderingUtils.ReAllocateHandleIfNeeded(ref m_DepthTarget, depthDescriptor, name: "_OverlayUITexture_Depth");
}
}
/// <inheritdoc/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
{
if(m_RenderOffscreen)
{
// Offscreen path: render into the dedicated UI texture and expose it globally.
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
ConfigureTarget(m_ColorTarget, m_DepthTarget);
ConfigureClear(ClearFlag.Color, Color.clear);
#pragma warning restore CS0618
cmd?.SetGlobalTexture(ShaderPropertyId.overlayUITexture, m_ColorTarget);
}
else
{
UniversalCameraData cameraData = renderingData.frameData.Get<UniversalCameraData>();
DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(cameraData.resolveFinalTarget);
if (resolveToDebugScreen)
{
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
ConfigureTarget(debugHandler.DebugScreenColorHandle, debugHandler.DebugScreenDepthHandle);
#pragma warning restore CS0618
}
else
{
// Get RTHandle alias to use RTHandle apis
var cameraTarget = RenderingUtils.GetCameraTargetIdentifier(ref renderingData);
RTHandleStaticHelpers.SetRTHandleStaticWrapper(cameraTarget);
var colorTargetHandle = RTHandleStaticHelpers.s_RTHandleWrapper;
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
ConfigureTarget(colorTargetHandle);
#pragma warning restore CS0618
}
}
}
/// <inheritdoc/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
using (new ProfilingScope(renderingData.commandBuffer, profilingSampler))
{
RendererList rendererList = context.CreateUIOverlayRendererList(renderingData.cameraData.camera);
ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer), m_PassData, rendererList);
}
}
//RenderGraph path
private class PassData
{
internal RendererListHandle rendererList;
}
// Specific to RG cases which have to go through Unsafe commands
private class UnsafePassData
{
internal RendererListHandle rendererList;
// Target bound manually via SetRenderTarget inside the unsafe render func.
internal TextureHandle colorTarget;
}
// RenderGraph offscreen path: renders UI into a fresh "_OverlayUITexture" and returns it via 'output'.
internal void RenderOffscreen(RenderGraph renderGraph, ContextContainer frameData, GraphicsFormat depthStencilFormat, out TextureHandle output)
{
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
RenderTextureDescriptor colorDescriptor = cameraData.cameraTargetDescriptor;
ConfigureColorDescriptor(ref colorDescriptor, cameraData.pixelWidth, cameraData.pixelHeight);
output = UniversalRenderer.CreateRenderGraphTexture(renderGraph, colorDescriptor, "_OverlayUITexture", true);
RenderTextureDescriptor depthDescriptor = cameraData.cameraTargetDescriptor;
ConfigureDepthDescriptor(ref depthDescriptor, depthStencilFormat, cameraData.pixelWidth, cameraData.pixelHeight);
TextureHandle depthBuffer = UniversalRenderer.CreateRenderGraphTexture(renderGraph, depthDescriptor, "_OverlayUITexture_Depth", false);
// Render uGUI and UIToolkit overlays
using (var builder = renderGraph.AddRasterRenderPass<PassData>("Draw Screen Space UIToolkit/uGUI - Offscreen", out var passData, profilingSampler))
{
// UIToolkit/uGUI pass accept custom shaders, we need to make sure we use all global textures
builder.UseAllGlobalTextures(true);
builder.SetRenderAttachment(output, 0);
passData.rendererList = renderGraph.CreateUIOverlayRendererList(cameraData.camera, UISubset.UIToolkit_UGUI);
builder.UseRendererList(passData.rendererList);
builder.SetRenderAttachmentDepth(depthBuffer, AccessFlags.ReadWrite);
if (output.IsValid())
builder.SetGlobalTextureAfterPass(output, ShaderPropertyId.overlayUITexture);
builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
{
ExecutePass(context.cmd, data, data.rendererList);
});
}
// Render IMGUI overlay and software cursor in a UnsafePass
// Doing so allows us to safely cover cases when graphics commands called through onGUI() in user scripts are not supported by RenderPass API
// Besides, Vulkan backend doesn't support SetSRGWrite() in RenderPass API and we have some of them at IMGUI levels
// Note, these specific UI calls don't need a depth buffer, unlike UIToolkit/uGUI
using (var builder = renderGraph.AddUnsafePass<UnsafePassData>("Draw Screen Space IMGUI/SoftwareCursor - Offscreen", out var passData, profilingSampler))
{
passData.colorTarget = output;
builder.UseTexture(output, AccessFlags.Write);
passData.rendererList = renderGraph.CreateUIOverlayRendererList(cameraData.camera, UISubset.LowLevel);
builder.UseRendererList(passData.rendererList);
builder.SetRenderFunc((UnsafePassData data, UnsafeGraphContext context) =>
{
context.cmd.SetRenderTarget(data.colorTarget);
ExecutePass(context.cmd, data, data.rendererList);
});
}
}
// RenderGraph direct path: renders UI straight onto the provided color/depth buffers.
internal void RenderOverlay(RenderGraph renderGraph, ContextContainer frameData, in TextureHandle colorBuffer, in TextureHandle depthBuffer)
{
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
// NOTE(review): 'renderer' and 'resourceData' are fetched but not used in the visible body — confirm intent.
UniversalRenderer renderer = cameraData.renderer as UniversalRenderer;
// Render uGUI and UIToolkit overlays
using (var builder = renderGraph.AddRasterRenderPass<PassData>("Draw UIToolkit/uGUI Overlay", out var passData, profilingSampler))
{
// UIToolkit/uGUI pass accept custom shaders, we need to make sure we use all global textures
builder.UseAllGlobalTextures(true);
builder.SetRenderAttachment(colorBuffer, 0);
builder.SetRenderAttachmentDepth(depthBuffer, AccessFlags.ReadWrite);
passData.rendererList = renderGraph.CreateUIOverlayRendererList(cameraData.camera, UISubset.UIToolkit_UGUI);
builder.UseRendererList(passData.rendererList);
builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
{
ExecutePass(context.cmd, data, data.rendererList);
});
}
// Render IMGUI overlay and software cursor in a UnsafePass
// Doing so allows us to safely cover cases when graphics commands called through onGUI() in user scripts are not supported by RenderPass API
// Besides, Vulkan backend doesn't support SetSRGWrite() in RenderPass API and we have some of them at IMGUI levels
// Note, these specific UI calls don't need a depth buffer, unlike UIToolkit/uGUI
using (var builder = renderGraph.AddUnsafePass<UnsafePassData>("Draw IMGUI/SoftwareCursor Overlay", out var passData, profilingSampler))
{
passData.colorTarget = colorBuffer;
builder.UseTexture(colorBuffer, AccessFlags.Write);
passData.rendererList = renderGraph.CreateUIOverlayRendererList(cameraData.camera, UISubset.LowLevel);
builder.UseRendererList(passData.rendererList);
builder.SetRenderFunc((UnsafePassData data, UnsafeGraphContext context) =>
{
context.cmd.SetRenderTarget(data.colorTarget);
ExecutePass(context.cmd, data, data.rendererList);
});
}
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a834bac5135fab746bc234ec665972de
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,171 @@
using System;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.Universal.Internal;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Draw the skybox into the given color buffer using the given depth buffer for depth testing.
///
/// This pass renders the standard Unity skybox.
/// </summary>
public class DrawSkyboxPass : ScriptableRenderPass
{
/// <summary>
/// Creates a new <c>DrawSkyboxPass</c> instance.
/// </summary>
/// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
/// <seealso cref="RenderPassEvent"/>
public DrawSkyboxPass(RenderPassEvent evt)
{
profilingSampler = ProfilingSampler.Get(URPProfileId.DrawSkybox);
renderPassEvent = evt;
}
/// <inheritdoc/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
UniversalCameraData cameraData = renderingData.frameData.Get<UniversalCameraData>();
var activeDebugHandler = GetActiveDebugHandler(cameraData);
if (activeDebugHandler != null)
{
// TODO: The skybox needs to work the same as the other shaders, but until it does we'll not render it
// when certain debug modes are active (e.g. wireframe/overdraw modes)
if (activeDebugHandler.IsScreenClearNeeded)
{
return;
}
}
var skyRendererList = CreateSkyboxRendererList(context, cameraData);
ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer), cameraData.xr, skyRendererList);
}
// For non-RG path
// Builds the skybox renderer list; for XR single-pass it supplies per-eye view/projection matrices.
private RendererList CreateSkyboxRendererList(ScriptableRenderContext context, UniversalCameraData cameraData)
{
var skyRendererList = new RendererList();
#if ENABLE_VR && ENABLE_XR_MODULE
if (cameraData.xr.enabled)
{
// Setup Legacy XR buffer states
if (cameraData.xr.singlePassEnabled)
{
skyRendererList = context.CreateSkyboxRendererList(cameraData.camera,
cameraData.GetProjectionMatrix(0), cameraData.GetViewMatrix(0),
cameraData.GetProjectionMatrix(1), cameraData.GetViewMatrix(1));
}
else
{
skyRendererList = context.CreateSkyboxRendererList(cameraData.camera, cameraData.GetProjectionMatrix(0), cameraData.GetViewMatrix(0));
}
}
else
#endif
{
skyRendererList = context.CreateSkyboxRendererList(cameraData.camera);
}
return skyRendererList;
}
// For RG path
// Same eye-matrix selection as the non-RG variant, but returns a RendererListHandle owned by the graph.
private RendererListHandle CreateSkyBoxRendererList(RenderGraph renderGraph, UniversalCameraData cameraData)
{
var skyRendererListHandle = new RendererListHandle();
#if ENABLE_VR && ENABLE_XR_MODULE
if (cameraData.xr.enabled)
{
// Setup Legacy XR buffer states
if (cameraData.xr.singlePassEnabled)
{
skyRendererListHandle = renderGraph.CreateSkyboxRendererList(cameraData.camera,
cameraData.GetProjectionMatrix(0), cameraData.GetViewMatrix(0),
cameraData.GetProjectionMatrix(1), cameraData.GetViewMatrix(1));
}
else
{
skyRendererListHandle = renderGraph.CreateSkyboxRendererList(cameraData.camera, cameraData.GetProjectionMatrix(0), cameraData.GetViewMatrix(0));
}
}
else
#endif
{
skyRendererListHandle = renderGraph.CreateSkyboxRendererList(cameraData.camera);
}
return skyRendererListHandle;
}
// Draws the skybox renderer list, wrapping the draw in single-pass stereo enable/disable for XR.
private static void ExecutePass(RasterCommandBuffer cmd, XRPass xr, RendererList rendererList)
{
#if ENABLE_VR && ENABLE_XR_MODULE
if (xr.enabled && xr.singlePassEnabled)
cmd.SetSinglePassStereo(SystemInfo.supportsMultiview ? SinglePassStereoMode.Multiview : SinglePassStereoMode.Instancing);
#endif
cmd.DrawRendererList(rendererList);
#if ENABLE_VR && ENABLE_XR_MODULE
if (xr.enabled && xr.singlePassEnabled)
cmd.SetSinglePassStereo(SinglePassStereoMode.None);
#endif
}
// All the rest below is Render Graph specific
private class PassData
{
internal XRPass xr;
internal RendererListHandle skyRendererListHandle;
// NOTE(review): assigned in Render but never read in the render func — presumably kept for lifetime; confirm.
internal Material material;
}
// Copies the per-pass inputs into the graph's pass data object.
private void InitPassData(ref PassData passData, in XRPass xr, in RendererListHandle handle)
{
passData.xr = xr;
passData.skyRendererListHandle = handle;
}
// RenderGraph entry point: records a raster pass drawing the skybox into colorTarget/depthTarget.
internal void Render(RenderGraph renderGraph, ContextContainer frameData, ScriptableRenderContext context, TextureHandle colorTarget, TextureHandle depthTarget, Material skyboxMaterial)
{
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
var activeDebugHandler = GetActiveDebugHandler(cameraData);
if (activeDebugHandler != null)
{
// TODO: The skybox needs to work the same as the other shaders, but until it does we'll not render it
// when certain debug modes are active (e.g. wireframe/overdraw modes)
if (activeDebugHandler.IsScreenClearNeeded)
{
return;
}
}
using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
{
var skyRendererListHandle = CreateSkyBoxRendererList(renderGraph, cameraData);
InitPassData(ref passData, cameraData.xr, skyRendererListHandle);
passData.material = skyboxMaterial;
builder.UseRendererList(skyRendererListHandle);
builder.SetRenderAttachment(colorTarget, 0, AccessFlags.Write);
builder.SetRenderAttachmentDepth(depthTarget, AccessFlags.Write);
// The pass has no outputs the graph can track besides the attachments; never cull it.
builder.AllowPassCulling(false);
if (cameraData.xr.enabled)
{
bool passSupportsFoveation = cameraData.xrUniversal.canFoveateIntermediatePasses || resourceData.isActiveTargetBackBuffer;
builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && passSupportsFoveation);
}
builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
{
ExecutePass(context.cmd, data.xr, data.skyRendererListHandle);
});
}
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ae3e4e9915f7b6347b65203987c4f8b0
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,358 @@
using System;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Experimental.Rendering;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Copy the given color target to the current camera target
///
/// You can use this pass to copy the result of rendering to
/// the camera target. The pass takes the screen viewport into
/// consideration.
/// </summary>
public class FinalBlitPass : ScriptableRenderPass
{
RTHandle m_Source;
private PassData m_PassData;
static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture");
// Use specialized URP fragment shader passes for debug draw support and color space conversion/encoding support.
// See CoreBlit.shader and BlitHDROverlay.shader
// Shader pass names resolved via Material.FindPass in the constructor.
static class BlitPassNames
{
public const string NearestSampler = "NearestDebugDraw";
public const string BilinearSampler = "BilinearDebugDraw";
}
// Which blit flavor a BlitMaterialData entry represents; doubles as the index into m_BlitMaterialData.
enum BlitType
{
Core = 0, // Core blit
HDR = 1, // Blit with HDR encoding and overlay UI compositing
Count = 2
}
// A blit material plus its pre-resolved debug-draw pass indices (-1 when the pass is missing).
struct BlitMaterialData
{
public Material material;
public int nearestSamplerPass; // pass index used when the source filter mode is Point
public int bilinearSamplerPass; // pass index used when the source filter mode is Bilinear
}
BlitMaterialData[] m_BlitMaterialData;
/// <summary>
/// Creates a new <c>FinalBlitPass</c> instance.
/// </summary>
/// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
/// <param name="blitMaterial">The <c>Material</c> used to execute the final blit.</param>
/// <param name="blitHDRMaterial">The <c>Material</c> used to execute the final blit when HDR output is active.</param>
/// <seealso cref="RenderPassEvent"/>
public FinalBlitPass(RenderPassEvent evt, Material blitMaterial, Material blitHDRMaterial)
{
profilingSampler = ProfilingSampler.Get(URPProfileId.BlitFinalToBackBuffer);
base.useNativeRenderPass = false;
m_PassData = new PassData();
renderPassEvent = evt;
// Resolve the debug-draw sampler pass indices once, per blit flavor (-1 when absent).
m_BlitMaterialData = new BlitMaterialData[(int)BlitType.Count];
for (int type = 0; type < (int)BlitType.Count; ++type)
{
Material mat = type == (int)BlitType.Core ? blitMaterial : blitHDRMaterial;
m_BlitMaterialData[type] = new BlitMaterialData
{
material = mat,
nearestSamplerPass = mat?.FindPass(BlitPassNames.NearestSampler) ?? -1,
bilinearSamplerPass = mat?.FindPass(BlitPassNames.BilinearSampler) ?? -1,
};
}
}
/// <summary>
/// Cleans up resources used by the pass.
/// </summary>
public void Dispose()
{
// Intentionally empty: this pass owns no disposable resources (m_Source is caller-owned).
}
/// <summary>
/// Configure the pass
/// </summary>
/// <param name="baseDescriptor">Unused; kept for historical signature compatibility.</param>
/// <param name="colorHandle">Unused; kept for historical signature compatibility.</param>
[Obsolete("Use RTHandles for colorHandle", true)]
public void Setup(RenderTextureDescriptor baseDescriptor, RenderTargetHandle colorHandle)
{
// Hard-deprecated (error: true); exists only to surface a clear migration message.
throw new NotSupportedException("Setup with RenderTargetHandle has been deprecated. Use it with RTHandles instead.");
}
/// <summary>
/// Configure the pass
/// </summary>
/// <param name="baseDescriptor">Unused; kept for signature compatibility.</param>
/// <param name="colorHandle">The source color target to blit to the camera target.</param>
public void Setup(RenderTextureDescriptor baseDescriptor, RTHandle colorHandle)
{
m_Source = colorHandle;
}
// Configures 'material' for HDR display output: uploads luminance parameters, selects the
// encoding/conversion operations for the display gamut, and toggles overlay UI compositing.
static void SetupHDROutput(ColorGamut hdrDisplayColorGamut, Material material, HDROutputUtils.Operation hdrOperation, Vector4 hdrOutputParameters, bool rendersOverlayUI)
{
material.SetVector(ShaderPropertyId.hdrOutputLuminanceParams, hdrOutputParameters);
HDROutputUtils.ConfigureHDROutput(material, hdrDisplayColorGamut, hdrOperation);
CoreUtils.SetKeyword(material, ShaderKeywordStrings.HDROverlay, rendersOverlayUI);
}
/// <inheritdoc/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
{
UniversalCameraData cameraData = renderingData.frameData.Get<UniversalCameraData>();
DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(cameraData.resolveFinalTarget);
// When a debug view wants the final image, redirect this pass to the debug screen targets;
// otherwise the target is resolved later in Execute.
if (resolveToDebugScreen)
{
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
ConfigureTarget(debugHandler.DebugScreenColorHandle, debugHandler.DebugScreenDepthHandle);
#pragma warning restore CS0618
}
}
/// <inheritdoc/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
ContextContainer frameData = renderingData.frameData;
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
bool outputsToHDR = renderingData.cameraData.isHDROutputActive;
// Alpha output is disabled on this compatibility path.
bool outputsAlpha = false;
InitPassData(cameraData, ref m_PassData, outputsToHDR ? BlitType.HDR : BlitType.Core, outputsAlpha);
if (m_PassData.blitMaterialData.material == null)
{
// NOTE(review): this logs the BlitMaterialData struct itself as {0}; consider logging the
// material/pass name for a clearer message.
Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", m_PassData.blitMaterialData, GetType().Name);
return;
}
var cameraTarget = RenderingUtils.GetCameraTargetIdentifier(ref renderingData);
DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(cameraData.resolveFinalTarget);
// Get RTHandle alias to use RTHandle apis
RTHandleStaticHelpers.SetRTHandleStaticWrapper(cameraTarget);
var cameraTargetHandle = RTHandleStaticHelpers.s_RTHandleWrapper;
var cmd = renderingData.commandBuffer;
// If the source is the renderer's front buffer, blit from the current color target instead
// (presumably to avoid reading the buffer being resolved — TODO confirm).
if (m_Source == cameraData.renderer.GetCameraColorFrontBuffer(cmd))
{
m_Source = renderingData.cameraData.renderer.cameraColorTargetHandle;
}
using (new ProfilingScope(cmd, profilingSampler))
{
// NOTE(review): resets the material's locally enabled keywords — presumably to avoid stale
// keyword state leaking between frames/cameras; confirm.
m_PassData.blitMaterialData.material.enabledKeywords = null;
debugHandler?.UpdateShaderGlobalPropertiesForFinalValidationPass(cmd, cameraData, !resolveToDebugScreen);
cmd.SetKeyword(ShaderGlobalKeywords.LinearToSRGBConversion,
cameraData.requireSrgbConversion);
if (outputsToHDR)
{
VolumeStack stack = VolumeManager.instance.stack;
Tonemapping tonemapping = stack.GetComponent<Tonemapping>();
Vector4 hdrOutputLuminanceParams;
UniversalRenderPipeline.GetHDROutputLuminanceParameters(cameraData.hdrDisplayInformation, cameraData.hdrDisplayColorGamut, tonemapping, out hdrOutputLuminanceParams);
HDROutputUtils.Operation hdrOperation = HDROutputUtils.Operation.None;
// If the HDRDebugView is on, we don't want the encoding
if (debugHandler == null || !debugHandler.HDRDebugViewIsActive(cameraData.resolveFinalTarget))
hdrOperation |= HDROutputUtils.Operation.ColorEncoding;
// Color conversion may have happened in the Uber post process through color grading, so we don't want to reapply it
if (!cameraData.postProcessEnabled)
hdrOperation |= HDROutputUtils.Operation.ColorConversion;
SetupHDROutput(cameraData.hdrDisplayColorGamut, m_PassData.blitMaterialData.material, hdrOperation, hdrOutputLuminanceParams, cameraData.rendersOverlayUI);
}
if (resolveToDebugScreen)
{
// Blit to the debugger texture instead of the camera target
int shaderPassIndex = m_Source.rt?.filterMode == FilterMode.Bilinear ? m_PassData.blitMaterialData.bilinearSamplerPass : m_PassData.blitMaterialData.nearestSamplerPass;
Vector2 viewportScale = m_Source.useScaling ? new Vector2(m_Source.rtHandleProperties.rtHandleScale.x, m_Source.rtHandleProperties.rtHandleScale.y) : Vector2.one;
Blitter.BlitTexture(cmd, m_Source, viewportScale, m_PassData.blitMaterialData.material, shaderPassIndex);
cameraData.renderer.ConfigureCameraTarget(debugHandler.DebugScreenColorHandle, debugHandler.DebugScreenDepthHandle);
}
// TODO RENDERGRAPH: See https://jira.unity3d.com/projects/URP/issues/URP-1737
// This branch of the if statement must be removed for render graph and the new command list with a novel way of using Blitter with fill mode
else if (GL.wireframe && cameraData.isSceneViewCamera)
{
// This set render target is necessary so we change the LOAD state to DontCare.
cmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget,
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, // color
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare); // depth
cmd.Blit(m_Source.nameID, cameraTargetHandle.nameID);
}
else
{
// TODO: Final blit pass should always blit to backbuffer. The first time we do we don't need to Load contents to tile.
// We need to keep in the pipeline of first render pass to each render target to properly set load/store actions.
// meanwhile we set to load so split screen case works.
var loadAction = RenderBufferLoadAction.DontCare;
if (!cameraData.isSceneViewCamera && !cameraData.isDefaultViewport)
loadAction = RenderBufferLoadAction.Load;
#if ENABLE_VR && ENABLE_XR_MODULE
if (cameraData.xr.enabled)
loadAction = RenderBufferLoadAction.Load;
#endif
CoreUtils.SetRenderTarget(renderingData.commandBuffer, cameraTargetHandle, loadAction, RenderBufferStoreAction.Store, ClearFlag.None, Color.clear);
ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer), m_PassData, m_Source, cameraTargetHandle, cameraData);
cameraData.renderer.ConfigureCameraTarget(cameraTargetHandle, cameraTargetHandle);
}
}
}
/// <summary>
/// Performs the final blit from <paramref name="source"/> to <paramref name="destination"/>,
/// picking the material pass that matches the source texture's filter mode.
/// </summary>
private static void ExecutePass(RasterCommandBuffer cmd, PassData data, RTHandle source, RTHandle destination, UniversalCameraData cameraData)
{
    // Outside the scene view we assume we are writing straight to the back buffer.
    bool targetIsBackBuffer = !cameraData.isSceneViewCamera;
#if ENABLE_VR && ENABLE_XR_MODULE
    if (cameraData.xr.enabled)
    {
        // In XR, decide by comparing the destination against the XR render target.
        var destId = new RenderTargetIdentifier(destination.nameID, 0, CubemapFace.Unknown, -1);
        var xrTargetId = new RenderTargetIdentifier(cameraData.xr.renderTarget, 0, CubemapFace.Unknown, -1);
        targetIsBackBuffer = destId == xrTargetId;
    }
#endif
    Vector4 scaleBias = RenderingUtils.GetFinalBlitScaleBias(source, destination, cameraData);
    if (targetIsBackBuffer)
        cmd.SetViewport(cameraData.pixelRect);

    // turn off any global wireframe & "scene view wireframe shader hijack" settings for doing blits:
    // we never want them to show up as wireframe
    cmd.SetWireframe(false);

    CoreUtils.SetKeyword(data.blitMaterialData.material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, data.enableAlphaOutput);

    bool useBilinearSampler = source.rt?.filterMode == FilterMode.Bilinear;
    int passIndex = useBilinearSampler ? data.blitMaterialData.bilinearSamplerPass : data.blitMaterialData.nearestSamplerPass;
    Blitter.BlitTexture(cmd, source, scaleBias, data.blitMaterialData.material, passIndex);
}
// Data shared between RenderGraph setup (Render) and pass execution (ExecutePass).
private class PassData
{
    // Color texture to blit from.
    internal TextureHandle source;
    // Final destination (back buffer or intermediate target).
    internal TextureHandle destination;
    // Shader property id the source texture is bound to (set to ShaderPropertyId.sourceTex).
    internal int sourceID;
    // HDR output luminance parameters; w < 0 means HDR output is inactive.
    internal Vector4 hdrOutputLuminanceParams;
    // True when a linear-to-sRGB conversion keyword must be enabled for the blit.
    internal bool requireSrgbConversion;
    // True when the alpha channel must be preserved in the output.
    internal bool enableAlphaOutput;
    // Blit material plus its bilinear/nearest sampler pass indices.
    internal BlitMaterialData blitMaterialData;
    // Camera data captured at setup time.
    internal UniversalCameraData cameraData;
}
/// <summary>
/// Initialize the shared pass data.
/// </summary>
/// <param name="cameraData">Camera data for the camera being rendered.</param>
/// <param name="passData">Pass data instance to initialize.</param>
/// <param name="blitType">Which blit material variant to use (Core or HDR).</param>
/// <param name="enableAlphaOutput">True when the alpha channel must be preserved in the output.</param>
private void InitPassData(UniversalCameraData cameraData, ref PassData passData, BlitType blitType, bool enableAlphaOutput)
{
    passData.cameraData = cameraData;
    passData.requireSrgbConversion = cameraData.requireSrgbConversion;
    passData.enableAlphaOutput = enableAlphaOutput;
    // Select the pre-created material matching the requested blit type.
    passData.blitMaterialData = m_BlitMaterialData[(int)blitType];
}
/// <summary>
/// RenderGraph entry point: records the final blit pass that copies <paramref name="src"/>
/// into <paramref name="dest"/>, handling HDR output encoding and debug-screen resolve.
/// </summary>
internal void Render(RenderGraph renderGraph, ContextContainer frameData, UniversalCameraData cameraData, in TextureHandle src, in TextureHandle dest, TextureHandle overlayUITexture)
{
    using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
    {
        UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();

        // Only the UniversalRenderer guarantees that global textures will be available at this point
        bool isUniversalRenderer = (cameraData.renderer as UniversalRenderer) != null;
        if (cameraData.requiresDepthTexture && isUniversalRenderer)
            builder.UseGlobalTexture(s_CameraDepthTextureID);

        bool outputsToHDR = cameraData.isHDROutputActive;
        bool outputsAlpha = cameraData.isAlphaOutputEnabled;
        InitPassData(cameraData, ref passData, outputsToHDR ? BlitType.HDR : BlitType.Core, outputsAlpha);

        passData.sourceID = ShaderPropertyId.sourceTex;
        passData.source = src;
        builder.UseTexture(src, AccessFlags.Read);
        passData.destination = dest;

        // Default flag for non-XR common case
        AccessFlags targetAccessFlag = AccessFlags.Write;
#if ENABLE_VR && ENABLE_XR_MODULE
        // This is a screen-space pass, make sure foveated rendering is disabled for non-uniform renders
        bool passSupportsFoveation = !XRSystem.foveatedRenderingCaps.HasFlag(FoveatedRenderingCaps.NonUniformRaster);
        builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && passSupportsFoveation);

        // Optimization: In XR, we don't have split screen use case.
        // The access flag can be set to WriteAll if there is a full screen blit and no alpha blending,
        // so engine will set loadOperation to DontCare down to the pipe.
        if (cameraData.xr.enabled && cameraData.isDefaultViewport && !outputsAlpha)
            targetAccessFlag = AccessFlags.WriteAll;
#endif
        builder.SetRenderAttachment(dest, 0, targetAccessFlag);

        if (outputsToHDR && overlayUITexture.IsValid())
        {
            // HDR output active: fetch tonemapping settings to compute the luminance
            // parameters the blit material needs for HDR encoding.
            VolumeStack stack = VolumeManager.instance.stack;
            Tonemapping tonemapping = stack.GetComponent<Tonemapping>();
            UniversalRenderPipeline.GetHDROutputLuminanceParameters(passData.cameraData.hdrDisplayInformation, passData.cameraData.hdrDisplayColorGamut, tonemapping, out passData.hdrOutputLuminanceParams);
            builder.UseTexture(overlayUITexture, AccessFlags.Read);
        }
        else
        {
            // w < 0 signals "HDR output inactive" to the render func below.
            passData.hdrOutputLuminanceParams = new Vector4(-1.0f, -1.0f, -1.0f, -1.0f);
        }

        builder.AllowGlobalStateModification(true);
        builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
        {
            // Reset cached keyword state so stale variants from a previous frame are not reused.
            data.blitMaterialData.material.enabledKeywords = null;
            context.cmd.SetKeyword(ShaderGlobalKeywords.LinearToSRGBConversion, data.requireSrgbConversion);
            data.blitMaterialData.material.SetTexture(data.sourceID, data.source);

            DebugHandler debugHandler = GetActiveDebugHandler(data.cameraData);
            bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(data.cameraData.resolveFinalTarget);

            // TODO RENDERGRAPH: this should ideally be shared in ExecutePass to avoid code duplication
            if (data.hdrOutputLuminanceParams.w >= 0)
            {
                HDROutputUtils.Operation hdrOperation = HDROutputUtils.Operation.None;
                // If the HDRDebugView is on, we don't want the encoding
                if (debugHandler == null || !debugHandler.HDRDebugViewIsActive(data.cameraData.resolveFinalTarget))
                    hdrOperation |= HDROutputUtils.Operation.ColorEncoding;
                // Color conversion may have happened in the Uber post process through color grading, so we don't want to reapply it
                if (!data.cameraData.postProcessEnabled)
                    hdrOperation |= HDROutputUtils.Operation.ColorConversion;
                SetupHDROutput(data.cameraData.hdrDisplayColorGamut, data.blitMaterialData.material, hdrOperation, data.hdrOutputLuminanceParams, data.cameraData.rendersOverlayUI);
            }

            if (resolveToDebugScreen)
            {
                // Blit to the debugger screen texture instead of the camera target.
                RTHandle sourceTex = data.source;
                Vector2 viewportScale = sourceTex.useScaling ? new Vector2(sourceTex.rtHandleProperties.rtHandleScale.x, sourceTex.rtHandleProperties.rtHandleScale.y) : Vector2.one;
                int shaderPassIndex = sourceTex.rt?.filterMode == FilterMode.Bilinear ? data.blitMaterialData.bilinearSamplerPass : data.blitMaterialData.nearestSamplerPass;
                Blitter.BlitTexture(context.cmd, sourceTex, viewportScale, data.blitMaterialData.material, shaderPassIndex);
            }
            else
                ExecutePass(context.cmd, data, data.source, data.destination, data.cameraData);
        });
    }
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b66efce03c1804a4fbef78cccf176e4d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,268 @@
using System;
using UnityEngine.Experimental.GlobalIllumination;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Profiling;
using Unity.Collections;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal.Internal
{
// Renders scene geometry into the GBuffer attachments consumed by the deferred lighting pass,
// marking the material type of each pixel in the stencil buffer.
internal class GBufferPass : ScriptableRenderPass
{
    // Statics
    private static readonly int s_CameraNormalsTextureID = Shader.PropertyToID("_CameraNormalsTexture");
    private static readonly int s_CameraRenderingLayersTextureID = Shader.PropertyToID("_CameraRenderingLayersTexture");
    // Values matched against the "UniversalMaterialType" shader tag (see InitRendererLists).
    private static readonly ShaderTagId s_ShaderTagLit = new ShaderTagId("Lit");
    private static readonly ShaderTagId s_ShaderTagSimpleLit = new ShaderTagId("SimpleLit");
    private static readonly ShaderTagId s_ShaderTagUnlit = new ShaderTagId("Unlit");
    private static readonly ShaderTagId s_ShaderTagComplexLit = new ShaderTagId("ComplexLit");
    private static readonly ShaderTagId s_ShaderTagUniversalGBuffer = new ShaderTagId("UniversalGBuffer");
    private static readonly ShaderTagId s_ShaderTagUniversalMaterialType = new ShaderTagId("UniversalMaterialType");

    DeferredLights m_DeferredLights;

    // Lazily initialized in the constructor; shared by all GBufferPass instances.
    static ShaderTagId[] s_ShaderTagValues;
    static RenderStateBlock[] s_RenderStateBlocks;

    FilteringSettings m_FilteringSettings;
    RenderStateBlock m_RenderStateBlock;
    private PassData m_PassData;

    /// <summary>
    /// Creates the GBuffer pass.
    /// </summary>
    /// <param name="evt">Event at which the pass executes.</param>
    /// <param name="renderQueueRange">Render queue range of the renderers to draw.</param>
    /// <param name="layerMask">Layer mask filter for the drawn renderers.</param>
    /// <param name="stencilState">Base stencil state; the material-type bits are overwritten per tag value.</param>
    /// <param name="stencilReference">Stencil reference value.</param>
    /// <param name="deferredLights">Deferred lights manager that owns the GBuffer attachments.</param>
    public GBufferPass(RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask, StencilState stencilState, int stencilReference, DeferredLights deferredLights)
    {
        base.profilingSampler = new ProfilingSampler("Draw GBuffer");
        base.renderPassEvent = evt;
        m_PassData = new PassData();
        m_DeferredLights = deferredLights;
        m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
        m_RenderStateBlock = new RenderStateBlock(RenderStateMask.Nothing);
        m_RenderStateBlock.stencilState = stencilState;
        m_RenderStateBlock.stencilReference = stencilReference;
        m_RenderStateBlock.mask = RenderStateMask.Stencil;
        if (s_ShaderTagValues == null)
        {
            s_ShaderTagValues = new ShaderTagId[5];
            s_ShaderTagValues[0] = s_ShaderTagLit;
            s_ShaderTagValues[1] = s_ShaderTagSimpleLit;
            s_ShaderTagValues[2] = s_ShaderTagUnlit;
            s_ShaderTagValues[3] = s_ShaderTagComplexLit;
            s_ShaderTagValues[4] = new ShaderTagId(); // Special catch all case for materials where UniversalMaterialType is not defined or the tag value doesn't match anything we know.
        }
        if (s_RenderStateBlocks == null)
        {
            // One stencil render-state override per entry in s_ShaderTagValues, same index order.
            s_RenderStateBlocks = new RenderStateBlock[5];
            s_RenderStateBlocks[0] = DeferredLights.OverwriteStencil(m_RenderStateBlock, (int)StencilUsage.MaterialMask, (int)StencilUsage.MaterialLit);
            s_RenderStateBlocks[1] = DeferredLights.OverwriteStencil(m_RenderStateBlock, (int)StencilUsage.MaterialMask, (int)StencilUsage.MaterialSimpleLit);
            s_RenderStateBlocks[2] = DeferredLights.OverwriteStencil(m_RenderStateBlock, (int)StencilUsage.MaterialMask, (int)StencilUsage.MaterialUnlit);
            s_RenderStateBlocks[3] = DeferredLights.OverwriteStencil(m_RenderStateBlock, (int)StencilUsage.MaterialMask, (int)StencilUsage.MaterialUnlit); // Fill GBuffer, but skip lighting pass for ComplexLit
            s_RenderStateBlocks[4] = s_RenderStateBlocks[0];
        }
    }

    // Releases the GBuffer resources owned by the deferred lights manager.
    public void Dispose()
    {
        m_DeferredLights?.ReleaseGbufferResources();
    }

    // Compatibility (non-RenderGraph) path: allocates the GBuffer attachments and configures them as render targets.
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
    {
        RTHandle[] gbufferAttachments = m_DeferredLights.GbufferAttachments;
        if (cmd != null)
        {
            var allocateGbufferDepth = true;
            if (m_DeferredLights.UseFramebufferFetch && (m_DeferredLights.DepthCopyTexture != null && m_DeferredLights.DepthCopyTexture.rt != null))
            {
                // Reuse the existing depth copy texture as the GBuffer depth slot.
                m_DeferredLights.GbufferAttachments[m_DeferredLights.GbufferDepthIndex] = m_DeferredLights.DepthCopyTexture;
                allocateGbufferDepth = false;
            }
            // Create and declare the render targets used in the pass
            for (int i = 0; i < gbufferAttachments.Length; ++i)
            {
                // Lighting buffer has already been declared with line ConfigureCameraTarget(m_ActiveCameraColorAttachment.Identifier(), ...) in DeferredRenderer.Setup
                if (i == m_DeferredLights.GBufferLightingIndex)
                    continue;
                // Normal buffer may have already been created if there was a depthNormal prepass before.
                // DepthNormal prepass is needed for forward-only materials when SSAO is generated between gbuffer and deferred lighting pass.
                if (i == m_DeferredLights.GBufferNormalSmoothnessIndex && m_DeferredLights.HasNormalPrepass)
                    continue;
                if (i == m_DeferredLights.GbufferDepthIndex && !allocateGbufferDepth)
                    continue;
                // No need to setup temporaryRTs if we are using input attachments as they will be Memoryless
                if (m_DeferredLights.UseFramebufferFetch && (i != m_DeferredLights.GbufferDepthIndex && !m_DeferredLights.HasDepthPrepass))
                    continue;
                m_DeferredLights.ReAllocateGBufferIfNeeded(cameraTextureDescriptor, i);
                cmd.SetGlobalTexture(m_DeferredLights.GbufferAttachments[i].name, m_DeferredLights.GbufferAttachments[i].nameID);
            }
        }
        if (m_DeferredLights.UseFramebufferFetch)
            m_DeferredLights.UpdateDeferredInputAttachments();
        // Disable obsolete warning for internal usage
#pragma warning disable CS0618
        ConfigureTarget(m_DeferredLights.GbufferAttachments, m_DeferredLights.DepthAttachment, m_DeferredLights.GbufferFormats);
        // We must explicitly specify we don't want any clear to avoid unwanted side-effects.
        // ScriptableRenderer will implicitly force a clear the first time the camera color/depth targets are bound.
        ConfigureClear(ClearFlag.None, Color.black);
#pragma warning restore CS0618
    }

    // Compatibility (non-RenderGraph) path: builds the renderer lists and draws the GBuffer.
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        ContextContainer frameData = renderingData.frameData;
        UniversalRenderingData universalRenderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();
        m_PassData.deferredLights = m_DeferredLights;
        InitRendererLists(ref m_PassData, context, default(RenderGraph), universalRenderingData, cameraData, lightData, false);
        var cmd = renderingData.commandBuffer;
        using (new ProfilingScope(cmd, profilingSampler))
        {
#if UNITY_EDITOR
            // Need to clear the bounded targets to get scene-view filtering working.
            if (CoreUtils.IsSceneFilteringEnabled() && cameraData.camera.sceneViewFilterMode == Camera.SceneViewFilterMode.ShowFiltered)
                cmd.ClearRenderTarget(RTClearFlags.Color, Color.clear);
#endif
            ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(cmd), m_PassData, m_PassData.rendererList, m_PassData.objectsWithErrorRendererList);
            // If any sub-system needs camera normal texture, make it available.
            // Input attachments will only be used when this is not needed so safe to skip in that case
            if (!m_DeferredLights.UseFramebufferFetch)
                renderingData.commandBuffer.SetGlobalTexture(s_CameraNormalsTextureID, m_DeferredLights.GbufferAttachments[m_DeferredLights.GBufferNormalSmoothnessIndex]);
        }
    }

    // Draws the GBuffer renderer list; shared by the RenderGraph and non-RenderGraph paths.
    static void ExecutePass(RasterCommandBuffer cmd, PassData data, RendererList rendererList, RendererList errorRendererList)
    {
        bool usesRenderingLayers = data.deferredLights.UseRenderingLayers && !data.deferredLights.HasRenderingLayerPrepass;
        if (usesRenderingLayers)
            cmd.SetKeyword(ShaderGlobalKeywords.WriteRenderingLayers, true);
        cmd.DrawRendererList(rendererList);
        // Render objects that did not match any shader pass with error shader
        RenderingUtils.DrawRendererListObjectsWithError(cmd, ref errorRendererList);
        // Clean up
        if (usesRenderingLayers)
            cmd.SetKeyword(ShaderGlobalKeywords.WriteRenderingLayers, false);
    }

    /// <summary>
    /// Shared pass data
    /// </summary>
    private class PassData
    {
        internal TextureHandle[] gbuffer;
        internal TextureHandle depth;
        internal DeferredLights deferredLights;
        internal RendererListHandle rendererListHdl;
        internal RendererListHandle objectsWithErrorRendererListHdl;
        // Required for code sharing purpose between RG and non-RG.
        internal RendererList rendererList;
        internal RendererList objectsWithErrorRendererList;
    }

    // Creates the GBuffer renderer lists, dispatching per-material-type stencil state via the
    // "UniversalMaterialType" tag. Populates either the RendererListHandle fields (RG path)
    // or the RendererList fields (non-RG path) depending on useRenderGraph.
    private void InitRendererLists( ref PassData passData, ScriptableRenderContext context, RenderGraph renderGraph, UniversalRenderingData renderingData, UniversalCameraData cameraData, UniversalLightData lightData, bool useRenderGraph, uint batchLayerMask = uint.MaxValue)
    {
        // User can stack several scriptable renderers during rendering but deferred renderer should only lit pixels added by this gbuffer pass.
        // If we detect we are in such case (camera is in overlay mode), we clear the highest bits of stencil we have control of and use them to
        // mark what pixel to shade during deferred pass. Gbuffer will always mark pixels using their material types.
        ShaderTagId lightModeTag = s_ShaderTagUniversalGBuffer;
        var drawingSettings = CreateDrawingSettings(lightModeTag, renderingData, cameraData, lightData, cameraData.defaultOpaqueSortFlags);
        var filterSettings = m_FilteringSettings;
        filterSettings.batchLayerMask = batchLayerMask;
#if UNITY_EDITOR
        // When rendering the preview camera, we want the layer mask to be forced to Everything
        if (cameraData.isPreviewCamera)
            filterSettings.layerMask = -1;
#endif
        NativeArray<ShaderTagId> tagValues = new NativeArray<ShaderTagId>(s_ShaderTagValues, Allocator.Temp);
        NativeArray<RenderStateBlock> stateBlocks = new NativeArray<RenderStateBlock>(s_RenderStateBlocks, Allocator.Temp);
        var param = new RendererListParams(renderingData.cullResults, drawingSettings, filterSettings)
        {
            tagValues = tagValues,
            stateBlocks = stateBlocks,
            tagName = s_ShaderTagUniversalMaterialType,
            isPassTagName = false
        };
        if (useRenderGraph)
        {
            passData.rendererListHdl = renderGraph.CreateRendererList(param);
            RenderingUtils.CreateRendererListObjectsWithError(renderGraph, ref renderingData.cullResults, cameraData.camera, filterSettings, SortingCriteria.None, ref passData.objectsWithErrorRendererListHdl);
        }
        else
        {
            passData.rendererList = context.CreateRendererList(ref param);
            RenderingUtils.CreateRendererListObjectsWithError(context, ref renderingData.cullResults, cameraData.camera, filterSettings, SortingCriteria.None, ref passData.objectsWithErrorRendererList);
        }
        tagValues.Dispose();
        stateBlocks.Dispose();
    }

    // RenderGraph entry point: records the GBuffer pass, binding each GBuffer slice as a color
    // attachment plus the camera depth, and optionally publishing normals/rendering-layers globals.
    internal void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle cameraColor, TextureHandle cameraDepth, bool setGlobalTextures, uint batchLayerMask = uint.MaxValue)
    {
        UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
        UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();
        using var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler);
        bool useCameraRenderingLayersTexture = m_DeferredLights.UseRenderingLayers && !m_DeferredLights.UseLightLayers;
        passData.gbuffer = m_DeferredLights.GbufferTextureHandles;
        for (int i = 0; i < m_DeferredLights.GBufferSliceCount; i++)
        {
            Debug.Assert(passData.gbuffer[i].IsValid());
            builder.SetRenderAttachment(passData.gbuffer[i], i, AccessFlags.Write);
        }
        RenderGraphUtils.UseDBufferIfValid(builder, resourceData);
        passData.depth = cameraDepth;
        builder.SetRenderAttachmentDepth(cameraDepth, AccessFlags.Write);
        passData.deferredLights = m_DeferredLights;
        InitRendererLists(ref passData, default(ScriptableRenderContext), renderGraph, renderingData, cameraData, lightData, true);
        builder.UseRendererList(passData.rendererListHdl);
        builder.UseRendererList(passData.objectsWithErrorRendererListHdl);
        if (setGlobalTextures)
        {
            builder.SetGlobalTextureAfterPass(resourceData.cameraNormalsTexture, s_CameraNormalsTextureID);
            if (useCameraRenderingLayersTexture)
                builder.SetGlobalTextureAfterPass(resourceData.renderingLayersTexture, s_CameraRenderingLayersTextureID);
        }
        builder.AllowGlobalStateModification(true);
        builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
        {
            ExecutePass(context.cmd, data, data.rendererListHdl, data.objectsWithErrorRendererListHdl);
        });
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 10e31bb2e2b26314bb6132009378847c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,293 @@
using System;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Generate HDR debug data into the given color target
/// </summary>
internal class HDRDebugViewPass : ScriptableRenderPass
{
    // Shader pass indices in the HDR debug material.
    private enum HDRDebugPassId
    {
        CIExyPrepass = 0,
        DebugViewPass = 1
    }

    PassDataCIExy m_PassDataCIExy;
    PassDataDebugView m_PassDataDebugView;
    RTHandle m_CIExyTarget; // xyBuffer;
    RTHandle m_PassthroughRT;
    Material m_material;

    /// <summary>
    /// Creates a new <c>HDRDebugViewPass</c> instance.
    /// </summary>
    /// <param name="mat">The <c>Material</c> to use.</param>
    /// <seealso cref="RenderPassEvent"/>
    public HDRDebugViewPass(Material mat)
    {
        profilingSampler = new ProfilingSampler("Blit HDR Debug Data");
        renderPassEvent = RenderPassEvent.AfterRendering + 3;
        m_PassDataCIExy = new PassDataCIExy() { material = mat };
        m_PassDataDebugView = new PassDataDebugView() { material = mat };
        m_material = mat;
        // Disabling native render passes (for non-RG) because it renders to 2 different render targets
        useNativeRenderPass = false;
    }

    // Common to RenderGraph and non-RenderGraph paths
    private class PassDataCIExy
    {
        internal Material material;
        internal Vector4 luminanceParameters;
        internal TextureHandle srcColor;
        internal TextureHandle xyBuffer;
        internal TextureHandle passThrough;
    }

    private class PassDataDebugView
    {
        internal Material material;
        internal HDRDebugMode hdrDebugMode;
        internal UniversalCameraData cameraData;
        internal Vector4 luminanceParameters;
        internal TextureHandle overlayUITexture;
        internal TextureHandle xyBuffer;
        internal TextureHandle srcColor;
        internal TextureHandle dstColor;
    }

    // Configures a descriptor for the square single-channel CIE xy mapping buffer
    // (UAV-writable, no mips/depth/MSAA).
    public static void ConfigureDescriptorForCIEPrepass(ref RenderTextureDescriptor descriptor)
    {
        descriptor.graphicsFormat = GraphicsFormat.R32_SFloat;
        descriptor.width = descriptor.height = ShaderConstants._SizeOfHDRXYMapping;
        descriptor.useMipMap = false;
        descriptor.autoGenerateMips = false;
        descriptor.useDynamicScale = true;
        descriptor.depthStencilFormat = GraphicsFormat.None;
        descriptor.enableRandomWrite = true;
        descriptor.msaaSamples = 1;
        descriptor.dimension = TextureDimension.Tex2D;
        descriptor.vrUsage = VRTextureUsage.None; // We only need one for both eyes in VR
    }

    // Returns the HDR output luminance parameters, or a default vector (z = 1) when HDR output is inactive.
    internal static Vector4 GetLuminanceParameters(UniversalCameraData cameraData)
    {
        var luminanceParams = Vector4.zero;
        if (cameraData.isHDROutputActive)
        {
            Tonemapping tonemapping = VolumeManager.instance.stack.GetComponent<Tonemapping>();
            UniversalRenderPipeline.GetHDROutputLuminanceParameters(cameraData.hdrDisplayInformation, cameraData.hdrDisplayColorGamut, tonemapping, out luminanceParams);
        }
        else
        {
            luminanceParams.z = 1.0f;
        }
        return luminanceParams;
    }

    // Pass 0: accumulates the CIE xy chromaticity mapping into xyTarget via a random-write (UAV) target
    // while blitting the source into a throwaway destination.
    private static void ExecuteCIExyPrepass(CommandBuffer cmd, PassDataCIExy data, RTHandle sourceTexture, RTHandle xyTarget, RTHandle destTexture)
    {
        CoreUtils.SetRenderTarget(cmd, destTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare, ClearFlag.None, Color.clear);
        Vector4 debugParameters = new Vector4(ShaderConstants._SizeOfHDRXYMapping, ShaderConstants._SizeOfHDRXYMapping, 0, 0);
        // Bind the xy buffer as a UAV so the shader can scatter chromaticity samples into it.
        cmd.SetRandomWriteTarget(ShaderConstants._CIExyUAVIndex, xyTarget);
        data.material.SetVector(ShaderConstants._HDRDebugParamsId, debugParameters);
        data.material.SetVector(ShaderPropertyId.hdrOutputLuminanceParams, data.luminanceParameters);
        Vector2 viewportScale = sourceTexture.useScaling ? new Vector2(sourceTexture.rtHandleProperties.rtHandleScale.x, sourceTexture.rtHandleProperties.rtHandleScale.y) : Vector2.one;
        Blitter.BlitTexture(cmd, sourceTexture, viewportScale, data.material, 0);
        cmd.ClearRandomWriteTargets();
    }

    // Pass 1: composites the debug view (using the xy buffer from the prepass) into the destination.
    private static void ExecuteHDRDebugViewFinalPass(RasterCommandBuffer cmd, PassDataDebugView data, RTHandle sourceTexture, RTHandle destination, RTHandle xyTarget)
    {
        if (data.cameraData.isHDROutputActive)
        {
            HDROutputUtils.ConfigureHDROutput(data.material, data.cameraData.hdrDisplayColorGamut, HDROutputUtils.Operation.ColorEncoding);
            CoreUtils.SetKeyword(data.material, ShaderKeywordStrings.HDROverlay, data.cameraData.rendersOverlayUI);
        }
        data.material.SetTexture(ShaderConstants._xyTextureId, xyTarget);
        Vector4 debugParameters = new Vector4(ShaderConstants._SizeOfHDRXYMapping, ShaderConstants._SizeOfHDRXYMapping, 0, 0);
        data.material.SetVector(ShaderConstants._HDRDebugParamsId, debugParameters);
        data.material.SetVector(ShaderPropertyId.hdrOutputLuminanceParams, data.luminanceParameters);
        data.material.SetInteger(ShaderConstants._DebugHDRModeId, (int)data.hdrDebugMode);
        Vector4 scaleBias = RenderingUtils.GetFinalBlitScaleBias(sourceTexture, destination, data.cameraData);
        RenderTargetIdentifier cameraTarget = BuiltinRenderTextureType.CameraTarget;
#if ENABLE_VR && ENABLE_XR_MODULE
        if (data.cameraData.xr.enabled)
            cameraTarget = data.cameraData.xr.renderTarget;
#endif
        // Restrict the blit to the camera viewport when writing to the final camera target.
        if (destination.nameID == cameraTarget || data.cameraData.targetTexture != null)
            cmd.SetViewport(data.cameraData.pixelRect);
        Blitter.BlitTexture(cmd, sourceTexture, scaleBias, data.material, 1);
    }

    // Non-RenderGraph path
    public void Dispose()
    {
        m_CIExyTarget?.Release();
        m_PassthroughRT?.Release();
    }

    /// <summary>
    /// Configure the pass
    /// </summary>
    /// <param name="cameraData">Camera data used to size and configure the debug render targets.</param>
    /// <param name="hdrdebugMode">Active DebugMode for HDR.</param>
    public void Setup(UniversalCameraData cameraData, HDRDebugMode hdrdebugMode)
    {
        m_PassDataDebugView.hdrDebugMode = hdrdebugMode;
        RenderTextureDescriptor descriptor = cameraData.cameraTargetDescriptor;
        DebugHandler.ConfigureColorDescriptorForDebugScreen(ref descriptor, cameraData.pixelWidth, cameraData.pixelHeight);
        RenderingUtils.ReAllocateHandleIfNeeded(ref m_PassthroughRT, descriptor, name: "_HDRDebugDummyRT");
        RenderTextureDescriptor descriptorCIE = cameraData.cameraTargetDescriptor;
        HDRDebugViewPass.ConfigureDescriptorForCIEPrepass(ref descriptorCIE);
        RenderingUtils.ReAllocateHandleIfNeeded(ref m_CIExyTarget, descriptorCIE, name: "_xyBuffer");
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        UniversalCameraData cameraData = renderingData.frameData.Get<UniversalCameraData>();
        var cmd = renderingData.commandBuffer;
        m_PassDataCIExy.luminanceParameters = m_PassDataDebugView.luminanceParameters = GetLuminanceParameters(cameraData);
        m_PassDataDebugView.cameraData = cameraData;
        var sourceTexture = renderingData.cameraData.renderer.cameraColorTargetHandle;
        var cameraTarget = RenderingUtils.GetCameraTargetIdentifier(ref renderingData);
        // Get RTHandle alias to use RTHandle apis
        RTHandleStaticHelpers.SetRTHandleStaticWrapper(cameraTarget);
        var cameraTargetHandle = RTHandleStaticHelpers.s_RTHandleWrapper;
        // Reset cached keyword state so stale variants are not reused.
        m_material.enabledKeywords = null;
        GetActiveDebugHandler(cameraData)?.UpdateShaderGlobalPropertiesForFinalValidationPass(cmd, cameraData, true);
        CoreUtils.SetRenderTarget(cmd, m_CIExyTarget, ClearFlag.Color, Color.clear);
        ExecutePass(cmd, m_PassDataCIExy, m_PassDataDebugView, sourceTexture, m_CIExyTarget, cameraTargetHandle);
    }

    // Non-RenderGraph execution: optional CIE xy prepass, then the final debug view blit.
    private void ExecutePass(CommandBuffer cmd, PassDataCIExy dataCIExy, PassDataDebugView dataDebugView, RTHandle sourceTexture, RTHandle xyTarget, RTHandle destTexture)
    {
        RasterCommandBuffer rasterCmd = CommandBufferHelpers.GetRasterCommandBuffer(cmd);
        //CIExyPrepass
        bool requiresCIExyData = dataDebugView.hdrDebugMode != HDRDebugMode.ValuesAbovePaperWhite;
        if (requiresCIExyData)
        {
            using (new ProfilingScope(cmd, profilingSampler))
            {
                ExecuteCIExyPrepass(cmd, dataCIExy, sourceTexture, xyTarget, m_PassthroughRT);
            }
        }
        //HDR DebugView - should always be the last stack of the camera
        CoreUtils.SetRenderTarget(cmd, destTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, ClearFlag.None, Color.clear);
        using (new ProfilingScope(cmd, profilingSampler))
        {
            ExecuteHDRDebugViewFinalPass(rasterCmd, dataDebugView, sourceTexture, destTexture, xyTarget);
        }
        // Disable obsolete warning for internal usage
#pragma warning disable CS0618
        dataDebugView.cameraData.renderer.ConfigureCameraTarget(destTexture, destTexture);
#pragma warning restore CS0618
    }

    //RenderGraph path
    internal void RenderHDRDebug(RenderGraph renderGraph, UniversalCameraData cameraData, TextureHandle srcColor, TextureHandle overlayUITexture, TextureHandle dstColor, HDRDebugMode hdrDebugMode)
    {
        bool requiresCIExyData = hdrDebugMode != HDRDebugMode.ValuesAbovePaperWhite;
        Vector4 luminanceParameters = GetLuminanceParameters(cameraData);
        TextureHandle intermediateRT = srcColor;
        TextureHandle xyBuffer = TextureHandle.nullHandle;
        if (requiresCIExyData)
        {
            RenderTextureDescriptor descriptor = cameraData.cameraTargetDescriptor;
            DebugHandler.ConfigureColorDescriptorForDebugScreen(ref descriptor, cameraData.pixelWidth, cameraData.pixelHeight);
            intermediateRT = UniversalRenderer.CreateRenderGraphTexture(renderGraph, descriptor, "_HDRDebugDummyRT", false);
            ConfigureDescriptorForCIEPrepass(ref descriptor);
            xyBuffer = UniversalRenderer.CreateRenderGraphTexture(renderGraph, descriptor, "_xyBuffer", true);
            // Using low level pass because of random UAV support, and since this is a debug view, we don't care much about merging passes or optimizing for TBDR.
            // This could be a compute pass (like in HDRP) but doing it in pixel is compatible with devices that might support HDR output but not compute shaders.
            using (var builder = renderGraph.AddUnsafePass<PassDataCIExy>("Blit HDR DebugView CIExy", out var passData, profilingSampler))
            {
                passData.material = m_material;
                passData.luminanceParameters = luminanceParameters;
                passData.srcColor = srcColor;
                builder.UseTexture(srcColor);
                passData.xyBuffer = xyBuffer;
                builder.UseTexture(xyBuffer, AccessFlags.Write);
                passData.passThrough = intermediateRT;
                builder.UseTexture(intermediateRT, AccessFlags.Write);
                builder.SetRenderFunc((PassDataCIExy data, UnsafeGraphContext context) =>
                {
                    ExecuteCIExyPrepass(CommandBufferHelpers.GetNativeCommandBuffer(context.cmd), data, data.srcColor, data.xyBuffer, data.passThrough);
                });
            }
        }
        using (var builder = renderGraph.AddRasterRenderPass<PassDataDebugView>("Blit HDR DebugView", out var passData, profilingSampler))
        {
            passData.material = m_material;
            passData.hdrDebugMode = hdrDebugMode;
            passData.luminanceParameters = luminanceParameters;
            passData.cameraData = cameraData;
            if (requiresCIExyData)
            {
                passData.xyBuffer = xyBuffer;
                builder.UseTexture(xyBuffer);
            }
            passData.srcColor = srcColor;
            builder.UseTexture(srcColor);
            passData.dstColor = dstColor;
            builder.SetRenderAttachment(dstColor, 0, AccessFlags.WriteAll);
            if (overlayUITexture.IsValid())
            {
                passData.overlayUITexture = overlayUITexture;
                builder.UseTexture(overlayUITexture);
            }
            builder.SetRenderFunc((PassDataDebugView data, RasterGraphContext context) =>
            {
                data.material.enabledKeywords = null;
                ExecuteHDRDebugViewFinalPass(context.cmd, data, data.srcColor, data.dstColor, data.xyBuffer);
            });
        }
    }

    // Shader property ids and constants used by the HDR debug material.
    internal class ShaderConstants
    {
        public static readonly int _DebugHDRModeId = Shader.PropertyToID("_DebugHDRMode");
        public static readonly int _HDRDebugParamsId = Shader.PropertyToID("_HDRDebugParams");
        public static readonly int _xyTextureId = Shader.PropertyToID("_xyBuffer");
        public static readonly int _SizeOfHDRXYMapping = 512;
        public static readonly int _CIExyUAVIndex = 1;
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4bcbfb254d9395f439b5d8a711e29acd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,51 @@
using System;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Pass that triggers the engine's OnRenderObject callbacks.
/// </summary>
internal class InvokeOnRenderObjectCallbackPass : ScriptableRenderPass
{
    public InvokeOnRenderObjectCallbackPass(RenderPassEvent evt)
    {
        profilingSampler = new ProfilingSampler("Invoke OnRenderObject Callback");
        renderPassEvent = evt;
        //TODO: should we fix and re-enable native render pass for this pass?
        // Currently disabled because when the callback is empty it causes an empty Begin/End RenderPass block, which causes artifacts on Vulkan
        useNativeRenderPass = false;
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        renderingData.commandBuffer.InvokeOnRenderObjectCallbacks();
    }

    // Render targets the callbacks draw into.
    private class PassData
    {
        internal TextureHandle color;
        internal TextureHandle depth;
    }

    internal void Render(RenderGraph renderGraph, TextureHandle colorTarget, TextureHandle depthTarget)
    {
        using var builder = renderGraph.AddUnsafePass<PassData>(passName, out var passData, profilingSampler);

        passData.color = colorTarget;
        passData.depth = depthTarget;
        builder.UseTexture(colorTarget, AccessFlags.Write);
        builder.UseTexture(depthTarget, AccessFlags.Write);

        // User callbacks can have arbitrary side effects, so this pass must never be culled.
        builder.AllowPassCulling(false);

        builder.SetRenderFunc((PassData data, UnsafeGraphContext context) =>
        {
            context.cmd.SetRenderTarget(data.color, data.depth);
            context.cmd.InvokeOnRenderObjectCallbacks();
        });
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 65317dda628fae54d953accd02570d92
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,512 @@
using System;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Renders a shadow map for the main Light.
/// </summary>
public class MainLightShadowCasterPass : ScriptableRenderPass
{
    // Internal
    internal RTHandle m_MainLightShadowmapTexture; // Cascade atlas target used by the non-RenderGraph path.

    // Private
    private int renderTargetWidth;                 // Shadowmap atlas width, taken from UniversalShadowData.
    private int renderTargetHeight;                // Shadowmap atlas height, taken from UniversalShadowData.
    private int m_ShadowCasterCascadesCount;       // Number of cascades rendered this frame (up to k_MaxCascades).
    private bool m_CreateEmptyShadowmap;           // When true, only the 1x1 fallback shadowmap and shader params are set up.
    private bool m_SetKeywordForEmptyShadowmap;    // Whether to still enable the MainLightShadows keyword in the empty path.
    private bool m_EmptyShadowmapNeedsClear;       // Tracks whether the fallback shadowmap RT still needs to be cleared.
    private float m_CascadeBorder;                 // Cascade border fraction fed into the linear distance-fade computation.
    private float m_MaxShadowDistanceSq;           // Squared max shadow distance, used for the distance fade.
    private PassData m_PassData;                   // Reused pass-data instance for the non-RenderGraph path.
    private RTHandle m_EmptyMainLightShadowmapTexture;          // 1x1 fallback shadowmap bound when no real-time shadows are rendered.
    private RenderTextureDescriptor m_MainLightShadowDescriptor; // Cached descriptor; rebuilt only when size/format changes.
    private readonly Vector4[] m_CascadeSplitDistances;         // Per cascade: xyz = culling sphere center, w = sphere radius.
    private readonly Matrix4x4[] m_MainLightShadowMatrices;     // World-to-shadow matrices per cascade, plus a trailing no-op entry.
    private readonly ProfilingSampler m_ProfilingSetupSampler = new ("Setup Main Shadowmap");
    private readonly ShadowSliceData[] m_CascadeSlices;         // Per-cascade slice data (matrices, atlas offsets, resolution).

    // Constants and Statics
    private const int k_EmptyShadowMapDimensions = 1;
    private const int k_MaxCascades = 4;
    private const int k_ShadowmapBufferBits = 16;
    private const string k_MainLightShadowMapTextureName = "_MainLightShadowmapTexture";
    private const string k_EmptyMainLightShadowMapTextureName = "_EmptyMainLightShadowmapTexture";
    // (shadow strength, soft-shadow quality, fade scale, fade bias) pushed to shaders by the empty path.
    private static Vector4 s_EmptyShadowParams = new (0f, 0f, 1f, 0f);
    private static readonly Vector4 s_EmptyShadowmapSize = new (k_EmptyShadowMapDimensions, 1f / k_EmptyShadowMapDimensions, k_EmptyShadowMapDimensions, k_EmptyShadowMapDimensions);

    // Classes
    // Shader property IDs for the main-light shadow uniforms.
    private static class MainLightShadowConstantBuffer
    {
        public static readonly int _WorldToShadow = Shader.PropertyToID("_MainLightWorldToShadow");
        public static readonly int _ShadowParams = Shader.PropertyToID("_MainLightShadowParams");
        public static readonly int _CascadeShadowSplitSpheres0 = Shader.PropertyToID("_CascadeShadowSplitSpheres0");
        public static readonly int _CascadeShadowSplitSpheres1 = Shader.PropertyToID("_CascadeShadowSplitSpheres1");
        public static readonly int _CascadeShadowSplitSpheres2 = Shader.PropertyToID("_CascadeShadowSplitSpheres2");
        public static readonly int _CascadeShadowSplitSpheres3 = Shader.PropertyToID("_CascadeShadowSplitSpheres3");
        public static readonly int _CascadeShadowSplitSphereRadii = Shader.PropertyToID("_CascadeShadowSplitSphereRadii");
        public static readonly int _ShadowOffset0 = Shader.PropertyToID("_MainLightShadowOffset0");
        public static readonly int _ShadowOffset1 = Shader.PropertyToID("_MainLightShadowOffset1");
        public static readonly int _ShadowmapSize = Shader.PropertyToID("_MainLightShadowmapSize");
        public static readonly int _MainLightShadowmapID = Shader.PropertyToID(k_MainLightShadowMapTextureName);
    }

    // Per-frame state shared between the RenderGraph and non-RenderGraph code paths.
    private class PassData
    {
        internal bool emptyShadowmap;
        internal bool setKeywordForEmptyShadowmap;
        internal UniversalRenderingData renderingData;
        internal UniversalCameraData cameraData;
        internal UniversalLightData lightData;
        internal UniversalShadowData shadowData;
        internal MainLightShadowCasterPass pass;
        internal TextureHandle shadowmapTexture;
        internal readonly RendererList[] shadowRendererLists = new RendererList[k_MaxCascades];
        internal readonly RendererListHandle[] shadowRendererListsHandle = new RendererListHandle[k_MaxCascades];
    }

    /// <summary>
    /// Creates a new <c>MainLightShadowCasterPass</c> instance.
    /// </summary>
    /// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
    /// <seealso cref="RenderPassEvent"/>
    public MainLightShadowCasterPass(RenderPassEvent evt)
    {
        profilingSampler = new ProfilingSampler("Draw Main Light Shadowmap");
        renderPassEvent = evt;
        m_PassData = new PassData();
        m_MainLightShadowMatrices = new Matrix4x4[k_MaxCascades + 1]; // +1 slot for the no-op matrix.
        m_CascadeSlices = new ShadowSliceData[k_MaxCascades];
        m_CascadeSplitDistances = new Vector4[k_MaxCascades];
        m_EmptyShadowmapNeedsClear = true;
    }

    /// <summary>
    /// Cleans up resources used by the pass.
    /// </summary>
    public void Dispose()
    {
        m_MainLightShadowmapTexture?.Release();
        m_EmptyMainLightShadowmapTexture?.Release();
    }

    /// <summary>
    /// Sets up the pass.
    /// </summary>
    /// <param name="renderingData"></param>
    /// <returns>True if the pass should be enqueued, otherwise false.</returns>
    /// <seealso cref="RenderingData"/>
    public bool Setup(ref RenderingData renderingData)
    {
        ContextContainer frameData = renderingData.frameData;
        UniversalRenderingData universalRenderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();
        UniversalShadowData shadowData = frameData.Get<UniversalShadowData>();
        return Setup(universalRenderingData, cameraData, lightData, shadowData);
    }

    /// <summary>
    /// Sets up the pass.
    /// </summary>
    /// <param name="renderingData">Data containing rendering settings.</param>
    /// <param name="cameraData">Data containing camera settings.</param>
    /// <param name="lightData">Data containing light settings.</param>
    /// <param name="shadowData">Data containing shadow settings.</param>
    /// <returns>True if the pass should be enqueued, otherwise false.</returns>
    /// <seealso cref="RenderingData"/>
    public bool Setup(UniversalRenderingData renderingData, UniversalCameraData cameraData, UniversalLightData lightData, UniversalShadowData shadowData)
    {
        bool shadowsEnabled = shadowData.mainLightShadowsEnabled;
        bool shadowsSupported = shadowData.supportsMainLightShadows;
#if UNITY_EDITOR
        if (CoreUtils.IsSceneLightingDisabled(cameraData.camera))
            return false;
#endif
        using var profScope = new ProfilingScope(m_ProfilingSetupSampler);

        bool stripShadowsOffVariants = cameraData.renderer.stripShadowsOffVariants;
        Clear();

        int shadowLightIndex = lightData.mainLightIndex;
        // No main light at all: fall back to the empty shadowmap when shadows are on, otherwise skip the pass.
        if (shadowLightIndex == -1)
        {
            if (shadowsEnabled)
                return SetupForEmptyRendering(stripShadowsOffVariants, shadowsEnabled, null, cameraData, shadowData);
            else
                return false;
        }

        VisibleLight shadowLight = lightData.visibleLights[shadowLightIndex];
        Light light = shadowLight.light;
        if (shadowsSupported && light.shadows == LightShadows.None)
            return SetupForEmptyRendering(stripShadowsOffVariants, shadowsEnabled, light, cameraData, shadowData);

        if (!shadowsEnabled)
        {
            // If (realtime) shadows are disabled, but the light casts baked shadows, we need to do empty rendering to setup the _MainLightShadowParams uniform,
            // which is also used when sampling baked shadows. This allows for using baked shadows even when realtime shadows are completely disabled.
            if (light.shadows != LightShadows.None &&
                light.bakingOutput.isBaked &&
                light.bakingOutput.mixedLightingMode != MixedLightingMode.IndirectOnly &&
                light.bakingOutput.lightmapBakeType == LightmapBakeType.Mixed)
            {
                return SetupForEmptyRendering(stripShadowsOffVariants, shadowsEnabled, light, cameraData, shadowData);
            }
            return false;
        }

        if (!shadowsSupported)
            return SetupForEmptyRendering(stripShadowsOffVariants, shadowsEnabled, null, cameraData, shadowData);

        if (shadowLight.lightType != LightType.Directional)
        {
            Debug.LogWarning("Only directional lights are supported as main light.");
        }

        // Nothing casts a shadow for this light: render the empty shadowmap instead.
        if (!renderingData.cullResults.GetShadowCasterBounds(shadowLightIndex, out Bounds _))
            return SetupForEmptyRendering(stripShadowsOffVariants, shadowsEnabled, light, cameraData, shadowData);

        m_ShadowCasterCascadesCount = shadowData.mainLightShadowCascadesCount;
        renderTargetWidth = shadowData.mainLightRenderTargetWidth;
        renderTargetHeight = shadowData.mainLightRenderTargetHeight;

        // Copy the pre-computed per-cascade culling results; bail out to the empty path if any slice is invalid.
        ref readonly URPLightShadowCullingInfos shadowCullingInfos = ref shadowData.visibleLightsShadowCullingInfos.UnsafeElementAt(shadowLightIndex);
        for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
        {
            ref readonly ShadowSliceData sliceData = ref shadowCullingInfos.slices.UnsafeElementAt(cascadeIndex);
            m_CascadeSplitDistances[cascadeIndex] = sliceData.splitData.cullingSphere;
            m_CascadeSlices[cascadeIndex] = sliceData;
            if (!shadowCullingInfos.IsSliceValid(cascadeIndex))
                return SetupForEmptyRendering(stripShadowsOffVariants, shadowsEnabled, light, cameraData, shadowData);
        }

        UpdateTextureDescriptorIfNeeded();

        m_MaxShadowDistanceSq = cameraData.maxShadowDistance * cameraData.maxShadowDistance;
        m_CascadeBorder = shadowData.mainLightShadowCascadeBorder;
        m_CreateEmptyShadowmap = false;
        useNativeRenderPass = true;
        return true;
    }

    // Rebuilds the cached shadowmap descriptor only when the requested dimensions or format changed.
    private void UpdateTextureDescriptorIfNeeded()
    {
        if ( m_MainLightShadowDescriptor.width != renderTargetWidth
            || m_MainLightShadowDescriptor.height != renderTargetHeight
            || m_MainLightShadowDescriptor.depthBufferBits != k_ShadowmapBufferBits
            || m_MainLightShadowDescriptor.colorFormat != RenderTextureFormat.Shadowmap)
        {
            m_MainLightShadowDescriptor = new RenderTextureDescriptor(renderTargetWidth, renderTargetHeight, RenderTextureFormat.Shadowmap, k_ShadowmapBufferBits);
        }
    }

    // Configures the pass to bind the 1x1 fallback shadowmap and upload shadow params without rendering
    // any casters. Returns false (pass not enqueued) when the renderer keeps _SHADOWS_OFF variants.
    bool SetupForEmptyRendering(bool stripShadowsOffVariants, bool shadowsEnabled, Light light, UniversalCameraData cameraData, UniversalShadowData shadowData)
    {
        if (!stripShadowsOffVariants)
            return false;

        m_CreateEmptyShadowmap = true;
        useNativeRenderPass = false;
        m_SetKeywordForEmptyShadowmap = shadowsEnabled;

        // Even though there are not real-time shadows, the light might be using shadowmasks,
        // which is why we need to update the shadow parameters, for example so shadow strength can be used.
        if (light == null)
        {
            s_EmptyShadowParams = new Vector4(0, 0, 1, 0);
        }
        else
        {
            bool supportsSoftShadows = shadowData.supportsSoftShadows;
            // Fix: GetScaleAndBiasForLinearDistanceFade expects the SQUARED max shadow distance
            // (see m_MaxShadowDistanceSq in Setup and its use in SetupMainLightShadowReceiverConstants);
            // the previous code passed the un-squared distance here.
            float maxShadowDistanceSq = cameraData.maxShadowDistance * cameraData.maxShadowDistance;
            float mainLightShadowCascadeBorder = shadowData.mainLightShadowCascadeBorder;
            bool softShadows = light.shadows == LightShadows.Soft && supportsSoftShadows;
            float softShadowsProp = ShadowUtils.SoftShadowQualityToShaderProperty(light, softShadows);
            ShadowUtils.GetScaleAndBiasForLinearDistanceFade(maxShadowDistanceSq, mainLightShadowCascadeBorder, out float shadowFadeScale, out float shadowFadeBias);
            s_EmptyShadowParams = new Vector4(light.shadowStrength, softShadowsProp, shadowFadeScale, shadowFadeBias);
        }

        return true;
    }

    /// <inheritdoc />
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
    {
        // Disable obsolete warning for internal usage
#pragma warning disable CS0618
        if (m_CreateEmptyShadowmap)
        {
            // Required for scene view camera(URP renderer not initialized)
            if (ShadowUtils.ShadowRTReAllocateIfNeeded(ref m_EmptyMainLightShadowmapTexture, k_EmptyShadowMapDimensions, k_EmptyShadowMapDimensions, k_ShadowmapBufferBits, name: k_EmptyMainLightShadowMapTextureName))
                m_EmptyShadowmapNeedsClear = true;
            // The fallback shadowmap only needs to be cleared once (or after re-allocation).
            if (!m_EmptyShadowmapNeedsClear)
                return;
            ConfigureTarget(m_EmptyMainLightShadowmapTexture);
            m_EmptyShadowmapNeedsClear = false;
        }
        else
        {
            ShadowUtils.ShadowRTReAllocateIfNeeded(ref m_MainLightShadowmapTexture, renderTargetWidth, renderTargetHeight, k_ShadowmapBufferBits, name: k_MainLightShadowMapTextureName);
            ConfigureTarget(m_MainLightShadowmapTexture);
        }
        ConfigureClear(ClearFlag.All, Color.black);
#pragma warning restore CS0618
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        ContextContainer frameData = renderingData.frameData;
        UniversalRenderingData universalRenderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();
        UniversalShadowData shadowData = frameData.Get<UniversalShadowData>();
        RasterCommandBuffer rasterCommandBuffer = CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer);
        if (m_CreateEmptyShadowmap)
        {
            // Empty path: just bind the 1x1 shadowmap and upload the shadow params.
            if (m_SetKeywordForEmptyShadowmap)
                rasterCommandBuffer.EnableKeyword(ShaderGlobalKeywords.MainLightShadows);
            SetShadowParamsForEmptyShadowmap(rasterCommandBuffer);
            universalRenderingData.commandBuffer.SetGlobalTexture(MainLightShadowConstantBuffer._MainLightShadowmapID, m_EmptyMainLightShadowmapTexture.nameID);
            return;
        }

        InitPassData(ref m_PassData, universalRenderingData, cameraData, lightData, shadowData);
        InitRendererLists(ref m_PassData, context, default(RenderGraph), false);
        RenderMainLightCascadeShadowmap(rasterCommandBuffer, ref m_PassData, false);
        universalRenderingData.commandBuffer.SetGlobalTexture(MainLightShadowConstantBuffer._MainLightShadowmapID, m_MainLightShadowmapTexture.nameID);
    }

    // Resets the per-frame cascade state before Setup repopulates it.
    void Clear()
    {
        for (int i = 0; i < m_MainLightShadowMatrices.Length; ++i)
            m_MainLightShadowMatrices[i] = Matrix4x4.identity;
        for (int i = 0; i < m_CascadeSplitDistances.Length; ++i)
            m_CascadeSplitDistances[i] = new Vector4(0.0f, 0.0f, 0.0f, 0.0f);
        for (int i = 0; i < m_CascadeSlices.Length; ++i)
            m_CascadeSlices[i].Clear();
    }

    // Uploads the shadowmap size and shadow params uniforms for the empty (no real-time shadows) path.
    internal static void SetShadowParamsForEmptyShadowmap(RasterCommandBuffer rasterCommandBuffer)
    {
        rasterCommandBuffer.SetGlobalVector(MainLightShadowConstantBuffer._ShadowmapSize, s_EmptyShadowmapSize);
        rasterCommandBuffer.SetGlobalVector(MainLightShadowConstantBuffer._ShadowParams, s_EmptyShadowParams);
    }

    // Renders every cascade slice of the main-light shadowmap and sets the shadow keywords/uniforms.
    void RenderMainLightCascadeShadowmap(RasterCommandBuffer cmd, ref PassData data, bool isRenderGraph)
    {
        var lightData = data.lightData;
        int shadowLightIndex = lightData.mainLightIndex;
        if (shadowLightIndex == -1)
            return;

        VisibleLight shadowLight = lightData.visibleLights[shadowLightIndex];
        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.MainLightShadow)))
        {
            // Need to start by setting the Camera position and worldToCamera Matrix as that is not set for passes executed before normal rendering
            ShadowUtils.SetCameraPosition(cmd, data.cameraData.worldSpaceCameraPos);

            // For non-RG, need set the worldToCamera Matrix as that is not set for passes executed before normal rendering,
            // otherwise shadows will behave incorrectly when Scene and Game windows are open at the same time (UUM-63267).
            if (!isRenderGraph)
                ShadowUtils.SetWorldToCameraAndCameraToWorldMatrices(cmd, data.cameraData.GetViewMatrix());

            for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
            {
                Vector4 shadowBias = ShadowUtils.GetShadowBias(ref shadowLight, shadowLightIndex, data.shadowData, m_CascadeSlices[cascadeIndex].projectionMatrix, m_CascadeSlices[cascadeIndex].resolution);
                ShadowUtils.SetupShadowCasterConstantBuffer(cmd, ref shadowLight, shadowBias);
                // Main light is directional, never a punctual light.
                cmd.SetKeyword(ShaderGlobalKeywords.CastingPunctualLightShadow, false);
                RendererList shadowRendererList = isRenderGraph? data.shadowRendererListsHandle[cascadeIndex] : data.shadowRendererLists[cascadeIndex];
                ShadowUtils.RenderShadowSlice(cmd, ref m_CascadeSlices[cascadeIndex], ref shadowRendererList, m_CascadeSlices[cascadeIndex].projectionMatrix, m_CascadeSlices[cascadeIndex].viewMatrix);
            }

            data.shadowData.isKeywordSoftShadowsEnabled = shadowLight.light.shadows == LightShadows.Soft && data.shadowData.supportsSoftShadows;
            // One cascade uses the plain keyword; multiple cascades use the _CASCADE variant.
            cmd.SetKeyword(ShaderGlobalKeywords.MainLightShadows, data.shadowData.mainLightShadowCascadesCount == 1);
            cmd.SetKeyword(ShaderGlobalKeywords.MainLightShadowCascades, data.shadowData.mainLightShadowCascadesCount > 1);
            ShadowUtils.SetSoftShadowQualityShaderKeywords(cmd, data.shadowData);

            SetupMainLightShadowReceiverConstants(cmd, ref shadowLight, data.shadowData);
        }
    }

    // Uploads all receiver-side uniforms: world-to-shadow matrices, shadow params,
    // cascade split spheres and the soft-shadow sampling offsets.
    void SetupMainLightShadowReceiverConstants(RasterCommandBuffer cmd, ref VisibleLight shadowLight, UniversalShadowData shadowData)
    {
        Light light = shadowLight.light;
        bool softShadows = shadowLight.light.shadows == LightShadows.Soft && shadowData.supportsSoftShadows;

        int cascadeCount = m_ShadowCasterCascadesCount;
        for (int i = 0; i < cascadeCount; ++i)
            m_MainLightShadowMatrices[i] = m_CascadeSlices[i].shadowTransform;

        // We set up an additional no-op WorldToShadow matrix in the last index
        // because the ComputeCascadeIndex function in Shadows.hlsl can return an index
        // out of bounds. (position not inside any cascade) and we want to avoid branching
        Matrix4x4 noOpShadowMatrix = Matrix4x4.zero;
        noOpShadowMatrix.m22 = (SystemInfo.usesReversedZBuffer) ? 1.0f : 0.0f;
        for (int i = cascadeCount; i <= k_MaxCascades; ++i)
            m_MainLightShadowMatrices[i] = noOpShadowMatrix;

        float invShadowAtlasWidth = 1.0f / renderTargetWidth;
        float invShadowAtlasHeight = 1.0f / renderTargetHeight;
        float invHalfShadowAtlasWidth = 0.5f * invShadowAtlasWidth;
        float invHalfShadowAtlasHeight = 0.5f * invShadowAtlasHeight;
        float softShadowsProp = ShadowUtils.SoftShadowQualityToShaderProperty(light, softShadows);
        ShadowUtils.GetScaleAndBiasForLinearDistanceFade(m_MaxShadowDistanceSq, m_CascadeBorder, out float shadowFadeScale, out float shadowFadeBias);

        cmd.SetGlobalMatrixArray(MainLightShadowConstantBuffer._WorldToShadow, m_MainLightShadowMatrices);
        cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowParams,
            new Vector4(light.shadowStrength, softShadowsProp, shadowFadeScale, shadowFadeBias));

        if (m_ShadowCasterCascadesCount > 1)
        {
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres0,
                m_CascadeSplitDistances[0]);
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres1,
                m_CascadeSplitDistances[1]);
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres2,
                m_CascadeSplitDistances[2]);
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres3,
                m_CascadeSplitDistances[3]);
            // Shader compares squared distances, so upload the squared radii.
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSphereRadii, new Vector4(
                m_CascadeSplitDistances[0].w * m_CascadeSplitDistances[0].w,
                m_CascadeSplitDistances[1].w * m_CascadeSplitDistances[1].w,
                m_CascadeSplitDistances[2].w * m_CascadeSplitDistances[2].w,
                m_CascadeSplitDistances[3].w * m_CascadeSplitDistances[3].w));
        }

        // Inside shader soft shadows are controlled through global keyword.
        // If any additional light has soft shadows it will force soft shadows on main light too.
        // As it is not trivial finding out which additional light has soft shadows, we will pass main light properties if soft shadows are supported.
        // This workaround will be removed once we will support soft shadows per light.
        if (shadowData.supportsSoftShadows)
        {
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowOffset0,
                new Vector4(-invHalfShadowAtlasWidth, -invHalfShadowAtlasHeight,
                    invHalfShadowAtlasWidth, -invHalfShadowAtlasHeight));
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowOffset1,
                new Vector4(-invHalfShadowAtlasWidth, invHalfShadowAtlasHeight,
                    invHalfShadowAtlasWidth, invHalfShadowAtlasHeight));
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowmapSize, new Vector4(invShadowAtlasWidth,
                invShadowAtlasHeight,
                renderTargetWidth, renderTargetHeight));
        }
    }

    // Copies the per-frame frame-data references into the (reused) PassData instance.
    private void InitPassData(
        ref PassData passData,
        UniversalRenderingData renderingData,
        UniversalCameraData cameraData,
        UniversalLightData lightData,
        UniversalShadowData shadowData)
    {
        passData.pass = this;
        passData.emptyShadowmap = m_CreateEmptyShadowmap;
        passData.setKeywordForEmptyShadowmap = m_SetKeywordForEmptyShadowmap;
        passData.renderingData = renderingData;
        passData.cameraData = cameraData;
        passData.lightData = lightData;
        passData.shadowData = shadowData;
    }

    // Creates one shadow renderer list per cascade, through the RenderGraph or the ScriptableRenderContext
    // depending on the active code path.
    private void InitRendererLists(ref PassData passData, ScriptableRenderContext context, RenderGraph renderGraph, bool useRenderGraph)
    {
        int shadowLightIndex = passData.lightData.mainLightIndex;
        if (!m_CreateEmptyShadowmap && shadowLightIndex != -1)
        {
            ShadowDrawingSettings settings = new (passData.renderingData.cullResults, shadowLightIndex) {
                useRenderingLayerMaskTest = UniversalRenderPipeline.asset.useRenderingLayers
            };
            for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
            {
                if (useRenderGraph)
                    passData.shadowRendererListsHandle[cascadeIndex] = renderGraph.CreateShadowRendererList(ref settings);
                else
                    passData.shadowRendererLists[cascadeIndex] = context.CreateShadowRendererList(ref settings);
            }
        }
    }

    // RenderGraph entry point: records the shadow pass and returns the shadowmap texture handle
    // (the default 1x1 shadow texture when the empty path is active).
    internal TextureHandle Render(RenderGraph graph, ContextContainer frameData)
    {
        UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();
        UniversalShadowData shadowData = frameData.Get<UniversalShadowData>();

        TextureHandle shadowTexture;

        using (var builder = graph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
        {
            InitPassData(ref passData, renderingData, cameraData, lightData, shadowData);
            InitRendererLists(ref passData, default(ScriptableRenderContext), graph, true);

            if (!m_CreateEmptyShadowmap)
            {
                for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
                {
                    builder.UseRendererList(passData.shadowRendererListsHandle[cascadeIndex]);
                }
                shadowTexture = UniversalRenderer.CreateRenderGraphTexture(graph, m_MainLightShadowDescriptor, k_MainLightShadowMapTextureName, true, ShadowUtils.m_ForceShadowPointSampling ? FilterMode.Point : FilterMode.Bilinear);
                builder.SetRenderAttachmentDepth(shadowTexture, AccessFlags.Write);
            }
            else
            {
                shadowTexture = graph.defaultResources.defaultShadowTexture;
            }

            // The pass writes global keywords and uniforms, and publishes the shadowmap as a global texture.
            builder.AllowGlobalStateModification(true);

            if (shadowTexture.IsValid())
                builder.SetGlobalTextureAfterPass(shadowTexture, MainLightShadowConstantBuffer._MainLightShadowmapID);

            builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
            {
                RasterCommandBuffer rasterCommandBuffer = context.cmd;
                if (!data.emptyShadowmap)
                {
                    data.pass.RenderMainLightCascadeShadowmap(rasterCommandBuffer, ref data, true);
                }
                else
                {
                    if (data.setKeywordForEmptyShadowmap)
                        rasterCommandBuffer.EnableKeyword(ShaderGlobalKeywords.MainLightShadows);
                    SetShadowParamsForEmptyShadowmap(rasterCommandBuffer);
                }
            });
        }

        return shadowTexture;
    }
};
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 51015cd4dddd59d4b95f01cf645067bd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,290 @@
using System;
using Unity.Collections;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal
{
sealed class MotionVectorRenderPass : ScriptableRenderPass
{
#region Fields
internal const string k_MotionVectorTextureName = "_MotionVectorTexture";
internal const string k_MotionVectorDepthTextureName = "_MotionVectorDepthTexture";
internal const GraphicsFormat k_TargetFormat = GraphicsFormat.R16G16_SFloat;
public const string k_MotionVectorsLightModeTag = "MotionVectors";
static readonly string[] s_ShaderTags = new string[] { k_MotionVectorsLightModeTag };
static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture");
static readonly ProfilingSampler s_SetMotionMatrixProfilingSampler = new ProfilingSampler("Set Motion Vector Global Matrices");
RTHandle m_Color;
RTHandle m_Depth;
readonly Material m_CameraMaterial;
readonly FilteringSettings m_FilteringSettings;
private PassData m_PassData;
#endregion
#region Constructors
internal MotionVectorRenderPass(RenderPassEvent evt, Material cameraMaterial, LayerMask opaqueLayerMask)
{
profilingSampler = ProfilingSampler.Get(URPProfileId.DrawMotionVectors);
renderPassEvent = evt;
m_CameraMaterial = cameraMaterial;
m_FilteringSettings = new FilteringSettings(RenderQueueRange.opaque,opaqueLayerMask);
m_PassData = new PassData();
ConfigureInput(ScriptableRenderPassInput.Depth);
}
#endregion
#region State
internal void Setup(RTHandle color, RTHandle depth)
{
m_Color = color;
m_Depth = depth;
}
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
{
cmd.SetGlobalTexture(m_Color.name, m_Color.nameID);
cmd.SetGlobalTexture(m_Depth.name, m_Depth.nameID);
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
ConfigureTarget(m_Color, m_Depth);
ConfigureClear(ClearFlag.Color | ClearFlag.Depth, Color.black);
// Can become a Store based on 'StoreActionsOptimization.Auto' and/or if a user RendererFeature is added.
// We need to keep the MotionVecDepth in case of a user wants to extend the motion vectors
// using a custom RendererFeature.
ConfigureDepthStoreAction(RenderBufferStoreAction.DontCare);
#pragma warning restore CS0618
}
#endregion
#region Execution
private static void ExecutePass(RasterCommandBuffer cmd, PassData passData, RendererList rendererList)
{
var cameraMaterial = passData.cameraMaterial;
if (cameraMaterial == null)
return;
// Get data
Camera camera = passData.camera;
// Never draw in Preview
if (camera.cameraType == CameraType.Preview)
return;
// These flags are still required in SRP or the engine won't compute previous model matrices...
// If the flag hasn't been set yet on this camera, motion vectors will skip a frame.
camera.depthTextureMode |= DepthTextureMode.MotionVectors | DepthTextureMode.Depth;
// TODO: add option to only draw either one?
DrawCameraMotionVectors(cmd, passData.xr, cameraMaterial);
DrawObjectMotionVectors(cmd, passData.xr, ref rendererList);
}
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
ContextContainer frameData = renderingData.frameData;
UniversalRenderingData universalRenderingData = frameData.Get<UniversalRenderingData>();
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
var cmd = CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer);
// Profiling command
using (new ProfilingScope(cmd,profilingSampler))
{
InitPassData(ref m_PassData, cameraData);
InitRendererLists(ref m_PassData, ref universalRenderingData.cullResults, universalRenderingData.supportsDynamicBatching,
context, default(RenderGraph), false);
ExecutePass(cmd, m_PassData, m_PassData.rendererList);
}
}
private static DrawingSettings GetDrawingSettings(Camera camera, bool supportsDynamicBatching)
{
var sortingSettings = new SortingSettings(camera) { criteria = SortingCriteria.CommonOpaque };
var drawingSettings = new DrawingSettings(ShaderTagId.none, sortingSettings)
{
perObjectData = PerObjectData.MotionVectors,
enableDynamicBatching = supportsDynamicBatching,
enableInstancing = true,
lodCrossFadeStencilMask = 0, // Disable stencil-based lod because depth copy before motion vector pass doesn't copy stencils.
};
for (int i = 0; i < s_ShaderTags.Length; ++i)
{
drawingSettings.SetShaderPassName(i, new ShaderTagId(s_ShaderTags[i]));
}
return drawingSettings;
}
// NOTE: depends on camera depth to reconstruct static geometry positions
private static void DrawCameraMotionVectors(RasterCommandBuffer cmd, XRPass xr, Material cameraMaterial)
{
#if ENABLE_VR && ENABLE_XR_MODULE
bool foveatedRendering = xr.supportsFoveatedRendering;
bool nonUniformFoveatedRendering = foveatedRendering && XRSystem.foveatedRenderingCaps.HasFlag(FoveatedRenderingCaps.NonUniformRaster);
if (foveatedRendering)
{
if (nonUniformFoveatedRendering)
// This is a screen-space pass, make sure foveated rendering is disabled for non-uniform renders
cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Disabled);
else
cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Enabled);
}
#endif
// Draw fullscreen quad
cmd.DrawProcedural(Matrix4x4.identity, cameraMaterial, 0, MeshTopology.Triangles, 3, 1);
#if ENABLE_VR && ENABLE_XR_MODULE
if (foveatedRendering && !nonUniformFoveatedRendering)
cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Disabled);
#endif
}
private static void DrawObjectMotionVectors(RasterCommandBuffer cmd, XRPass xr, ref RendererList rendererList)
{
#if ENABLE_VR && ENABLE_XR_MODULE
bool foveatedRendering = xr.supportsFoveatedRendering;
if (foveatedRendering)
// This is a geometry pass, enable foveated rendering (we need to disable it after)
cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Enabled);
#endif
cmd.DrawRendererList(rendererList);
#if ENABLE_VR && ENABLE_XR_MODULE
if (foveatedRendering)
cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Disabled);
#endif
}
#endregion
/// <summary>
/// Shared pass data
/// </summary>
private class PassData
{
internal Camera camera;
internal XRPass xr;
internal TextureHandle motionVectorColor;
internal TextureHandle motionVectorDepth;
internal TextureHandle cameraDepth;
internal Material cameraMaterial;
internal RendererListHandle rendererListHdl;
// Required for code sharing purpose between RG and non-RG.
internal RendererList rendererList;
}
/// <summary>
/// Initialize the shared pass data.
/// </summary>
/// <param name="passData"></param>
private void InitPassData(ref PassData passData, UniversalCameraData cameraData)
{
passData.camera = cameraData.camera;
passData.xr = cameraData.xr;
passData.cameraMaterial = m_CameraMaterial;
}
private void InitRendererLists(ref PassData passData, ref CullingResults cullResults, bool supportsDynamicBatching, ScriptableRenderContext context, RenderGraph renderGraph, bool useRenderGraph)
{
var drawingSettings = GetDrawingSettings(passData.camera, supportsDynamicBatching);
var renderStateBlock = new RenderStateBlock(RenderStateMask.Nothing);
if (useRenderGraph)
RenderingUtils.CreateRendererListWithRenderStateBlock(renderGraph, ref cullResults, drawingSettings, m_FilteringSettings, renderStateBlock, ref passData.rendererListHdl);
else
RenderingUtils.CreateRendererListWithRenderStateBlock(context, ref cullResults, drawingSettings, m_FilteringSettings, renderStateBlock, ref passData.rendererList);
}
internal void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle cameraDepthTexture, TextureHandle motionVectorColor, TextureHandle motionVectorDepth)
{
UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
{
builder.UseAllGlobalTextures(true);
builder.AllowGlobalStateModification(true);
if (cameraData.xr.enabled)
builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && cameraData.xrUniversal.canFoveateIntermediatePasses);
passData.motionVectorColor = motionVectorColor;
builder.SetRenderAttachment(motionVectorColor, 0, AccessFlags.Write);
passData.motionVectorDepth = motionVectorDepth;
builder.SetRenderAttachmentDepth(motionVectorDepth, AccessFlags.Write);
InitPassData(ref passData, cameraData);
passData.cameraDepth = cameraDepthTexture;
builder.UseTexture(cameraDepthTexture, AccessFlags.Read);
InitRendererLists(ref passData, ref renderingData.cullResults, renderingData.supportsDynamicBatching,
default(ScriptableRenderContext), renderGraph, true);
builder.UseRendererList(passData.rendererListHdl);
if (motionVectorColor.IsValid())
builder.SetGlobalTextureAfterPass(motionVectorColor, Shader.PropertyToID(k_MotionVectorTextureName));
if (motionVectorDepth.IsValid())
builder.SetGlobalTextureAfterPass(motionVectorDepth, Shader.PropertyToID(k_MotionVectorDepthTextureName));
builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
{
if (data.cameraMaterial != null)
data.cameraMaterial.SetTexture(s_CameraDepthTextureID, data.cameraDepth);
ExecutePass(context.cmd, data, data.rendererListHdl);
});
}
}
// Global motion vector matrix setup pass.
// Used for MotionVector passes and also read in VFX early compute shader
public class MotionMatrixPassData
{
    public MotionVectorsPersistentData motionData; // the camera's persistent motion-vector matrix data
    public XRPass xr;                              // XR pass info, forwarded to SetGlobalMotionMatrices
};
/// <summary>
/// Immediately pushes the camera's motion vector matrices to global shader state.
/// Does nothing when the camera has no UniversalAdditionalCameraData or no
/// persistent motion-vector data.
/// </summary>
internal static void SetMotionVectorGlobalMatrices(CommandBuffer cmd, UniversalCameraData cameraData)
{
    if (!cameraData.camera.TryGetComponent<UniversalAdditionalCameraData>(out var additionalCameraData))
        return;

    var motionData = additionalCameraData.motionVectorsPersistentData;
    motionData?.SetGlobalMotionMatrices(CommandBufferHelpers.GetRasterCommandBuffer(cmd), cameraData.xr);
}
// Render-graph variant of SetMotionVectorGlobalMatrices: records a small raster pass
// that sets the global motion matrices when the graph executes.
internal static void SetRenderGraphMotionVectorGlobalMatrices(RenderGraph renderGraph, UniversalCameraData cameraData)
{
    if (cameraData.camera.TryGetComponent<UniversalAdditionalCameraData>(out var additionalCameraData))
    {
        // Guard against missing motion data: the immediate-mode path
        // (SetMotionVectorGlobalMatrices) tolerates a null motionVectorsPersistentData
        // via '?.', but the render func below would dereference it unconditionally,
        // throwing at graph execution time. Skip recording the pass instead.
        if (additionalCameraData.motionVectorsPersistentData == null)
            return;

        using (var builder = renderGraph.AddRasterRenderPass<MotionMatrixPassData>(s_SetMotionMatrixProfilingSampler.name, out var passData, s_SetMotionMatrixProfilingSampler))
        {
            passData.motionData = additionalCameraData.motionVectorsPersistentData;
            passData.xr = cameraData.xr;

            // Writing shader globals requires opting into global state modification.
            builder.AllowGlobalStateModification(true);

            // 'static' lambda: must not capture locals; reads only from passData.
            builder.SetRenderFunc(static (MotionMatrixPassData data, RasterGraphContext context) =>
            {
                data.motionData.SetGlobalMotionMatrices(context.cmd, data.xr);
            });
        }
    }
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 096294dfb309e47929b561541e4b087e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 858a96c3295017349ab0f956b9883bb9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7d7d4ecff4c4b59488b97a13efb52739
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,108 @@
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Uses a compute shader to capture the depth and normal of the pixel under the cursor.
/// </summary>
internal partial class ProbeVolumeDebugPass : ScriptableRenderPass
{
    ComputeShader m_ComputeShader; // contains the "ComputePositionNormal" kernel
    RTHandle m_DepthTexture;       // camera depth buffer, assigned via Setup()
    RTHandle m_NormalTexture;      // camera normal buffer, assigned via Setup()

    /// <summary>
    /// Creates a new <c>ProbeVolumeDebugPass</c> instance.
    /// </summary>
    public ProbeVolumeDebugPass(RenderPassEvent evt, ComputeShader computeShader)
    {
        base.profilingSampler = new ProfilingSampler("Dispatch APV Debug");
        renderPassEvent = evt;
        m_ComputeShader = computeShader;
    }

    // Stores the depth/normal buffers that the compute kernel samples.
    public void Setup(RTHandle depthBuffer, RTHandle normalBuffer)
    {
        m_DepthTexture = depthBuffer;
        m_NormalTexture = normalBuffer;
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        // Nothing to do until the probe volume system is initialized.
        if (!ProbeReferenceVolume.instance.isInitialized)
            return;

        ref CameraData cameraData = ref renderingData.cameraData;
        // Only dispatch when a probe sampling debug readback was requested for this camera.
        if (ProbeReferenceVolume.instance.GetProbeSamplingDebugResources(cameraData.camera, out var resultBuffer, out Vector2 coords))
        {
            var cmd = renderingData.commandBuffer;
            int kernel = m_ComputeShader.FindKernel("ComputePositionNormal");

            cmd.SetComputeTextureParam(m_ComputeShader, kernel, "_CameraDepthTexture", m_DepthTexture);
            cmd.SetComputeTextureParam(m_ComputeShader, kernel, "_NormalBufferTexture", m_NormalTexture);
            cmd.SetComputeVectorParam(m_ComputeShader, "_positionSS", new Vector4(coords.x, coords.y, 0.0f, 0.0f));
            cmd.SetComputeBufferParam(m_ComputeShader, kernel, "_ResultBuffer", resultBuffer);
            // Single cursor position to resolve: one thread group is enough.
            cmd.DispatchCompute(m_ComputeShader, kernel, 1, 1, 1);
        }
    }

    // Render-graph pass data for the compute dispatch below.
    class WriteApvData
    {
        public ComputeShader computeShader; // shader with the "ComputePositionNormal" kernel
        public BufferHandle resultBuffer;   // imported buffer receiving the kernel's output
        public Vector2 clickCoordinates;    // cursor position passed as _positionSS
        public TextureHandle depthBuffer;   // depth source read by the kernel
        public TextureHandle normalBuffer;  // normal source read by the kernel
    }

    /// <summary>
    /// Render graph entry point
    /// </summary>
    /// <param name="renderGraph"></param>
    /// <param name="frameData"></param>
    /// <param name="depthPyramidBuffer"></param>
    /// <param name="normalBuffer"></param>
    internal void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle depthPyramidBuffer, TextureHandle normalBuffer)
    {
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();

        // Nothing to do until the probe volume system is initialized.
        if (!ProbeReferenceVolume.instance.isInitialized)
            return;

        if (ProbeReferenceVolume.instance.GetProbeSamplingDebugResources(cameraData.camera, out var resultBuffer, out Vector2 coords))
        {
            using (var builder = renderGraph.AddComputePass<WriteApvData>(passName, out var passData, profilingSampler))
            {
                passData.clickCoordinates = coords;
                passData.computeShader = m_ComputeShader;
                passData.resultBuffer = renderGraph.ImportBuffer(resultBuffer);
                passData.depthBuffer = depthPyramidBuffer;
                passData.normalBuffer = normalBuffer;

                builder.UseBuffer(passData.resultBuffer, AccessFlags.Write);
                builder.UseTexture(passData.depthBuffer, AccessFlags.Read);
                builder.UseTexture(passData.normalBuffer, AccessFlags.Read);

                builder.SetRenderFunc((WriteApvData data, ComputeGraphContext ctx) =>
                {
                    int kernel = data.computeShader.FindKernel("ComputePositionNormal");

                    ctx.cmd.SetComputeTextureParam(data.computeShader, kernel, "_CameraDepthTexture", data.depthBuffer);
                    ctx.cmd.SetComputeTextureParam(data.computeShader, kernel, "_NormalBufferTexture", data.normalBuffer);
                    ctx.cmd.SetComputeVectorParam(data.computeShader, "_positionSS", new Vector4(data.clickCoordinates.x, data.clickCoordinates.y, 0.0f, 0.0f));
                    ctx.cmd.SetComputeBufferParam(data.computeShader, kernel, "_ResultBuffer", data.resultBuffer);
                    // Single cursor position to resolve: one thread group is enough.
                    ctx.cmd.DispatchCompute(data.computeShader, kernel, 1, 1, 1);
                });
            }
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 14a56f221d48d58419103d8e8c070dd9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,329 @@
using System;
using System.Collections.Generic;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Scripting.APIUpdating;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// The scriptable render pass used with the render objects renderer feature.
/// </summary>
[MovedFrom(true, "UnityEngine.Experimental.Rendering.Universal")]
public class RenderObjectsPass : ScriptableRenderPass
{
    RenderQueueType renderQueueType;                     // opaque vs transparent queue selection (set in Init)
    FilteringSettings m_FilteringSettings;               // queue range + layer mask filter built in Init
    RenderObjects.CustomCameraSettings m_CameraSettings; // optional custom FOV / view-offset overrides

    /// <summary>
    /// The override material to use.
    /// </summary>
    public Material overrideMaterial { get; set; }

    /// <summary>
    /// The pass index to use with the override material.
    /// </summary>
    public int overrideMaterialPassIndex { get; set; }

    /// <summary>
    /// The override shader to use.
    /// </summary>
    public Shader overrideShader { get; set; }

    /// <summary>
    /// The pass index to use with the override shader.
    /// </summary>
    public int overrideShaderPassIndex { get; set; }

    // Shader tags (LightMode passes) rendered by this pass; populated in Init.
    List<ShaderTagId> m_ShaderTagIdList = new List<ShaderTagId>();

    // Reused pass-data instance for the non-render-graph (compatibility) path.
    private PassData m_PassData;

    /// <summary>
    /// Sets the write and comparison function for depth.
    /// </summary>
    /// <param name="writeEnabled">Sets whether it should write to depth or not.</param>
    /// <param name="function">The depth comparison function to use.</param>
    [Obsolete("Use SetDepthState instead", true)]
    public void SetDetphState(bool writeEnabled, CompareFunction function = CompareFunction.Less)
    {
        // Kept (original typo and all) purely for API compatibility; forwards to the fixed name.
        SetDepthState(writeEnabled, function);
    }

    /// <summary>
    /// Sets the write and comparison function for depth.
    /// </summary>
    /// <param name="writeEnabled">Sets whether it should write to depth or not.</param>
    /// <param name="function">The depth comparison function to use.</param>
    public void SetDepthState(bool writeEnabled, CompareFunction function = CompareFunction.Less)
    {
        m_RenderStateBlock.mask |= RenderStateMask.Depth;
        m_RenderStateBlock.depthState = new DepthState(writeEnabled, function);
    }

    /// <summary>
    /// Sets up the stencil settings for the pass.
    /// </summary>
    /// <param name="reference">The stencil reference value.</param>
    /// <param name="compareFunction">The comparison function to use.</param>
    /// <param name="passOp">The stencil operation to use when the stencil test passes.</param>
    /// <param name="failOp">The stencil operation to use when the stencil test fails.</param>
    /// <param name="zFailOp">The stencil operation to use when the stencil test fails because of depth.</param>
    public void SetStencilState(int reference, CompareFunction compareFunction, StencilOp passOp, StencilOp failOp, StencilOp zFailOp)
    {
        StencilState stencilState = StencilState.defaultValue;
        stencilState.enabled = true;
        stencilState.SetCompareFunction(compareFunction);
        stencilState.SetPassOperation(passOp);
        stencilState.SetFailOperation(failOp);
        stencilState.SetZFailOperation(zFailOp);

        m_RenderStateBlock.mask |= RenderStateMask.Stencil;
        m_RenderStateBlock.stencilReference = reference;
        m_RenderStateBlock.stencilState = stencilState;
    }

    // Accumulates the depth/stencil overrides configured through SetDepthState/SetStencilState.
    RenderStateBlock m_RenderStateBlock;

    /// <summary>
    /// The constructor for render objects pass.
    /// </summary>
    /// <param name="profilerTag">The profiler tag used with the pass.</param>
    /// <param name="renderPassEvent">Controls when the render pass executes.</param>
    /// <param name="shaderTags">List of shader tags to render with.</param>
    /// <param name="renderQueueType">The queue type for the objects to render.</param>
    /// <param name="layerMask">The layer mask to use for creating filtering settings that control what objects get rendered.</param>
    /// <param name="cameraSettings">The settings for custom cameras values.</param>
    public RenderObjectsPass(string profilerTag, RenderPassEvent renderPassEvent, string[] shaderTags, RenderQueueType renderQueueType, int layerMask, RenderObjects.CustomCameraSettings cameraSettings)
    {
        profilingSampler = new ProfilingSampler(profilerTag);
        Init(renderPassEvent, shaderTags, renderQueueType, layerMask, cameraSettings);
    }

    // Internal overload taking a predefined URP profiling id instead of a free-form tag.
    internal RenderObjectsPass(URPProfileId profileId, RenderPassEvent renderPassEvent, string[] shaderTags, RenderQueueType renderQueueType, int layerMask, RenderObjects.CustomCameraSettings cameraSettings)
    {
        profilingSampler = ProfilingSampler.Get(profileId);
        Init(renderPassEvent, shaderTags, renderQueueType, layerMask, cameraSettings);
    }

    // Shared constructor body: resets overrides, builds the filtering settings and the shader tag list.
    internal void Init(RenderPassEvent renderPassEvent, string[] shaderTags, RenderQueueType renderQueueType, int layerMask, RenderObjects.CustomCameraSettings cameraSettings)
    {
        m_PassData = new PassData();

        this.renderPassEvent = renderPassEvent;
        this.renderQueueType = renderQueueType;
        this.overrideMaterial = null;
        this.overrideMaterialPassIndex = 0;
        this.overrideShader = null;
        this.overrideShaderPassIndex = 0;

        RenderQueueRange renderQueueRange = (renderQueueType == RenderQueueType.Transparent)
            ? RenderQueueRange.transparent
            : RenderQueueRange.opaque;
        m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);

        // Use the caller-provided tags when present, otherwise fall back to URP's default passes.
        if (shaderTags != null && shaderTags.Length > 0)
        {
            foreach (var tag in shaderTags)
                m_ShaderTagIdList.Add(new ShaderTagId(tag));
        }
        else
        {
            m_ShaderTagIdList.Add(new ShaderTagId("SRPDefaultUnlit"));
            m_ShaderTagIdList.Add(new ShaderTagId("UniversalForward"));
            m_ShaderTagIdList.Add(new ShaderTagId("UniversalForwardOnly"));
        }

        m_RenderStateBlock = new RenderStateBlock(RenderStateMask.Nothing);
        m_CameraSettings = cameraSettings;
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        UniversalRenderingData universalRenderingData = renderingData.frameData.Get<UniversalRenderingData>();
        UniversalCameraData cameraData = renderingData.frameData.Get<UniversalCameraData>();
        UniversalLightData lightData = renderingData.frameData.Get<UniversalLightData>();

        var cmd = CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer);
        using (new ProfilingScope(cmd, profilingSampler))
        {
            InitPassData(cameraData, ref m_PassData);
            // Compatibility path: last two args select the immediate-mode renderer list.
            InitRendererLists(universalRenderingData, lightData, ref m_PassData, context, default(RenderGraph), false);

            ExecutePass(m_PassData, cmd, m_PassData.rendererList, renderingData.cameraData.IsCameraProjectionMatrixFlipped());
        }
    }

    // Issues the draw: optionally overrides the view/projection matrices, draws the
    // renderer list (or the debug lists when a debug handler is active), then restores
    // the camera matrices if requested.
    private static void ExecutePass(PassData passData, RasterCommandBuffer cmd, RendererList rendererList, bool isYFlipped)
    {
        Camera camera = passData.cameraData.camera;

        // In case of camera stacking we need to take the viewport rect from base camera
        Rect pixelRect = passData.cameraData.pixelRect;
        float cameraAspect = (float)pixelRect.width / (float)pixelRect.height;

        if (passData.cameraSettings.overrideCamera)
        {
            if (passData.cameraData.xr.enabled)
            {
                Debug.LogWarning("RenderObjects pass is configured to override camera matrices. While rendering in stereo camera matrices cannot be overridden.");
            }
            else
            {
                // Rebuild the projection with the custom FOV and shift the view by the custom offset.
                Matrix4x4 projectionMatrix = Matrix4x4.Perspective(passData.cameraSettings.cameraFieldOfView, cameraAspect,
                    camera.nearClipPlane, camera.farClipPlane);
                projectionMatrix = GL.GetGPUProjectionMatrix(projectionMatrix, isYFlipped);

                Matrix4x4 viewMatrix = passData.cameraData.GetViewMatrix();
                Vector4 cameraTranslation = viewMatrix.GetColumn(3);
                viewMatrix.SetColumn(3, cameraTranslation + passData.cameraSettings.offset);

                RenderingUtils.SetViewAndProjectionMatrices(cmd, viewMatrix, projectionMatrix, false);
            }
        }

        var activeDebugHandler = GetActiveDebugHandler(passData.cameraData);
        if (activeDebugHandler != null)
        {
            passData.debugRendererLists.DrawWithRendererList(cmd);
        }
        else
        {
            cmd.DrawRendererList(rendererList);
        }

        // Put the camera's own matrices back if we overrode them above.
        if (passData.cameraSettings.overrideCamera && passData.cameraSettings.restoreCamera && !passData.cameraData.xr.enabled)
        {
            RenderingUtils.SetViewAndProjectionMatrices(cmd, passData.cameraData.GetViewMatrix(), GL.GetGPUProjectionMatrix(passData.cameraData.GetProjectionMatrix(0), isYFlipped), false);
        }
    }

    private class PassData
    {
        internal RenderObjects.CustomCameraSettings cameraSettings;
        internal RenderPassEvent renderPassEvent;

        internal TextureHandle color;                  // active color target (render-graph path only)
        internal RendererListHandle rendererListHdl;   // renderer list handle (render-graph path)
        internal DebugRendererLists debugRendererLists;

        internal UniversalCameraData cameraData;

        // Required for code sharing purpose between RG and non-RG.
        internal RendererList rendererList;
    }

    // Copies per-frame state into the pass data shared by both code paths.
    private void InitPassData(UniversalCameraData cameraData, ref PassData passData)
    {
        passData.cameraSettings = m_CameraSettings;
        passData.renderPassEvent = renderPassEvent;
        passData.cameraData = cameraData;
    }

    // Builds the renderer list — a render-graph handle or an immediate-mode list
    // depending on useRenderGraph — applying the sorting criteria, material/shader
    // overrides and, when active, the debug render state.
    private void InitRendererLists(UniversalRenderingData renderingData, UniversalLightData lightData,
        ref PassData passData, ScriptableRenderContext context, RenderGraph renderGraph, bool useRenderGraph)
    {
        SortingCriteria sortingCriteria = (renderQueueType == RenderQueueType.Transparent)
            ? SortingCriteria.CommonTransparent
            : passData.cameraData.defaultOpaqueSortFlags;
        DrawingSettings drawingSettings = RenderingUtils.CreateDrawingSettings(m_ShaderTagIdList, renderingData,
            passData.cameraData, lightData, sortingCriteria);
        drawingSettings.overrideMaterial = overrideMaterial;
        drawingSettings.overrideMaterialPassIndex = overrideMaterialPassIndex;
        drawingSettings.overrideShader = overrideShader;
        drawingSettings.overrideShaderPassIndex = overrideShaderPassIndex;

        var activeDebugHandler = GetActiveDebugHandler(passData.cameraData);
        var filterSettings = m_FilteringSettings;

        if (useRenderGraph)
        {
            if (activeDebugHandler != null)
            {
                passData.debugRendererLists = activeDebugHandler.CreateRendererListsWithDebugRenderState(renderGraph,
                    ref renderingData.cullResults, ref drawingSettings, ref m_FilteringSettings, ref m_RenderStateBlock);
            }
            else
            {
                RenderingUtils.CreateRendererListWithRenderStateBlock(renderGraph, ref renderingData.cullResults, drawingSettings,
                    m_FilteringSettings, m_RenderStateBlock, ref passData.rendererListHdl);
            }
        }
        else
        {
            if (activeDebugHandler != null)
            {
                passData.debugRendererLists = activeDebugHandler.CreateRendererListsWithDebugRenderState(context, ref renderingData.cullResults, ref drawingSettings, ref m_FilteringSettings, ref m_RenderStateBlock);
            }
            else
            {
                RenderingUtils.CreateRendererListWithRenderStateBlock(context, ref renderingData.cullResults, drawingSettings, m_FilteringSettings, m_RenderStateBlock, ref passData.rendererList);
            }
        }
    }

    /// <inheritdoc />
    public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData)
    {
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
        UniversalLightData lightData = frameData.Get<UniversalLightData>();

        using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
        {
            UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();

            InitPassData(cameraData, ref passData);
            passData.color = resourceData.activeColorTexture;
            builder.SetRenderAttachment(resourceData.activeColorTexture, 0, AccessFlags.Write);

            // NOTE(review): the depth attachment is skipped only when upscaling after
            // post-processing — presumably the upscaled target has no matching depth; confirm.
            if (cameraData.imageScalingMode != ImageScalingMode.Upscaling || passData.renderPassEvent != RenderPassEvent.AfterRenderingPostProcessing)
                builder.SetRenderAttachmentDepth(resourceData.activeDepthTexture, AccessFlags.Write);

            // Declare read access to the URP resources (shadows, decal DBuffer, SSAO)
            // that shaders rendered by this pass may sample.
            TextureHandle mainShadowsTexture = resourceData.mainShadowsTexture;
            TextureHandle additionalShadowsTexture = resourceData.additionalShadowsTexture;
            if (mainShadowsTexture.IsValid())
                builder.UseTexture(mainShadowsTexture, AccessFlags.Read);
            if (additionalShadowsTexture.IsValid())
                builder.UseTexture(additionalShadowsTexture, AccessFlags.Read);

            TextureHandle[] dBufferHandles = resourceData.dBuffer;
            for (int i = 0; i < dBufferHandles.Length; ++i)
            {
                TextureHandle dBuffer = dBufferHandles[i];
                if (dBuffer.IsValid())
                    builder.UseTexture(dBuffer, AccessFlags.Read);
            }

            TextureHandle ssaoTexture = resourceData.ssaoTexture;
            if (ssaoTexture.IsValid())
                builder.UseTexture(ssaoTexture, AccessFlags.Read);

            // Render-graph path: builds rendererListHdl (last arg useRenderGraph == true).
            InitRendererLists(renderingData, lightData, ref passData, default(ScriptableRenderContext), renderGraph, true);
            var activeDebugHandler = GetActiveDebugHandler(passData.cameraData);
            if (activeDebugHandler != null)
            {
                passData.debugRendererLists.PrepareRendererListForRasterPass(builder);
            }
            else
            {
                builder.UseRendererList(passData.rendererListHdl);
            }

            builder.AllowGlobalStateModification(true);

            if (cameraData.xr.enabled)
                builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && cameraData.xrUniversal.canFoveateIntermediatePasses);

            builder.SetRenderFunc((PassData data, RasterGraphContext rgContext) =>
            {
                var isYFlipped = data.cameraData.IsRenderTargetProjectionMatrixFlipped(data.color);
                ExecutePass(data, rgContext.cmd, data.rendererListHdl, isYFlipped);
            });
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: dbd10d839f22d4127aeb9851dfed8caa
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,639 @@
using System;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal
{
// The Screen Space Ambient Occlusion (SSAO) Pass
internal class ScreenSpaceAmbientOcclusionPass : ScriptableRenderPass
{
// Private Variables
private readonly bool m_SupportsR8RenderTextureFormat = SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.R8);
private int m_BlueNoiseTextureIndex = 0;
private Material m_Material;
private SSAOPassData m_PassData;
private Texture2D[] m_BlueNoiseTextures;
private Vector4[] m_CameraTopLeftCorner = new Vector4[2];
private Vector4[] m_CameraXExtent = new Vector4[2];
private Vector4[] m_CameraYExtent = new Vector4[2];
private Vector4[] m_CameraZExtent = new Vector4[2];
private RTHandle[] m_SSAOTextures = new RTHandle[4];
private BlurTypes m_BlurType = BlurTypes.Bilateral;
private Matrix4x4[] m_CameraViewProjections = new Matrix4x4[2];
private ProfilingSampler m_ProfilingSampler = ProfilingSampler.Get(URPProfileId.SSAO);
private ScriptableRenderer m_Renderer = null;
private RenderTextureDescriptor m_AOPassDescriptor;
private ScreenSpaceAmbientOcclusionSettings m_CurrentSettings;
// Constants
private const string k_SSAOTextureName = "_ScreenSpaceOcclusionTexture";
private const string k_AmbientOcclusionParamName = "_AmbientOcclusionParam";
// Statics
internal static readonly int s_AmbientOcclusionParamID = Shader.PropertyToID(k_AmbientOcclusionParamName);
private static readonly int s_SSAOParamsID = Shader.PropertyToID("_SSAOParams");
private static readonly int s_SSAOBlueNoiseParamsID = Shader.PropertyToID("_SSAOBlueNoiseParams");
private static readonly int s_BlueNoiseTextureID = Shader.PropertyToID("_BlueNoiseTexture");
private static readonly int s_SSAOFinalTextureID = Shader.PropertyToID(k_SSAOTextureName);
private static readonly int s_CameraViewXExtentID = Shader.PropertyToID("_CameraViewXExtent");
private static readonly int s_CameraViewYExtentID = Shader.PropertyToID("_CameraViewYExtent");
private static readonly int s_CameraViewZExtentID = Shader.PropertyToID("_CameraViewZExtent");
private static readonly int s_ProjectionParams2ID = Shader.PropertyToID("_ProjectionParams2");
private static readonly int s_CameraViewProjectionsID = Shader.PropertyToID("_CameraViewProjections");
private static readonly int s_CameraViewTopLeftCornerID = Shader.PropertyToID("_CameraViewTopLeftCorner");
private static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture");
private static readonly int s_CameraNormalsTextureID = Shader.PropertyToID("_CameraNormalsTexture");
private static readonly int[] m_BilateralTexturesIndices = { 0, 1, 2, 3 };
private static readonly ShaderPasses[] m_BilateralPasses = { ShaderPasses.BilateralBlurHorizontal, ShaderPasses.BilateralBlurVertical, ShaderPasses.BilateralBlurFinal };
private static readonly ShaderPasses[] m_BilateralAfterOpaquePasses = { ShaderPasses.BilateralBlurHorizontal, ShaderPasses.BilateralBlurVertical, ShaderPasses.BilateralAfterOpaque };
private static readonly int[] m_GaussianTexturesIndices = { 0, 1, 3, 3 };
private static readonly ShaderPasses[] m_GaussianPasses = { ShaderPasses.GaussianBlurHorizontal, ShaderPasses.GaussianBlurVertical };
private static readonly ShaderPasses[] m_GaussianAfterOpaquePasses = { ShaderPasses.GaussianBlurHorizontal, ShaderPasses.GaussianAfterOpaque };
private static readonly int[] m_KawaseTexturesIndices = { 0, 3 };
private static readonly ShaderPasses[] m_KawasePasses = { ShaderPasses.KawaseBlur };
private static readonly ShaderPasses[] m_KawaseAfterOpaquePasses = { ShaderPasses.KawaseAfterOpaque };
// Enums
// Blur algorithm used to denoise the AO result; selected from BlurQuality in Setup():
// High -> Bilateral, Medium -> Gaussian, Low -> Kawase.
private enum BlurTypes
{
    Bilateral,
    Gaussian,
    Kawase,
}
// Pass indices in the SSAO material.
// NOTE(review): values are explicit — presumably they must match the pass order in the
// SSAO shader; keep in sync when editing that shader.
private enum ShaderPasses
{
    AmbientOcclusion = 0,
    BilateralBlurHorizontal = 1,
    BilateralBlurVertical = 2,
    BilateralBlurFinal = 3,
    BilateralAfterOpaque = 4,
    GaussianBlurHorizontal = 5,
    GaussianBlurVertical = 6,
    GaussianAfterOpaque = 7,
    KawaseBlur = 8,
    KawaseAfterOpaque = 9,
}
// Structs
// Snapshot of every input that affects the SSAO material's keywords and _SSAOParams
// vector. Cached between frames so redundant keyword updates can be skipped
// (see SetupKeywordsAndParameters).
private struct SSAOMaterialParams
{
    internal bool orthographicCamera;
    internal bool aoBlueNoise;
    internal bool aoInterleavedGradient;
    internal bool sampleCountHigh;
    internal bool sampleCountMedium;
    internal bool sampleCountLow;
    internal bool sourceDepthNormals;
    internal bool sourceDepthHigh;
    internal bool sourceDepthMedium;
    internal bool sourceDepthLow;
    internal Vector4 ssaoParams;

    internal SSAOMaterialParams(ref ScreenSpaceAmbientOcclusionSettings settings, bool isOrthographic)
    {
        bool usesDepthNormals = settings.Source == ScreenSpaceAmbientOcclusionSettings.DepthSource.DepthNormals;
        bool usesBlueNoise = settings.AOMethod == ScreenSpaceAmbientOcclusionSettings.AOMethodOptions.BlueNoise;

        orthographicCamera = isOrthographic;
        aoBlueNoise = usesBlueNoise;
        aoInterleavedGradient = settings.AOMethod == ScreenSpaceAmbientOcclusionSettings.AOMethodOptions.InterleavedGradient;
        sampleCountHigh = settings.Samples == ScreenSpaceAmbientOcclusionSettings.AOSampleOption.High;
        sampleCountMedium = settings.Samples == ScreenSpaceAmbientOcclusionSettings.AOSampleOption.Medium;
        sampleCountLow = settings.Samples == ScreenSpaceAmbientOcclusionSettings.AOSampleOption.Low;
        sourceDepthNormals = usesDepthNormals;
        // The normal-quality keywords only apply when normals are reconstructed from depth.
        sourceDepthHigh = !usesDepthNormals && settings.NormalSamples == ScreenSpaceAmbientOcclusionSettings.NormalQuality.High;
        sourceDepthMedium = !usesDepthNormals && settings.NormalSamples == ScreenSpaceAmbientOcclusionSettings.NormalQuality.Medium;
        sourceDepthLow = !usesDepthNormals && settings.NormalSamples == ScreenSpaceAmbientOcclusionSettings.NormalQuality.Low;

        // Blue-noise AO applies a 1.5x radius multiplier.
        float radiusScale = usesBlueNoise ? 1.5f : 1;
        ssaoParams = new Vector4(
            settings.Intensity,                   // Intensity
            settings.Radius * radiusScale,        // Radius
            1.0f / (settings.Downsample ? 2 : 1), // Downsampling
            settings.Falloff                      // Falloff
        );
    }

    // Field-by-field comparison; kept as a ref method to avoid copying the struct.
    internal bool Equals(ref SSAOMaterialParams other)
    {
        if (orthographicCamera != other.orthographicCamera) return false;
        if (aoBlueNoise != other.aoBlueNoise) return false;
        if (aoInterleavedGradient != other.aoInterleavedGradient) return false;
        if (sampleCountHigh != other.sampleCountHigh) return false;
        if (sampleCountMedium != other.sampleCountMedium) return false;
        if (sampleCountLow != other.sampleCountLow) return false;
        if (sourceDepthNormals != other.sourceDepthNormals) return false;
        if (sourceDepthHigh != other.sourceDepthHigh) return false;
        if (sourceDepthMedium != other.sourceDepthMedium) return false;
        if (sourceDepthLow != other.sourceDepthLow) return false;
        return ssaoParams == other.ssaoParams;
    }
}
private SSAOMaterialParams m_SSAOParamsPrev = new SSAOMaterialParams(); // last-applied params, used to skip redundant keyword updates
// Allocates the reusable containers; all real configuration happens later in Setup().
internal ScreenSpaceAmbientOcclusionPass()
{
    m_PassData = new SSAOPassData();
    m_CurrentSettings = new ScreenSpaceAmbientOcclusionSettings();
}
// Configures the pass from the feature settings: chooses the render pass event,
// the depth source, and the blur type. Returns true when the pass should run
// (material assigned and intensity/radius/falloff all positive).
internal bool Setup(ref ScreenSpaceAmbientOcclusionSettings featureSettings, ref ScriptableRenderer renderer, ref Material material, ref Texture2D[] blueNoiseTextures)
{
    m_BlueNoiseTextures = blueNoiseTextures;
    m_Material = material;
    m_Renderer = renderer;
    m_CurrentSettings = featureSettings;

    // RenderPass Event + Source Settings (Depth / Depth&Normals)
    if (renderer is UniversalRenderer { usesDeferredLighting: true })
    {
        renderPassEvent = m_CurrentSettings.AfterOpaque ? RenderPassEvent.AfterRenderingOpaques : RenderPassEvent.AfterRenderingGbuffer;

        // Deferred lighting forces the DepthNormals source.
        m_CurrentSettings.Source = ScreenSpaceAmbientOcclusionSettings.DepthSource.DepthNormals;
    }
    else
    {
        // Rendering after PrePasses is usually correct except when depth priming is in play:
        // then we rely on a depth resolve taking place after the PrePasses in order to have it ready for SSAO.
        // Hence we set the event to RenderPassEvent.AfterRenderingPrePasses + 1 at the earliest.
        renderPassEvent = m_CurrentSettings.AfterOpaque ? RenderPassEvent.BeforeRenderingTransparents : RenderPassEvent.AfterRenderingPrePasses + 1;
    }

    // Ask for a Depth or Depth + Normals textures
    switch (m_CurrentSettings.Source)
    {
        case ScreenSpaceAmbientOcclusionSettings.DepthSource.Depth:
            ConfigureInput(ScriptableRenderPassInput.Depth);
            break;
        case ScreenSpaceAmbientOcclusionSettings.DepthSource.DepthNormals:
            ConfigureInput(ScriptableRenderPassInput.Depth | ScriptableRenderPassInput.Normal); // need depthNormal prepass for forward-only geometry
            break;
        default:
            throw new ArgumentOutOfRangeException();
    }

    // Blur settings: map the quality option onto a blur algorithm.
    switch (m_CurrentSettings.BlurQuality)
    {
        case ScreenSpaceAmbientOcclusionSettings.BlurQualityOptions.High:
            m_BlurType = BlurTypes.Bilateral;
            break;
        case ScreenSpaceAmbientOcclusionSettings.BlurQualityOptions.Medium:
            m_BlurType = BlurTypes.Gaussian;
            break;
        case ScreenSpaceAmbientOcclusionSettings.BlurQualityOptions.Low:
            m_BlurType = BlurTypes.Kawase;
            break;
        default:
            throw new ArgumentOutOfRangeException();
    }

    // Skip the pass entirely when it can have no visible effect.
    return m_Material != null
        && m_CurrentSettings.Intensity > 0.0f
        && m_CurrentSettings.Radius > 0.0f
        && m_CurrentSettings.Falloff > 0.0f;
}
// True for the shader passes that composite AO directly after opaques
// (as opposed to writing it to the occlusion texture).
private static bool IsAfterOpaquePass(ref ShaderPasses pass)
{
    switch (pass)
    {
        case ShaderPasses.BilateralAfterOpaque:
        case ShaderPasses.GaussianAfterOpaque:
        case ShaderPasses.KawaseAfterOpaque:
            return true;
        default:
            return false;
    }
}
// Uploads per-camera view reconstruction data and the SSAO parameter vector to the
// material, and updates shader keywords — but only when the relevant settings changed
// since the last call (see SSAOMaterialParams caching at the bottom).
private void SetupKeywordsAndParameters(ref ScreenSpaceAmbientOcclusionSettings settings, ref UniversalCameraData cameraData)
{
#if ENABLE_VR && ENABLE_XR_MODULE
    // Single-pass XR needs per-eye matrices; otherwise only entry 0 is used.
    int eyeCount = cameraData.xr.enabled && cameraData.xr.singlePassEnabled ? 2 : 1;
#else
    int eyeCount = 1;
#endif
    for (int eyeIndex = 0; eyeIndex < eyeCount; eyeIndex++)
    {
        Matrix4x4 view = cameraData.GetViewMatrix(eyeIndex);
        Matrix4x4 proj = cameraData.GetProjectionMatrix(eyeIndex);
        m_CameraViewProjections[eyeIndex] = proj * view;

        // camera view space without translation, used by SSAO.hlsl ReconstructViewPos() to calculate view vector.
        Matrix4x4 cview = view;
        cview.SetColumn(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));
        Matrix4x4 cviewProj = proj * cview;
        Matrix4x4 cviewProjInv = cviewProj.inverse;

        // Unproject three near-plane corners and the far-plane centre from NDC back to
        // (translation-free) view space to derive the frustum extents used by the shader.
        Vector4 topLeftCorner = cviewProjInv.MultiplyPoint(new Vector4(-1, 1, -1, 1));
        Vector4 topRightCorner = cviewProjInv.MultiplyPoint(new Vector4(1, 1, -1, 1));
        Vector4 bottomLeftCorner = cviewProjInv.MultiplyPoint(new Vector4(-1, -1, -1, 1));
        Vector4 farCentre = cviewProjInv.MultiplyPoint(new Vector4(0, 0, 1, 1));
        m_CameraTopLeftCorner[eyeIndex] = topLeftCorner;
        m_CameraXExtent[eyeIndex] = topRightCorner - topLeftCorner;
        m_CameraYExtent[eyeIndex] = bottomLeftCorner - topLeftCorner;
        m_CameraZExtent[eyeIndex] = farCentre;
    }

    m_Material.SetVector(s_ProjectionParams2ID, new Vector4(1.0f / cameraData.camera.nearClipPlane, 0.0f, 0.0f, 0.0f));
    m_Material.SetMatrixArray(s_CameraViewProjectionsID, m_CameraViewProjections);
    m_Material.SetVectorArray(s_CameraViewTopLeftCornerID, m_CameraTopLeftCorner);
    m_Material.SetVectorArray(s_CameraViewXExtentID, m_CameraXExtent);
    m_Material.SetVectorArray(s_CameraViewYExtentID, m_CameraYExtent);
    m_Material.SetVectorArray(s_CameraViewZExtentID, m_CameraZExtent);

    if (settings.AOMethod == ScreenSpaceAmbientOcclusionSettings.AOMethodOptions.BlueNoise)
    {
        // Cycle through the blue-noise textures and randomize the offsets each call.
        m_BlueNoiseTextureIndex = (m_BlueNoiseTextureIndex + 1) % m_BlueNoiseTextures.Length;

        Texture2D noiseTexture = m_BlueNoiseTextures[m_BlueNoiseTextureIndex];
        Vector4 blueNoiseParams = new Vector4(
            cameraData.pixelWidth / (float)m_BlueNoiseTextures[m_BlueNoiseTextureIndex].width, // X Scale
            cameraData.pixelHeight / (float)m_BlueNoiseTextures[m_BlueNoiseTextureIndex].height, // Y Scale
            Random.value, // X Offset
            Random.value // Y Offset
        );

        // For testing we use a single blue noise texture and a single set of blue noise params.
        #if UNITY_INCLUDE_TESTS
            noiseTexture = m_BlueNoiseTextures[0];
            blueNoiseParams.z = 1;
            blueNoiseParams.w = 1;
        #endif

        m_Material.SetTexture(s_BlueNoiseTextureID, noiseTexture);
        m_Material.SetVector(s_SSAOBlueNoiseParamsID, blueNoiseParams);
    }

    // Setting keywords can be somewhat expensive on low-end platforms.
    // Previous params are cached to avoid setting the same keywords every frame.
    SSAOMaterialParams matParams = new SSAOMaterialParams(ref settings, cameraData.camera.orthographic);
    bool ssaoParamsDirty = !m_SSAOParamsPrev.Equals(ref matParams); // Checks if the parameters have changed.
    bool isParamsPropertySet = m_Material.HasProperty(s_SSAOParamsID); // Checks if the parameters have been set on the material.
    if (!ssaoParamsDirty && isParamsPropertySet)
        return;

    m_SSAOParamsPrev = matParams;
    CoreUtils.SetKeyword(m_Material, ScreenSpaceAmbientOcclusion.k_OrthographicCameraKeyword, matParams.orthographicCamera);
    CoreUtils.SetKeyword(m_Material, ScreenSpaceAmbientOcclusion.k_AOBlueNoiseKeyword, matParams.aoBlueNoise);
    CoreUtils.SetKeyword(m_Material, ScreenSpaceAmbientOcclusion.k_AOInterleavedGradientKeyword, matParams.aoInterleavedGradient);
    CoreUtils.SetKeyword(m_Material, ScreenSpaceAmbientOcclusion.k_SampleCountHighKeyword, matParams.sampleCountHigh);
    CoreUtils.SetKeyword(m_Material, ScreenSpaceAmbientOcclusion.k_SampleCountMediumKeyword, matParams.sampleCountMedium);
    CoreUtils.SetKeyword(m_Material, ScreenSpaceAmbientOcclusion.k_SampleCountLowKeyword, matParams.sampleCountLow);
    CoreUtils.SetKeyword(m_Material, ScreenSpaceAmbientOcclusion.k_SourceDepthNormalsKeyword, matParams.sourceDepthNormals);
    CoreUtils.SetKeyword(m_Material, ScreenSpaceAmbientOcclusion.k_SourceDepthHighKeyword, matParams.sourceDepthHigh);
    CoreUtils.SetKeyword(m_Material, ScreenSpaceAmbientOcclusion.k_SourceDepthMediumKeyword, matParams.sourceDepthMedium);
    CoreUtils.SetKeyword(m_Material, ScreenSpaceAmbientOcclusion.k_SourceDepthLowKeyword, matParams.sourceDepthLow);
    m_Material.SetVector(s_SSAOParamsID, matParams.ssaoParams);
}
/*----------------------------------------------------------------------------------------------------------------------------------------
------------------------------------------------------------- RENDER-GRAPH --------------------------------------------------------------
----------------------------------------------------------------------------------------------------------------------------------------*/
// Plain data container handed to the render-graph pass; filled in by
// InitSSAOPassData/RecordRenderGraph and read back inside the SetRenderFunc lambda.
private class SSAOPassData
{
internal bool afterOpaque; // True when AO is composited directly onto the active color target.
internal ScreenSpaceAmbientOcclusionSettings.BlurQualityOptions BlurQuality; // Selects the blur chain (Bilateral/Gaussian/Kawase).
internal Material material; // SSAO material used for every blit in the pass.
internal float directLightingStrength; // Written to the global ambient-occlusion param vector.
internal TextureHandle cameraColor; // Only used for SetSourceSize; may be invalid if no earlier pass wrote it.
internal TextureHandle AOTexture; // Intermediate AO estimation target.
internal TextureHandle finalTexture; // Final SSAO output (or active color target when afterOpaque).
internal TextureHandle blurTexture; // Ping-pong target; nullHandle when BlurQuality is Low (Kawase).
internal TextureHandle cameraNormalsTexture; // Only set when the DepthNormals source is used.
}
/// <summary>
/// Copies the current SSAO settings (material, blur quality, after-opaque mode and
/// direct lighting strength) into the given pass-data container.
/// </summary>
private void InitSSAOPassData(ref SSAOPassData data)
{
    // Assignments are independent; order is irrelevant.
    data.afterOpaque = m_CurrentSettings.AfterOpaque;
    data.directLightingStrength = m_CurrentSettings.DirectLightingStrength;
    data.BlurQuality = m_CurrentSettings.BlurQuality;
    data.material = m_Material;
}
/// <summary>
/// Render-graph entry point for SSAO: creates the required texture handles, updates the
/// material keywords, then records one unsafe pass that runs AO estimation followed by
/// the configured blur chain, optionally publishing the result as a global texture.
/// </summary>
public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData)
{
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
// Create the texture handles (AO, ping-pong blur, and final output)...
CreateRenderTextureHandles(renderGraph,
resourceData,
cameraData,
out TextureHandle aoTexture,
out TextureHandle blurTexture,
out TextureHandle finalTexture);
// Get the resources
TextureHandle cameraDepthTexture = resourceData.cameraDepthTexture;
TextureHandle cameraNormalsTexture = resourceData.cameraNormalsTexture;
// Update keywords and other shader params (must happen before the pass is recorded)
SetupKeywordsAndParameters(ref m_CurrentSettings, ref cameraData);
using (IUnsafeRenderGraphBuilder builder = renderGraph.AddUnsafePass<SSAOPassData>("Blit SSAO", out var passData, m_ProfilingSampler))
{
// Shader keyword changes are considered as global state modifications
builder.AllowGlobalStateModification(true);
// Fill in the Pass data...
InitSSAOPassData(ref passData);
passData.cameraColor = resourceData.cameraColor;
passData.AOTexture = aoTexture;
passData.finalTexture = finalTexture;
passData.blurTexture = blurTexture;
// Declare input textures
builder.UseTexture(passData.AOTexture, AccessFlags.ReadWrite);
// TODO: Refactor to eliminate the need for 'UseTexture'.
// Currently required only because 'PostProcessUtils.SetSourceSize' allocates an RTHandle,
// which expects a valid graphicsResource. Without this call, 'cameraColor.graphicsResource'
// may be null if it wasn't initialized in an earlier pass (e.g., DrawOpaque).
if (resourceData.cameraColor.IsValid())
builder.UseTexture(resourceData.cameraColor, AccessFlags.Read);
// The Low (Kawase) blur path never allocates the ping-pong blur texture.
if (passData.BlurQuality != ScreenSpaceAmbientOcclusionSettings.BlurQualityOptions.Low)
builder.UseTexture(passData.blurTexture, AccessFlags.ReadWrite);
if (cameraDepthTexture.IsValid())
builder.UseTexture(cameraDepthTexture, AccessFlags.Read);
// Normals are only needed when the DepthNormals source is selected.
if (m_CurrentSettings.Source == ScreenSpaceAmbientOcclusionSettings.DepthSource.DepthNormals && cameraNormalsTexture.IsValid())
{
builder.UseTexture(cameraNormalsTexture, AccessFlags.Read);
passData.cameraNormalsTexture = cameraNormalsTexture;
}
// The global SSAO texture only needs to be set if After Opaque is disabled...
if (!passData.afterOpaque && finalTexture.IsValid())
{
builder.UseTexture(passData.finalTexture, AccessFlags.ReadWrite);
builder.SetGlobalTextureAfterPass(finalTexture, s_SSAOFinalTextureID);
}
builder.SetRenderFunc((SSAOPassData data, UnsafeGraphContext rgContext) =>
{
CommandBuffer cmd = CommandBufferHelpers.GetNativeCommandBuffer(rgContext.cmd);
// When compositing after opaque, the final target already holds the camera color and must be loaded.
RenderBufferLoadAction finalLoadAction = data.afterOpaque ? RenderBufferLoadAction.Load : RenderBufferLoadAction.DontCare;
// Setup
if (data.cameraColor.IsValid())
PostProcessUtils.SetSourceSize(cmd, data.cameraColor);
if (data.cameraNormalsTexture.IsValid())
data.material.SetTexture(s_CameraNormalsTextureID, data.cameraNormalsTexture);
// AO Pass
Blitter.BlitCameraTexture(cmd, data.AOTexture, data.AOTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, data.material, (int) ShaderPasses.AmbientOcclusion);
// Blur passes: each quality level uses a different shader-pass sequence.
switch (data.BlurQuality)
{
// Bilateral: horizontal + vertical ping-pong, then final composite.
case ScreenSpaceAmbientOcclusionSettings.BlurQualityOptions.High:
Blitter.BlitCameraTexture(cmd, data.AOTexture, data.blurTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, data.material, (int) ShaderPasses.BilateralBlurHorizontal);
Blitter.BlitCameraTexture(cmd, data.blurTexture, data.AOTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, data.material, (int) ShaderPasses.BilateralBlurVertical);
Blitter.BlitCameraTexture(cmd, data.AOTexture, data.finalTexture, finalLoadAction, RenderBufferStoreAction.Store, data.material, (int) (data.afterOpaque ? ShaderPasses.BilateralAfterOpaque : ShaderPasses.BilateralBlurFinal));
break;
// Gaussian: horizontal into blur texture, vertical straight into the final target.
case ScreenSpaceAmbientOcclusionSettings.BlurQualityOptions.Medium:
Blitter.BlitCameraTexture(cmd, data.AOTexture, data.blurTexture, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store, data.material, (int) ShaderPasses.GaussianBlurHorizontal);
Blitter.BlitCameraTexture(cmd, data.blurTexture, data.finalTexture, finalLoadAction, RenderBufferStoreAction.Store, data.material, (int) (data.afterOpaque ? ShaderPasses.GaussianAfterOpaque : ShaderPasses.GaussianBlurVertical));
break;
// Kawase: single blit directly to the final target.
case ScreenSpaceAmbientOcclusionSettings.BlurQualityOptions.Low:
Blitter.BlitCameraTexture(cmd, data.AOTexture, data.finalTexture, finalLoadAction, RenderBufferStoreAction.Store, data.material, (int) (data.afterOpaque ? ShaderPasses.KawaseAfterOpaque : ShaderPasses.KawaseBlur));
break;
default:
throw new ArgumentOutOfRangeException();
}
// We only want URP shaders to sample SSAO if After Opaque is disabled...
if (!data.afterOpaque)
{
rgContext.cmd.SetKeyword(ShaderGlobalKeywords.ScreenSpaceOcclusion, true);
rgContext.cmd.SetGlobalVector(s_AmbientOcclusionParamID, new Vector4(1f, 0f, 0f, data.directLightingStrength));
}
});
}
}
/// <summary>
/// Creates the render-graph texture handles used by the SSAO pass: the AO estimation
/// target, an optional ping-pong blur target, and the final output texture (which is the
/// active color target when After Opaque is enabled).
/// </summary>
private void CreateRenderTextureHandles(RenderGraph renderGraph, UniversalResourceData resourceData,
UniversalCameraData cameraData, out TextureHandle aoTexture, out TextureHandle blurTexture, out TextureHandle finalTexture)
{
// Descriptor for the final blur pass. R8 is preferred when supported since only one channel is needed.
RenderTextureDescriptor finalTextureDescriptor = cameraData.cameraTargetDescriptor;
finalTextureDescriptor.colorFormat = m_SupportsR8RenderTextureFormat ? RenderTextureFormat.R8 : RenderTextureFormat.ARGB32;
finalTextureDescriptor.depthStencilFormat = GraphicsFormat.None;
finalTextureDescriptor.msaaSamples = 1;
// Descriptor for the AO and Blur passes; derived from the final descriptor, optionally half resolution.
int downsampleDivider = m_CurrentSettings.Downsample ? 2 : 1;
// Bilateral blur packs more than one value per texel, so R8 is only usable for the other blur types.
bool useRedComponentOnly = m_SupportsR8RenderTextureFormat && m_BlurType > BlurTypes.Bilateral;
RenderTextureDescriptor aoBlurDescriptor = finalTextureDescriptor;
aoBlurDescriptor.colorFormat = useRedComponentOnly ? RenderTextureFormat.R8 : RenderTextureFormat.ARGB32;
aoBlurDescriptor.width /= downsampleDivider;
aoBlurDescriptor.height /= downsampleDivider;
// Handles
aoTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, aoBlurDescriptor, "_SSAO_OcclusionTexture0", false, FilterMode.Bilinear);
finalTexture = m_CurrentSettings.AfterOpaque ? resourceData.activeColorTexture : UniversalRenderer.CreateRenderGraphTexture(renderGraph, finalTextureDescriptor, k_SSAOTextureName, false, FilterMode.Bilinear);
// The Low (Kawase) blur blits straight to the final target and needs no intermediate texture.
if (m_CurrentSettings.BlurQuality != ScreenSpaceAmbientOcclusionSettings.BlurQualityOptions.Low)
blurTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, aoBlurDescriptor, "_SSAO_OcclusionTexture1", false, FilterMode.Bilinear);
else
blurTexture = TextureHandle.nullHandle;
// Publish the SSAO texture to the frame resources when it is a standalone output.
if (!m_CurrentSettings.AfterOpaque)
resourceData.ssaoTexture = finalTexture;
}
/*----------------------------------------------------------------------------------------------------------------------------------------
---------------------------------------------- COMPATIBILITY MODE (NON-RENDER-GRAPH) PATH -----------------------------------------------
----------------------------------------------------------------------------------------------------------------------------------------*/
/// <inheritdoc/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
{
ContextContainer frameData = renderingData.frameData;
UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
// Fill in the Pass data...
InitSSAOPassData(ref m_PassData);
// Update keywords and other shader params
SetupKeywordsAndParameters(ref m_CurrentSettings, ref cameraData);
// Set up the descriptors
int downsampleDivider = m_CurrentSettings.Downsample ? 2 : 1;
RenderTextureDescriptor descriptor = renderingData.cameraData.cameraTargetDescriptor;
descriptor.msaaSamples = 1;
descriptor.depthStencilFormat = GraphicsFormat.None;
// AO PAss
m_AOPassDescriptor = descriptor;
m_AOPassDescriptor.width /= downsampleDivider;
m_AOPassDescriptor.height /= downsampleDivider;
bool useRedComponentOnly = m_SupportsR8RenderTextureFormat && m_BlurType > BlurTypes.Bilateral;
m_AOPassDescriptor.colorFormat = useRedComponentOnly ? RenderTextureFormat.R8 : RenderTextureFormat.ARGB32;
// Allocate textures for the AO and blur
RenderingUtils.ReAllocateHandleIfNeeded(ref m_SSAOTextures[0], m_AOPassDescriptor, FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_SSAO_OcclusionTexture0");
RenderingUtils.ReAllocateHandleIfNeeded(ref m_SSAOTextures[1], m_AOPassDescriptor, FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_SSAO_OcclusionTexture1");
RenderingUtils.ReAllocateHandleIfNeeded(ref m_SSAOTextures[2], m_AOPassDescriptor, FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_SSAO_OcclusionTexture2");
// Upsample setup
m_AOPassDescriptor.width *= downsampleDivider;
m_AOPassDescriptor.height *= downsampleDivider;
m_AOPassDescriptor.colorFormat = m_SupportsR8RenderTextureFormat ? RenderTextureFormat.R8 : RenderTextureFormat.ARGB32;
// Allocate texture for the final SSAO results
RenderingUtils.ReAllocateHandleIfNeeded(ref m_SSAOTextures[3], m_AOPassDescriptor, FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_SSAO_OcclusionTexture");
PostProcessUtils.SetSourceSize(cmd, m_SSAOTextures[3]);
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
// Configure targets and clear color
ConfigureTarget(m_CurrentSettings.AfterOpaque ? m_Renderer.cameraColorTargetHandle : m_SSAOTextures[3]);
ConfigureClear(ClearFlag.None, Color.white);
#pragma warning restore CS0618
}
/// <inheritdoc/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
if (m_Material == null)
{
Debug.LogErrorFormat(
"{0}.Execute(): Missing material. ScreenSpaceAmbientOcclusion pass will not execute. Check for missing reference in the renderer resources.",
GetType().Name);
return;
}
var cmd = renderingData.commandBuffer;
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.SSAO)))
{
// We only want URP shaders to sample SSAO if After Opaque is off.
if (!m_CurrentSettings.AfterOpaque)
cmd.SetKeyword(ShaderGlobalKeywords.ScreenSpaceOcclusion, true);
cmd.SetGlobalTexture(k_SSAOTextureName, m_SSAOTextures[3]);
#if ENABLE_VR && ENABLE_XR_MODULE
bool isFoveatedEnabled = false;
if (renderingData.cameraData.xr.supportsFoveatedRendering)
{
// If we are downsampling we can't use the VRS texture
// If it's a non uniform raster foveated rendering has to be turned off because it will keep applying non uniform for the other passes.
// When calculating normals from depth, this causes artifacts that are amplified from VRS when going to say 4x4. Thus we disable foveated because of that
if (m_CurrentSettings.Downsample || SystemInfo.foveatedRenderingCaps.HasFlag(FoveatedRenderingCaps.NonUniformRaster) ||
(SystemInfo.foveatedRenderingCaps.HasFlag(FoveatedRenderingCaps.FoveationImage) && m_CurrentSettings.Source == ScreenSpaceAmbientOcclusionSettings.DepthSource.Depth))
{
cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Disabled);
}
// If we aren't downsampling and it's a VRS texture we can apply foveation in this case
else if (SystemInfo.foveatedRenderingCaps.HasFlag(FoveatedRenderingCaps.FoveationImage))
{
cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Enabled);
isFoveatedEnabled = true;
}
}
#endif
GetPassOrder(m_BlurType, m_CurrentSettings.AfterOpaque, out int[] textureIndices, out ShaderPasses[] shaderPasses);
// Execute the SSAO Occlusion pass
RTHandle cameraDepthTargetHandle = renderingData.cameraData.renderer.cameraDepthTargetHandle;
RenderAndSetBaseMap(ref cmd, ref renderingData, ref renderingData.cameraData.renderer, ref m_Material, ref cameraDepthTargetHandle, ref m_SSAOTextures[0], ShaderPasses.AmbientOcclusion);
// Execute the Blur Passes
for (int i = 0; i < shaderPasses.Length; i++)
{
int baseMapIndex = textureIndices[i];
int targetIndex = textureIndices[i + 1];
RenderAndSetBaseMap(ref cmd, ref renderingData, ref renderingData.cameraData.renderer, ref m_Material, ref m_SSAOTextures[baseMapIndex], ref m_SSAOTextures[targetIndex], shaderPasses[i]);
}
// Set the global SSAO Params
cmd.SetGlobalVector(s_AmbientOcclusionParamID, new Vector4(1f, 0f, 0f, m_CurrentSettings.DirectLightingStrength));
#if ENABLE_VR && ENABLE_XR_MODULE
// Cleanup, making sure it doesn't stay enabled for a pass after that should not have it on
if (isFoveatedEnabled)
cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Disabled);
#endif
}
}
// Blits 'baseMap' through the given shader pass into 'target'. Three cases:
// 1) after-opaque passes composite straight onto the camera color target;
// 2) RTHandles that alias a RenderTargetIdentifier (rt == null) need a manual
//    SetRenderTarget + BlitTexture because BlitCameraTexture can't resolve them;
// 3) regular RTHandles use the standard BlitCameraTexture path.
private static void RenderAndSetBaseMap(ref CommandBuffer cmd, ref RenderingData renderingData, ref ScriptableRenderer renderer, ref Material mat, ref RTHandle baseMap, ref RTHandle target, ShaderPasses pass)
{
if (IsAfterOpaquePass(ref pass))
{
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
Blitter.BlitCameraTexture(cmd, baseMap, renderer.cameraColorTargetHandle, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store, mat, (int)pass);
#pragma warning restore CS0618
}
else if (baseMap.rt == null)
{
// Obsolete usage of RTHandle aliasing a RenderTargetIdentifier
Vector2 viewportScale = baseMap.useScaling ? new Vector2(baseMap.rtHandleProperties.rtHandleScale.x, baseMap.rtHandleProperties.rtHandleScale.y) : Vector2.one;
// Will set the correct camera viewport as well.
CoreUtils.SetRenderTarget(cmd, target);
Blitter.BlitTexture(cmd, baseMap.nameID, viewportScale, mat, (int)pass);
}
else
Blitter.BlitCameraTexture(cmd, baseMap, target, mat, (int)pass);
}
// Maps the blur type (and after-opaque mode) to the texture ping-pong order and the
// sequence of shader passes that Execute() should run.
private static void GetPassOrder(BlurTypes blurType, bool isAfterOpaque, out int[] textureIndices, out ShaderPasses[] shaderPasses)
{
    (textureIndices, shaderPasses) = blurType switch
    {
        BlurTypes.Bilateral => (m_BilateralTexturesIndices, isAfterOpaque ? m_BilateralAfterOpaquePasses : m_BilateralPasses),
        BlurTypes.Gaussian => (m_GaussianTexturesIndices, isAfterOpaque ? m_GaussianAfterOpaquePasses : m_GaussianPasses),
        BlurTypes.Kawase => (m_KawaseTexturesIndices, isAfterOpaque ? m_KawaseAfterOpaquePasses : m_KawasePasses),
        _ => throw new ArgumentOutOfRangeException(),
    };
}
/// <inheritdoc/>
/// <remarks>Turns the global screen-space-occlusion keyword back off when the pass owned it.</remarks>
public override void OnCameraCleanup(CommandBuffer cmd)
{
    if (cmd == null)
        throw new ArgumentNullException(nameof(cmd));

    // The keyword was only enabled when After Opaque was off; mirror that here.
    if (!m_CurrentSettings.AfterOpaque)
        cmd.SetKeyword(ShaderGlobalKeywords.ScreenSpaceOcclusion, false);
}
// Releases the four SSAO RTHandles allocated in OnCameraSetup and resets the cached
// material parameters so the next setup re-applies keywords.
public void Dispose()
{
    for (int i = 0; i < 4; ++i)
        m_SSAOTextures[i]?.Release();

    m_SSAOParamsPrev = default;
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2afeda08c4698463daec0f82157e1261
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,919 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using Unity.Collections;
using UnityEngine.Scripting.APIUpdating;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Input requirements for <c>ScriptableRenderPass</c>.
///
/// URP adds render passes to generate the inputs, or reuses inputs that are already available from earlier in the frame.
///
/// URP binds the inputs as global shader texture properties.
///
/// Values are flags and can be combined, e.g. <c>Depth | Normal</c>.
/// </summary>
/// <seealso cref="ConfigureInput"/>
[Flags]
public enum ScriptableRenderPassInput
{
/// <summary>
/// Used when a <c>ScriptableRenderPass</c> does not require any texture.
/// </summary>
None = 0,
/// <summary>
/// Used when a <c>ScriptableRenderPass</c> requires a depth texture.
///
/// To sample the depth texture in a shader, include `Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareDepthTexture.hlsl`, then use the `SampleSceneDepth` method.
/// </summary>
Depth = 1 << 0,
/// <summary>
/// Used when a <c>ScriptableRenderPass</c> requires a normal texture.
///
/// To sample the normals texture in a shader, include `Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareNormalsTexture.hlsl`, then use the `SampleSceneNormals` method.
/// </summary>
Normal = 1 << 1,
/// <summary>
/// Used when a <c>ScriptableRenderPass</c> requires a color texture.
///
/// To sample the color texture in a shader, include `Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareOpaqueTexture.hlsl`, then use the `SampleSceneColor` method.
///
/// **Note:** The opaque texture might be a downscaled copy of the framebuffer from before rendering transparent objects.
/// </summary>
Color = 1 << 2,
/// <summary>
/// Used when a <c>ScriptableRenderPass</c> requires a motion vectors texture.
///
/// To sample the motion vectors texture in a shader, use `TEXTURE2D_X(_MotionVectorTexture)`, then `LOAD_TEXTURE2D_X_LOD(_MotionVectorTexture, pixelCoords, 0).xy`.
/// </summary>
Motion = 1 << 3,
}
// Note: Spaced built-in events so we can add events in between them
// We need to leave room as we sort render passes based on event.
// Users can also inject render pass events in a specific point by doing RenderPassEvent + offset
/// <summary>
/// Controls when the render pass executes.
/// </summary>
public enum RenderPassEvent
{
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> before rendering any other passes in the pipeline.
/// Camera matrices and stereo rendering are not set up at this point.
/// You can use this to draw to custom input textures used later in the pipeline, e.g. LUT textures.
/// </summary>
BeforeRendering = 0,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> before rendering shadowmaps.
/// Camera matrices and stereo rendering are not set up at this point.
/// </summary>
BeforeRenderingShadows = 50,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> after rendering shadowmaps.
/// Camera matrices and stereo rendering are not set up at this point.
/// </summary>
AfterRenderingShadows = 100,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> before rendering prepasses, e.g. depth prepass.
/// Camera matrices and stereo rendering are already set up at this point.
/// </summary>
BeforeRenderingPrePasses = 150,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> after rendering prepasses, e.g. depth prepass.
/// Camera matrices and stereo rendering are already set up at this point.
/// </summary>
AfterRenderingPrePasses = 200,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> before rendering gbuffer pass.
/// </summary>
BeforeRenderingGbuffer = 210,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> after rendering gbuffer pass.
/// </summary>
AfterRenderingGbuffer = 220,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> before rendering deferred shading pass.
/// </summary>
BeforeRenderingDeferredLights = 230,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> after rendering deferred shading pass.
/// </summary>
AfterRenderingDeferredLights = 240,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> before rendering opaque objects.
/// </summary>
BeforeRenderingOpaques = 250,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> after rendering opaque objects.
/// </summary>
AfterRenderingOpaques = 300,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> before rendering the sky.
/// </summary>
BeforeRenderingSkybox = 350,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> after rendering the sky.
/// </summary>
AfterRenderingSkybox = 400,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> before rendering transparent objects.
/// </summary>
BeforeRenderingTransparents = 450,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> after rendering transparent objects.
/// </summary>
AfterRenderingTransparents = 500,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> before rendering post-processing effects.
/// </summary>
BeforeRenderingPostProcessing = 550,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> after rendering post-processing effects but before final blit, post-processing AA effects and color grading.
/// </summary>
AfterRenderingPostProcessing = 600,
/// <summary>
/// Executes a <c>ScriptableRenderPass</c> after rendering all effects.
/// </summary>
AfterRendering = 1000,
}
/// <summary>
/// Framebuffer fetch events in Universal RP
/// </summary>
internal enum FramebufferFetchEvent
{
/// <summary>No framebuffer fetch is performed.</summary>
None = 0,
/// <summary>The gbuffer attachments are read via framebuffer fetch during deferred shading.</summary>
FetchGbufferInDeferred = 1
}
/// <summary>
/// Caches the integer values of <see cref="RenderPassEvent"/> once at type-initialization
/// time, so callers avoid the per-call allocation that <c>Enum.GetValues</c> would incur.
/// </summary>
internal static class RenderPassEventsEnumValues
{
    // Populated exactly once by the static constructor below.
    public static int[] values;

    static RenderPassEventsEnumValues()
    {
        Array enumValues = Enum.GetValues(typeof(RenderPassEvent));
        values = new int[enumValues.Length];
        for (int i = 0; i < values.Length; ++i)
            values[i] = (int)enumValues.GetValue(i); // boxed enum unboxes directly to its underlying int
    }
}
/// <summary>
/// <c>ScriptableRenderPass</c> implements a logical rendering pass that can be used to extend Universal RP renderer.
/// </summary>
/// <remarks>
/// To implement your own rendering pass you need to take the following steps:
/// 1. Create a new Subclass from ScriptableRenderPass that implements the rendering logic.
/// 2. Create an instance of your subclass and set up the relevant parameters such as <c>ScriptableRenderPass.renderPassEvent</c> in the constructor or initialization code.
/// 3. Ensure your pass instance gets picked up by URP, this can be done through a <c>ScriptableRendererFeature</c> or by calling <c>ScriptableRenderer.EnqueuePass</c> from an event callback like <c>RenderPipelineManager.beginCameraRendering</c>
///
/// See [link] for more info on working with a <c>ScriptableRendererFeature</c> or [link] for more info on working with <c>ScriptableRenderer.EnqueuePass</c>.
/// </remarks>
public abstract partial class ScriptableRenderPass: IRenderGraphRecorder
{
/// <summary>
/// RTHandle alias for BuiltinRenderTextureType.CameraTarget which is the backbuffer.
/// </summary>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public static RTHandle k_CameraTarget = RTHandles.Alloc(BuiltinRenderTextureType.CameraTarget);
/// <summary>
/// The event when the render pass executes.
/// </summary>
public RenderPassEvent renderPassEvent { get; set; }
/// <summary>
/// The render target identifiers for color attachments.
/// This is obsolete, use colorAttachmentHandles instead.
/// </summary>
[Obsolete("Use colorAttachmentHandles", true)]
public RenderTargetIdentifier[] colorAttachments => throw new NotSupportedException("colorAttachments has been deprecated. Use colorAttachmentHandles instead.");
/// <summary>
/// The render target identifier for color attachment.
/// This is obsolete, use colorAttachmentHandle instead.
/// </summary>
[Obsolete("Use colorAttachmentHandle", true)]
public RenderTargetIdentifier[] colorAttachment => throw new NotSupportedException("colorAttachment has been deprecated. Use colorAttachmentHandle instead.");
/// <summary>
/// The render target identifier for depth attachment.
/// This is obsolete, use depthAttachmentHandle instead.
/// </summary>
[Obsolete("Use depthAttachmentHandle", true)]
public RenderTargetIdentifier depthAttachment => throw new NotSupportedException("depthAttachment has been deprecated. Use depthAttachmentHandle instead.");
/// <summary>
/// List for the g-buffer attachment handles.
/// </summary>
public RTHandle[] colorAttachmentHandles => m_ColorAttachments;
/// <summary>
/// The main color attachment handle.
/// </summary>
public RTHandle colorAttachmentHandle => m_ColorAttachments[0];
/// <summary>
/// The depth attachment handle.
/// </summary>
public RTHandle depthAttachmentHandle => m_DepthAttachment;
/// <summary>
/// The store actions for Color.
/// </summary>
public RenderBufferStoreAction[] colorStoreActions => m_ColorStoreActions;
/// <summary>
/// The store actions for Depth.
/// </summary>
public RenderBufferStoreAction depthStoreAction => m_DepthStoreAction;
internal bool[] overriddenColorStoreActions => m_OverriddenColorStoreActions;
internal bool overriddenDepthStoreAction => m_OverriddenDepthStoreAction;
/// <summary>
/// The input requirements for the <c>ScriptableRenderPass</c>, which has been set using <c>ConfigureInput</c>
/// </summary>
/// <seealso cref="ConfigureInput"/>
public ScriptableRenderPassInput input => m_Input;
/// <summary>
/// The flag to use when clearing.
/// </summary>
/// <seealso cref="ClearFlag"/>
public ClearFlag clearFlag => m_ClearFlag;
/// <summary>
/// The color value to use when clearing.
/// </summary>
public Color clearColor => m_ClearColor;
RenderBufferStoreAction[] m_ColorStoreActions = new RenderBufferStoreAction[] { RenderBufferStoreAction.Store };
RenderBufferStoreAction m_DepthStoreAction = RenderBufferStoreAction.Store;
/// <summary>
/// Setting this property to true forces rendering of all passes in the URP frame via an intermediate texture. Use this option for passes that do not support rendering directly to the backbuffer or that require sampling the active color target. Using this option might have a significant performance impact on untethered VR platforms.
/// </summary>
public bool requiresIntermediateTexture { get; set; }
// by default all store actions are Store. The overridden flags are used to keep track of explicitly requested store actions, to
// help figuring out the correct final store action for merged render passes when using the RenderPass API.
private bool[] m_OverriddenColorStoreActions = new bool[] { false };
private bool m_OverriddenDepthStoreAction = false;
private ProfilingSampler m_ProfingSampler;
private string m_PassName;
private RenderGraphSettings m_RenderGraphSettings;
/// <summary>
/// A ProfilingSampler for the entire render pass. Used as a profiling name by <c>ScriptableRenderer</c> when executing the pass.
/// The default is named after the class type of the sub-class.
/// Set <c>base.profilingSampler</c> from the sub-class constructor to set a different profiling name for a custom <c>ScriptableRenderPass</c>.
/// This returns null in release builds (non-development) unless Compatibility Mode is enabled.
/// </summary>
protected internal ProfilingSampler profilingSampler
{
get
{
//We only need this in release (non-dev build) but putting it here to track it in more test automation.
if (m_RenderGraphSettings == null)
{
m_RenderGraphSettings = GraphicsSettings.GetRenderPipelineSettings<RenderGraphSettings>();
}
#if (DEVELOPMENT_BUILD || UNITY_EDITOR)
return m_ProfingSampler;
#else
//We only remove the sampler in release build when not in Compatibility Mode to avoid breaking user projects in the very unlikely scenario they would get the sampler.
return m_RenderGraphSettings.enableRenderCompatibilityMode ? m_ProfingSampler : null;
#endif
}
set
{
m_ProfingSampler = value;
// Keep passName in sync so it always has a valid value even when the sampler is null.
m_PassName = (value != null) ? value.name : this.GetType().Name;
}
}
/// <summary>
/// The name of the pass that will show up in profiler and other tools. This will be identical to the
/// name of <c>profilingSampler</c>. <c>profilingSampler</c> is set to null in the release build (non-development)
/// so this <c>passName</c> property is the safe way to access the name and use it consistently. This will always return a valid string.
/// </summary>
protected internal string passName{ get { return m_PassName; } }
internal bool overrideCameraTarget { get; set; }
internal bool isBlitRenderPass { get; set; }
internal bool useNativeRenderPass { get; set; }
// index to track the position in the current frame
internal int renderPassQueueIndex { get; set; }
internal NativeArray<int> m_ColorAttachmentIndices;
internal NativeArray<int> m_InputAttachmentIndices;
internal GraphicsFormat[] renderTargetFormat { get; set; }
RTHandle[] m_ColorAttachments;
internal RTHandle[] m_InputAttachments = new RTHandle[8];
internal bool[] m_InputAttachmentIsTransient = new bool[8];
RTHandle m_DepthAttachment;
ScriptableRenderPassInput m_Input = ScriptableRenderPassInput.None;
ClearFlag m_ClearFlag = ClearFlag.None;
Color m_ClearColor = Color.black;
// Returns the renderer's debug handler when one exists and is active for this camera; otherwise null.
static internal DebugHandler GetActiveDebugHandler(UniversalCameraData cameraData)
{
    DebugHandler handler = cameraData.renderer.DebugHandler;
    return (handler != null && handler.IsActiveForCamera(cameraData.isPreviewCamera)) ? handler : null;
}
/// <summary>
/// Creates a new <c>ScriptableRenderPass</c> instance with default state: executes after
/// opaques, targets the camera backbuffer, and uses a profiling sampler named after the
/// concrete sub-class type.
/// </summary>
public ScriptableRenderPass()
{
renderPassEvent = RenderPassEvent.AfterRenderingOpaques;
// Disable obsolete warning for internal usage
#pragma warning disable CS0618
// Default to the camera target until ConfigureTarget overrides it (8 color attachment slots).
m_ColorAttachments = new RTHandle[] { k_CameraTarget, null, null, null, null, null, null, null };
m_DepthAttachment = k_CameraTarget;
#pragma warning restore CS0618
m_InputAttachments = new RTHandle[] { null, null, null, null, null, null, null, null };
m_InputAttachmentIsTransient = new bool[] { false, false, false, false, false, false, false, false };
m_ColorStoreActions = new RenderBufferStoreAction[] { RenderBufferStoreAction.Store, 0, 0, 0, 0, 0, 0, 0 };
m_DepthStoreAction = RenderBufferStoreAction.Store;
// No store action has been explicitly overridden yet (see ConfigureColorStoreAction).
m_OverriddenColorStoreActions = new bool[] { false, false, false, false, false, false, false, false };
m_OverriddenDepthStoreAction = false;
m_ClearFlag = ClearFlag.None;
m_ClearColor = Color.black;
overrideCameraTarget = false;
isBlitRenderPass = false;
useNativeRenderPass = true;
// -1 marks the pass as not yet enqueued in the frame's render pass list.
renderPassQueueIndex = -1;
renderTargetFormat = new GraphicsFormat[]
{
GraphicsFormat.None, GraphicsFormat.None, GraphicsFormat.None,
GraphicsFormat.None, GraphicsFormat.None, GraphicsFormat.None, GraphicsFormat.None, GraphicsFormat.None
};
// Also initializes m_PassName via the profilingSampler setter.
profilingSampler = new ProfilingSampler(this.GetType().Name);
}
/// <summary>
/// Declares which input textures (depth, normals, color, motion vectors) this render pass requires.
/// Call this inside <c>ScriptableRendererFeature.AddRenderPasses</c> so URP can schedule or reuse
/// the passes that produce those resources.
/// </summary>
/// <param name="passInput">Flags describing the required inputs for this pass.</param>
/// <seealso cref="ScriptableRendererFeature.AddRenderPasses"/>
public void ConfigureInput(ScriptableRenderPassInput passInput) => m_Input = passInput;
/// <summary>
/// Configures the Store Action for a color attachment of this render pass.
/// </summary>
/// <param name="storeAction">RenderBufferStoreAction to use</param>
/// <param name="attachmentIndex">Index of the color attachment (0-7)</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void ConfigureColorStoreAction(RenderBufferStoreAction storeAction, uint attachmentIndex = 0)
{
m_ColorStoreActions[attachmentIndex] = storeAction;
// Record the explicit override so native-render-pass merging keeps this store action.
m_OverriddenColorStoreActions[attachmentIndex] = true;
}
/// <summary>
/// Configures the Store Actions for all the color attachments of this render pass.
/// Extra entries beyond the attachment count are ignored; attachments without a
/// corresponding entry keep their current store action.
/// </summary>
/// <param name="storeActions">Array of RenderBufferStoreActions to use</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void ConfigureColorStoreActions(RenderBufferStoreAction[] storeActions)
{
    int limit = Math.Min(storeActions.Length, m_ColorStoreActions.Length);
    for (int i = 0; i < limit; i++)
    {
        m_ColorStoreActions[i] = storeActions[i];
        m_OverriddenColorStoreActions[i] = true; // mark as explicitly requested
    }
}
/// <summary>
/// Configures the Store Action for the depth attachment of this render pass.
/// </summary>
/// <param name="storeAction">RenderBufferStoreAction to use</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void ConfigureDepthStoreAction(RenderBufferStoreAction storeAction)
{
m_DepthStoreAction = storeAction;
// Record the explicit override so native-render-pass merging keeps this store action.
m_OverriddenDepthStoreAction = true;
}
/// <summary>
/// Configures a single input attachment (slot 0) for this render pass.
/// </summary>
/// <param name="input">Texture handle used as the input attachment.</param>
/// <param name="isTransient">When true, the attachment is marked transient for slot 0.</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
internal void ConfigureInputAttachments(RTHandle input, bool isTransient = false)
{
    // This overload only touches slot 0; other slots keep their previous values.
    m_InputAttachments[0] = input;
    m_InputAttachmentIsTransient[0] = isTransient;
}
/// <summary>
/// Configures the full set of input attachments for this render pass.
/// </summary>
/// <param name="inputs">Array of texture handles used as input attachments.</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
internal void ConfigureInputAttachments(RTHandle[] inputs)
{
    // Replaces the whole input-attachment array; the transient flags array is left untouched.
    m_InputAttachments = inputs;
}
/// <summary>
/// Configures the full set of input attachments together with their transient flags.
/// </summary>
/// <param name="inputs">Array of texture handles used as input attachments.</param>
/// <param name="isTransient">Per-attachment transient flags; assumed to match <paramref name="inputs"/> in length — TODO confirm at call sites.</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
internal void ConfigureInputAttachments(RTHandle[] inputs, bool[] isTransient)
{
    // Disable obsolete warning for internal usage
#pragma warning disable CS0618
    ConfigureInputAttachments(inputs);
#pragma warning restore CS0618
    m_InputAttachmentIsTransient = isTransient;
}
/// <summary>
/// Marks (or unmarks) the input attachment at <paramref name="idx"/> as transient.
/// </summary>
/// <param name="idx">Index of the input attachment.</param>
/// <param name="isTransient">New transient flag value.</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
internal void SetInputAttachmentTransient(int idx, bool isTransient)
{
    m_InputAttachmentIsTransient[idx] = isTransient;
}
/// <summary>
/// Returns whether the input attachment at <paramref name="idx"/> is marked transient.
/// </summary>
/// <param name="idx">Index of the input attachment.</param>
/// <returns>True when the attachment was flagged transient.</returns>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
internal bool IsInputAttachmentTransient(int idx) => m_InputAttachmentIsTransient[idx];
/// <summary>
/// Resets render targets to default.
/// This method effectively resets changes done by ConfigureTarget.
/// </summary>
/// <seealso cref="ConfigureTarget"/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void ResetTarget()
{
    overrideCameraTarget = false;

    // Reset depth
    m_DepthAttachment = null;

    // Clear every color slot so the pass falls back to the camera target.
    for (int slot = 0; slot < m_ColorAttachments.Length; ++slot)
        m_ColorAttachments[slot] = null;
}
/// <summary>
/// Configures render targets for this render pass. Call this instead of CommandBuffer.SetRenderTarget.
/// This method should be called inside Configure.
/// </summary>
/// <param name="colorAttachment">Color attachment identifier.</param>
/// <param name="depthAttachment">Depth attachment identifier.</param>
/// <seealso cref="Configure"/>
[Obsolete("Use RTHandles for colorAttachment and depthAttachment", true)]
public void ConfigureTarget(RenderTargetIdentifier colorAttachment, RenderTargetIdentifier depthAttachment)
{
    // error: true on [Obsolete] makes direct calls fail to compile; this body only
    // guards against reflection/late-bound invocation of the removed overload.
    throw new NotSupportedException("ConfigureTarget with RenderTargetIdentifier has been deprecated. Use RTHandles instead");
}
/// <summary>
/// Configures render targets for this render pass. Call this instead of CommandBuffer.SetRenderTarget.
/// This method should be called inside Configure.
/// </summary>
/// <param name="colorAttachment">Color attachment handle.</param>
/// <param name="depthAttachment">Depth attachment handle.</param>
/// <seealso cref="Configure"/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void ConfigureTarget(RTHandle colorAttachment, RTHandle depthAttachment)
{
    // Single color target in slot 0; all remaining slots are cleared.
    m_ColorAttachments[0] = colorAttachment;
    for (int slot = 1; slot < m_ColorAttachments.Length; ++slot)
        m_ColorAttachments[slot] = null;

    m_DepthAttachment = depthAttachment;
    overrideCameraTarget = true;
}
/// <summary>
/// Configures render targets for this render pass. Call this instead of CommandBuffer.SetRenderTarget.
/// This method should be called inside Configure.
/// </summary>
/// <param name="colorAttachments">Color attachment identifier.</param>
/// <param name="depthAttachment">Depth attachment identifier.</param>
/// <seealso cref="Configure"/>
[Obsolete("Use RTHandles for colorAttachments and depthAttachment", true)]
public void ConfigureTarget(RenderTargetIdentifier[] colorAttachments, RenderTargetIdentifier depthAttachment)
{
    // error: true on [Obsolete] makes direct calls fail to compile; this body only
    // guards against reflection/late-bound invocation of the removed overload.
    throw new NotSupportedException("ConfigureTarget with RenderTargetIdentifier has been deprecated. Use it with RTHandles instead");
}
/// <summary>
/// Configures render targets for this render pass. Call this instead of CommandBuffer.SetRenderTarget.
/// This method should be called inside Configure.
/// </summary>
/// <param name="colorAttachments">Color attachment handles. If the array is longer than the internal
/// attachment list, an error is logged and the extra entries are ignored.</param>
/// <param name="depthAttachment">Depth attachment handle.</param>
/// <seealso cref="Configure"/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void ConfigureTarget(RTHandle[] colorAttachments, RTHandle depthAttachment)
{
    overrideCameraTarget = true;

    uint nonNullColorBuffers = RenderingUtils.GetValidColorBufferCount(colorAttachments);
    if (nonNullColorBuffers > SystemInfo.supportedRenderTargetCount)
        Debug.LogError("Trying to set " + nonNullColorBuffers + " renderTargets, which is more than the maximum supported:" + SystemInfo.supportedRenderTargetCount);

    if (colorAttachments.Length > m_ColorAttachments.Length)
        Debug.LogError("Trying to set " + colorAttachments.Length + " color attachments, which is more than the maximum supported:" + m_ColorAttachments.Length);

    // Clamp the copy so an oversized input array cannot index past m_ColorAttachments
    // (the original code logged the error above but then wrote out of bounds anyway).
    int copyCount = Math.Min(colorAttachments.Length, m_ColorAttachments.Length);
    for (int i = 0; i < copyCount; ++i)
    {
        m_ColorAttachments[i] = colorAttachments[i];
    }
    // Clear any remaining slots left over from a previous configuration.
    for (int i = copyCount; i < m_ColorAttachments.Length; ++i)
    {
        m_ColorAttachments[i] = null;
    }
    m_DepthAttachment = depthAttachment;
}
/// <summary>
/// Internal variant that also records the explicit graphics format of each color attachment.
/// </summary>
/// <param name="colorAttachments">Color attachment handles.</param>
/// <param name="depthAttachment">Depth attachment handle.</param>
/// <param name="formats">Per-attachment graphics formats; entries beyond the capacity of
/// <c>renderTargetFormat</c> are ignored.</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
internal void ConfigureTarget(RTHandle[] colorAttachments, RTHandle depthAttachment, GraphicsFormat[] formats)
{
    // Disable obsolete warning for internal usage
#pragma warning disable CS0618
    ConfigureTarget(colorAttachments, depthAttachment);
#pragma warning restore CS0618
    // Clamp so an oversized formats array cannot write past renderTargetFormat
    // (a fixed-size array of 8 entries allocated in the constructor).
    int count = Math.Min(formats.Length, renderTargetFormat.Length);
    for (int i = 0; i < count; ++i)
        renderTargetFormat[i] = formats[i];
}
/// <summary>
/// Configures render targets for this render pass. Call this instead of CommandBuffer.SetRenderTarget.
/// This method should be called inside Configure.
/// </summary>
/// <param name="colorAttachment">Color attachment identifier.</param>
/// <seealso cref="Configure"/>
[Obsolete("Use RTHandle for colorAttachment", true)]
public void ConfigureTarget(RenderTargetIdentifier colorAttachment)
{
    // error: true on [Obsolete] makes direct calls fail to compile; this body only
    // guards against reflection/late-bound invocation of the removed overload.
    throw new NotSupportedException("ConfigureTarget with RenderTargetIdentifier has been deprecated. Use it with RTHandles instead");
}
/// <summary>
/// Configures render targets for this render pass. Call this instead of CommandBuffer.SetRenderTarget.
/// This method should be called inside Configure.
/// </summary>
/// <param name="colorAttachment">Color attachment handle. Depth falls back to the camera target.</param>
/// <seealso cref="Configure"/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void ConfigureTarget(RTHandle colorAttachment)
{
    // Disable obsolete warning for internal usage
#pragma warning disable CS0618
    ConfigureTarget(colorAttachment, k_CameraTarget);
#pragma warning restore CS0618
}
/// <summary>
/// Configures render targets for this render pass. Call this instead of CommandBuffer.SetRenderTarget.
/// This method should be called inside Configure.
/// </summary>
/// <param name="colorAttachments">Color attachment identifiers.</param>
/// <seealso cref="Configure"/>
[Obsolete("Use RTHandles for colorAttachments", true)]
public void ConfigureTarget(RenderTargetIdentifier[] colorAttachments)
{
    // error: true on [Obsolete] makes direct calls fail to compile; this body only
    // guards against reflection/late-bound invocation of the removed overload.
    throw new NotSupportedException("ConfigureTarget with RenderTargetIdentifier has been deprecated. Use it with RTHandles instead");
}
/// <summary>
/// Configures render targets for this render pass. Call this instead of CommandBuffer.SetRenderTarget.
/// This method should be called inside Configure.
/// </summary>
/// <param name="colorAttachments">Color attachment handles. Depth falls back to the camera target.</param>
/// <seealso cref="Configure"/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void ConfigureTarget(RTHandle[] colorAttachments)
{
    // Disable obsolete warning for internal usage
#pragma warning disable CS0618
    ConfigureTarget(colorAttachments, k_CameraTarget);
#pragma warning restore CS0618
}
/// <summary>
/// Configures clearing for the render targets for this render pass. Call this inside Configure.
/// </summary>
/// <param name="clearFlag">ClearFlag containing information about what targets to clear.</param>
/// <param name="clearColor">Clear color.</param>
/// <seealso cref="Configure"/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void ConfigureClear(ClearFlag clearFlag, Color clearColor)
{
    // Simply record the requested clear state; the renderer applies it when binding targets.
    m_ClearColor = clearColor;
    m_ClearFlag = clearFlag;
}
/// <summary>
/// This method is called by the renderer before rendering a camera.
/// Override this method if you need to configure render targets and their clear state, and to create temporary render target textures.
/// If a render pass doesn't override this method, this render pass renders to the active Camera's render target.
/// You should never call CommandBuffer.SetRenderTarget. Instead call <c>ConfigureTarget</c> and <c>ConfigureClear</c>.
/// </summary>
/// <param name="cmd">CommandBuffer to enqueue rendering commands. This will be executed by the pipeline.</param>
/// <param name="renderingData">Current rendering state information</param>
/// <seealso cref="ConfigureTarget"/>
/// <seealso cref="ConfigureClear"/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public virtual void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
{ } // Intentionally empty: the default pass needs no per-camera setup.
/// <summary>
/// This method is called by the renderer before executing the render pass.
/// Override this method if you need to configure render targets and their clear state, and to create temporary render target textures.
/// If a render pass doesn't override this method, this render pass renders to the active Camera's render target.
/// You should never call CommandBuffer.SetRenderTarget. Instead call <c>ConfigureTarget</c> and <c>ConfigureClear</c>.
/// </summary>
/// <param name="cmd">CommandBuffer to enqueue rendering commands. This will be executed by the pipeline.</param>
/// <param name="cameraTextureDescriptor">Render texture descriptor of the camera render target.</param>
/// <seealso cref="ConfigureTarget"/>
/// <seealso cref="ConfigureClear"/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public virtual void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
{ } // Intentionally empty: the default pass needs no pre-execute configuration.
/// <summary>
/// Called upon finish rendering a camera. You can use this callback to release any resources created
/// by this render pass that need to be cleaned up once the camera has finished rendering.
/// This method should be called for all cameras in a camera stack.
/// </summary>
/// <param name="cmd">Use this CommandBuffer to cleanup any generated data</param>
public virtual void OnCameraCleanup(CommandBuffer cmd)
{
    // Intentionally empty: the default pass allocates nothing per camera.
}
/// <summary>
/// Called upon finish rendering a camera stack. You can use this callback to release any resources created
/// by this render pass that need to be cleaned up once all cameras in the stack have finished rendering.
/// This method will be called once after rendering the last camera in the camera stack.
/// Cameras that don't have an explicit camera stack are also considered stacked rendering.
/// In that case the Base camera is the first and last camera in the stack.
/// </summary>
/// <param name="cmd">Use this CommandBuffer to cleanup any generated data</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public virtual void OnFinishCameraStackRendering(CommandBuffer cmd)
{ } // Intentionally empty: the default pass allocates nothing per stack.
/// <summary>
/// Execute the pass. This is where custom rendering occurs. Specific details are left to the implementation.
/// </summary>
/// <param name="context">Use this render context to issue any draw commands during execution</param>
/// <param name="renderingData">Current rendering state information</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public virtual void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    // Base implementation only warns: compatibility-mode passes must override Execute.
    Debug.LogWarning("Execute is not implemented, the pass " + this + " won't be executed in the current render loop.");
}
/// <inheritdoc cref="IRenderGraphRecorder.RecordRenderGraph"/>
public virtual void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData)
{
    // Base implementation only warns: render-graph passes must override this method.
    Debug.LogWarning("The render pass " + this + " does not have an implementation of the RecordRenderGraph method. Please implement this method, or consider turning on Compatibility Mode (RenderGraph disabled) in the menu Edit > Project Settings > Graphics > URP. Otherwise the render pass will have no effect. For more information, refer to https://docs.unity3d.com/Packages/com.unity.render-pipelines.universal@latest/index.html?subfolder=/manual/customizing-urp.html.");
}
/// <summary>
/// Add a blit command to the context for execution. This changes the active render target in the ScriptableRenderer to
/// destination.
/// </summary>
/// <param name="cmd">Command buffer to record command for execution.</param>
/// <param name="source">Source texture or target identifier to blit from.</param>
/// <param name="destination">Destination texture or target identifier to blit into. This becomes the renderer active render target.</param>
/// <param name="material">Material to use.</param>
/// <param name="passIndex">Shader pass to use. Default is 0.</param>
/// <seealso cref="ScriptableRenderer"/>
[Obsolete("Use RTHandles for source and destination", true)]
public void Blit(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, Material material = null, int passIndex = 0)
{
    // error: true on [Obsolete] makes direct calls fail to compile; this body only
    // guards against reflection/late-bound invocation of the removed overload.
    throw new NotSupportedException("Blit with RenderTargetIdentifier has been deprecated. Use RTHandles instead");
}
/// <summary>
/// Add a blit command to the context for execution. This changes the active render target in the ScriptableRenderer to
/// destination.
/// </summary>
/// <param name="cmd">Command buffer to record command for execution.</param>
/// <param name="source">Source texture or target handle to blit from.</param>
/// <param name="destination">Destination texture or target handle to blit into. This becomes the renderer active render target.</param>
/// <param name="material">Material to use. When null, a plain copy is performed.</param>
/// <param name="passIndex">Shader pass to use. Default is 0.</param>
/// <seealso cref="ScriptableRenderer"/>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void Blit(CommandBuffer cmd, RTHandle source, RTHandle destination, Material material = null, int passIndex = 0)
{
    if (material != null)
    {
        Blitter.BlitCameraTexture(cmd, source, destination, material, passIndex);
    }
    else
    {
        // No material: plain copy whose sampling mode follows the source texture's filter mode.
        Blitter.BlitCameraTexture(cmd, source, destination, bilinear: source.rt.filterMode == FilterMode.Bilinear);
    }
}
/// <summary>
/// Add a blit command to the context for execution. This applies the material to the color target.
/// </summary>
/// <param name="cmd">Command buffer to record command for execution.</param>
/// <param name="data">RenderingData to access the active renderer.</param>
/// <param name="material">Material to use.</param>
/// <param name="passIndex">Shader pass to use. Default is 0.</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void Blit(CommandBuffer cmd, ref RenderingData data, Material material, int passIndex = 0)
{
    var activeRenderer = data.cameraData.renderer;
    // Blit the current color target into the front buffer, then swap so the result
    // becomes the renderer's active color target.
    Blit(cmd, activeRenderer.cameraColorTargetHandle, activeRenderer.GetCameraColorFrontBuffer(cmd), material, passIndex);
    activeRenderer.SwapColorBuffer(cmd);
}
/// <summary>
/// Add a blit command to the context for execution. This applies the material to the color target.
/// </summary>
/// <param name="cmd">Command buffer to record command for execution.</param>
/// <param name="data">RenderingData to access the active renderer.</param>
/// <param name="source">Source texture or target identifier to blit from.</param>
/// <param name="material">Material to use.</param>
/// <param name="passIndex">Shader pass to use. Default is 0.</param>
[Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
public void Blit(CommandBuffer cmd, ref RenderingData data, RTHandle source, Material material, int passIndex = 0)
{
    // Blit from the given source directly into the renderer's current color target.
    var activeRenderer = data.cameraData.renderer;
    Blit(cmd, source, activeRenderer.cameraColorTargetHandle, material, passIndex);
}
/// <summary>
/// Creates <c>DrawingSettings</c> based on the current rendering state.
/// </summary>
/// <param name="shaderTagId">Shader pass tag to render.</param>
/// <param name="renderingData">Current rendering state.</param>
/// <param name="sortingCriteria">Criteria to sort objects being rendered.</param>
/// <returns>Returns the draw settings created.</returns>
/// <seealso cref="DrawingSettings"/>
public DrawingSettings CreateDrawingSettings(ShaderTagId shaderTagId, ref RenderingData renderingData, SortingCriteria sortingCriteria)
{
    // Pull the universal data blocks out of the frame container and forward them.
    ContextContainer frame = renderingData.frameData;
    return RenderingUtils.CreateDrawingSettings(
        shaderTagId,
        frame.Get<UniversalRenderingData>(),
        frame.Get<UniversalCameraData>(),
        frame.Get<UniversalLightData>(),
        sortingCriteria);
}
/// <summary>
/// Creates <c>DrawingSettings</c> based on the current rendering state.
/// </summary>
/// <param name="shaderTagId">Shader pass tag to render.</param>
/// <param name="renderingData">Current rendering state.</param>
/// <param name="cameraData">Current camera state.</param>
/// <param name="lightData">Current light state.</param>
/// <param name="sortingCriteria">Criteria to sort objects being rendered.</param>
/// <returns>Returns the draw settings created.</returns>
/// <seealso cref="DrawingSettings"/>
public DrawingSettings CreateDrawingSettings(ShaderTagId shaderTagId, UniversalRenderingData renderingData,
    UniversalCameraData cameraData, UniversalLightData lightData, SortingCriteria sortingCriteria)
    => RenderingUtils.CreateDrawingSettings(shaderTagId, renderingData, cameraData, lightData, sortingCriteria);
/// <summary>
/// Creates <c>DrawingSettings</c> based on current rendering state.
/// </summary>
/// <param name="shaderTagIdList">List of shader pass tags to render.</param>
/// <param name="renderingData">Current rendering state.</param>
/// <param name="sortingCriteria">Criteria to sort objects being rendered.</param>
/// <returns>Returns the draw settings created.</returns>
/// <seealso cref="DrawingSettings"/>
public DrawingSettings CreateDrawingSettings(List<ShaderTagId> shaderTagIdList,
    ref RenderingData renderingData, SortingCriteria sortingCriteria)
{
    // Pull the universal data blocks out of the frame container and forward them.
    ContextContainer frame = renderingData.frameData;
    return RenderingUtils.CreateDrawingSettings(
        shaderTagIdList,
        frame.Get<UniversalRenderingData>(),
        frame.Get<UniversalCameraData>(),
        frame.Get<UniversalLightData>(),
        sortingCriteria);
}
/// <summary>
/// Creates <c>DrawingSettings</c> based on current rendering state.
/// </summary>
/// <param name="shaderTagIdList">List of shader pass tags to render.</param>
/// <param name="renderingData">Current rendering state.</param>
/// <param name="cameraData">Current camera state.</param>
/// <param name="lightData">Current light state.</param>
/// <param name="sortingCriteria">Criteria to sort objects being rendered.</param>
/// <returns>Returns the draw settings created.</returns>
/// <seealso cref="DrawingSettings"/>
public DrawingSettings CreateDrawingSettings(List<ShaderTagId> shaderTagIdList,
    UniversalRenderingData renderingData, UniversalCameraData cameraData,
    UniversalLightData lightData, SortingCriteria sortingCriteria)
    => RenderingUtils.CreateDrawingSettings(shaderTagIdList, renderingData, cameraData, lightData, sortingCriteria);
/// <summary>
/// Compares two instances of <c>ScriptableRenderPass</c> by their <c>RenderPassEvent</c> and returns if <paramref name="lhs"/> is executed before <paramref name="rhs"/>.
/// </summary>
/// <param name="lhs">Left-hand side pass.</param>
/// <param name="rhs">Right-hand side pass.</param>
/// <returns>True when <paramref name="lhs"/> runs at an earlier event than <paramref name="rhs"/>.</returns>
public static bool operator <(ScriptableRenderPass lhs, ScriptableRenderPass rhs) => lhs.renderPassEvent < rhs.renderPassEvent;
/// <summary>
/// Compares two instances of <c>ScriptableRenderPass</c> by their <c>RenderPassEvent</c> and returns if <paramref name="lhs"/> is executed after <paramref name="rhs"/>.
/// </summary>
/// <param name="lhs">Left-hand side pass.</param>
/// <param name="rhs">Right-hand side pass.</param>
/// <returns>True when <paramref name="lhs"/> runs at a later event than <paramref name="rhs"/>.</returns>
public static bool operator >(ScriptableRenderPass lhs, ScriptableRenderPass rhs) => lhs.renderPassEvent > rhs.renderPassEvent;
// Returns the distance (in enum units) from the given event to the next declared
// RenderPassEvent value, or 50 when the event is the last one declared.
// Logs an error and returns 0 when the value is not a declared RenderPassEvent.
static internal int GetRenderPassEventRange(RenderPassEvent renderPassEvent)
{
    var values = RenderPassEventsEnumValues.values;

    // Locate the event in the enum-value table. The original code walked the array
    // with a redundant second counter; Array.IndexOf expresses the same search directly.
    int index = Array.IndexOf(values, (int)renderPassEvent);
    if (index < 0)
    {
        Debug.LogError("GetRenderPassEventRange: invalid renderPassEvent value cannot be found in the RenderPassEvent enumeration");
        return 0;
    }

    if (index + 1 >= values.Length)
        return 50; // if this was the last event in the enum, then add 50 as the range

    return values[index + 1] - (int)renderPassEvent;
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: edba24b6007b9dd41824b4656ed8ebcf
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,87 @@
using System;
using Unity.Collections;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Write stencil bits for stencil-based cross-fade LOD.
/// </summary>
sealed class StencilCrossFadeRenderPass
{
    private Material[] m_StencilDitherMaskSeedMaterials;
    private readonly int _StencilDitherPattern = Shader.PropertyToID("_StencilDitherPattern");
    private readonly int _StencilRefDitherMask = Shader.PropertyToID("_StencilRefDitherMask");
    private readonly int _StencilWriteDitherMask = Shader.PropertyToID("_StencilWriteDitherMask");
    private readonly ProfilingSampler m_ProfilingSampler;

    internal StencilCrossFadeRenderPass(Shader shader)
    {
        m_ProfilingSampler = new ProfilingSampler("StencilDitherMaskSeed");
        m_StencilDitherMaskSeedMaterials = new Material[3];

        // One stencil ref per dither pattern; the last entry covers all cross-fade bits.
        int[] crossFadeRefs =
        {
            (int)UniversalRendererStencilRef.CrossFadeStencilRef_0,
            (int)UniversalRendererStencilRef.CrossFadeStencilRef_1,
            (int)UniversalRendererStencilRef.CrossFadeStencilRef_All
        };
        int writeMask = (int)UniversalRendererStencilRef.CrossFadeStencilRef_All;
        Debug.Assert(writeMask < 0x100); // 8 bits for stencil

        for (int index = 0; index < m_StencilDitherMaskSeedMaterials.Length; ++index)
        {
            var material = CoreUtils.CreateEngineMaterial(shader);
            material.SetInteger(_StencilDitherPattern, index + 1);
            material.SetFloat(_StencilWriteDitherMask, (float)writeMask);
            material.SetFloat(_StencilRefDitherMask, (float)crossFadeRefs[index]);
            m_StencilDitherMaskSeedMaterials[index] = material;
        }
    }

    /// <summary>
    /// Destroys the engine materials created by the constructor.
    /// </summary>
    public void Dispose()
    {
        foreach (var material in m_StencilDitherMaskSeedMaterials)
            CoreUtils.Destroy(material);
        m_StencilDitherMaskSeedMaterials = null;
    }

    /// <summary>
    /// Shared pass data for render graph
    /// </summary>
    private class PassData
    {
        public TextureHandle depthTarget;
        public Material[] stencilDitherMaskSeedMaterials;
    }

    /// <summary>
    /// Records the stencil dither-mask seeding pass into the render graph.
    /// </summary>
    public void Render(RenderGraph renderGraph, ScriptableRenderContext context, TextureHandle depthTarget)
    {
        using (var builder = renderGraph.AddRasterRenderPass<PassData>("Prepare Cross Fade Stencil", out var passData, m_ProfilingSampler))
        {
            builder.SetRenderAttachmentDepth(depthTarget, AccessFlags.Write);
            passData.stencilDitherMaskSeedMaterials = m_StencilDitherMaskSeedMaterials;
            passData.depthTarget = depthTarget;
            // Renamed lambda parameter so it no longer shadows the method's 'context' argument.
            builder.SetRenderFunc((PassData data, RasterGraphContext rasterContext) =>
            {
                ExecutePass(rasterContext.cmd, data.depthTarget, data.stencilDitherMaskSeedMaterials);
            });
        }
    }

    private static void ExecutePass(RasterCommandBuffer cmd, RTHandle depthTarget, Material[] stencilDitherMaskSeedMaterials)
    {
        // Cover the full (scaled) viewport of the depth target.
        Vector2Int scaledViewportSize = depthTarget.GetScaledSize(depthTarget.rtHandleProperties.currentViewportSize);
        cmd.SetViewport(new Rect(0.0f, 0.0f, scaledViewportSize.x, scaledViewportSize.y));

        // render one stencil value in each pass because SV_StencilRef is not fully supported.
        for (int index = 0; index < stencilDitherMaskSeedMaterials.Length; ++index)
        {
            // Fullscreen triangle (3 vertices, 1 instance), shader pass 0 of each material.
            cmd.DrawProcedural(Matrix4x4.identity, stencilDitherMaskSeedMaterials[index], 0, MeshTopology.Triangles, 3, 1);
        }
    }
}
}

View File

@@ -0,0 +1,2 @@
fileFormatVersion: 2
guid: a246c6096818ee346906368fd723c112

View File

@@ -0,0 +1,50 @@
using System;
using UnityEngine.Rendering.Universal.Internal;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Applies relevant settings before rendering transparent objects
/// </summary>
internal class TransparentSettingsPass : ScriptableRenderPass
{
    // True when the renderer allows transparent objects to receive shadows.
    bool m_shouldReceiveShadows;

    public TransparentSettingsPass(RenderPassEvent evt, bool shadowReceiveSupported)
    {
        profilingSampler = new ProfilingSampler("Set Transparent Parameters");
        renderPassEvent = evt;
        m_shouldReceiveShadows = shadowReceiveSupported;
    }

    /// <summary>
    /// Returns true when this pass should be enqueued for the current frame.
    /// </summary>
    public bool Setup()
    {
        // Currently we only need to enqueue this pass when the user
        // doesn't want transparent objects to receive shadows
        return !m_shouldReceiveShadows;
    }

    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        var cmd = CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer);
        using (new ProfilingScope(cmd, profilingSampler))
        {
            ExecutePass(cmd);
        }
    }

    public static void ExecutePass(RasterCommandBuffer rasterCommandBuffer)
    {
        // -----------------------------------------------------------
        // This pass is only used when transparent objects should not
        // receive shadows using the setting on the URP Renderer.
        // This is controlled in the public bool Setup() function above.
        // -----------------------------------------------------------
        MainLightShadowCasterPass.SetShadowParamsForEmptyShadowmap(rasterCommandBuffer);
        AdditionalLightsShadowCasterPass.SetShadowParamsForEmptyShadowmap(rasterCommandBuffer);
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: faf05873da6e6fb489aa73a76d780c5a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,277 @@
#if ENABLE_VR && ENABLE_XR_MODULE
using System;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Render all objects that have a 'XRMotionVectors' pass into the given depth buffer and motionvec buffer.
/// </summary>
public class XRDepthMotionPass : ScriptableRenderPass
{
public const string k_MotionOnlyShaderTagIdName = "XRMotionVectors";
private static readonly ShaderTagId k_MotionOnlyShaderTagId = new ShaderTagId(k_MotionOnlyShaderTagIdName);
private static readonly int k_SpaceWarpNDCModifier = Shader.PropertyToID("_SpaceWarpNDCModifier");
private PassData m_PassData;
private RTHandle m_XRMotionVectorColor;
private TextureHandle xrMotionVectorColor;
private RTHandle m_XRMotionVectorDepth;
private TextureHandle xrMotionVectorDepth;
private bool m_XRSpaceWarpRightHandedNDC;
/// <summary>
/// Creates a new <c>XRDepthMotionPass</c> instance.
/// </summary>
/// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
/// <param name="xrMotionVector">The Shader used for rendering XR camera motion vector.</param>
/// <seealso cref="RenderPassEvent"/>
public XRDepthMotionPass(RenderPassEvent evt, Shader xrMotionVector)
{
    base.profilingSampler = new ProfilingSampler(nameof(XRDepthMotionPass));
    renderPassEvent = evt;
    m_PassData = new PassData();
    ResetMotionData();

    // Material used to draw the camera motion vectors.
    m_XRMotionVectorMaterial = CoreUtils.CreateEngineMaterial(xrMotionVector);

    // Motion color/depth targets are imported lazily; start with no handles.
    m_XRMotionVectorColor = null;
    xrMotionVectorColor = TextureHandle.nullHandle;
    m_XRMotionVectorDepth = null;
    xrMotionVectorDepth = TextureHandle.nullHandle;
}
// Per-recording data handed to the render graph execution callback.
private class PassData
{
    // Renderer list drawing all objects with the 'XRMotionVectors' pass.
    internal RendererListHandle objMotionRendererList;
    // Previous-frame stereo view-projection matrices, one per eye.
    internal Matrix4x4[] previousViewProjectionStereo = new Matrix4x4[k_XRViewCount];
    // Current-frame stereo view-projection matrices, one per eye.
    internal Matrix4x4[] viewProjectionStereo = new Matrix4x4[k_XRViewCount];
    // Material used to render the XR camera motion vectors.
    internal Material xrMotionVector;
}
/// View projection data
private const int k_XRViewCount = 2;
private Matrix4x4[] m_ViewProjection = new Matrix4x4[k_XRViewCount];
private Matrix4x4[] m_PreviousViewProjection = new Matrix4x4[k_XRViewCount];
private int m_LastFrameIndex;
// Motion Vector
private Material m_XRMotionVectorMaterial;
// Builds the drawing settings used for the per-object motion vector renderer list.
private static DrawingSettings GetObjectMotionDrawingSettings(Camera camera)
{
    var sorting = new SortingSettings(camera) { criteria = SortingCriteria.CommonOpaque };

    // Notes: Usually, PerObjectData.MotionVectors will filter the renderer nodes to only draw moving objects.
    // In our case, we use forceAllMotionVectorObjects in the filteringSettings to draw idle objects as well to populate depth.
    var settings = new DrawingSettings(k_MotionOnlyShaderTagId, sorting)
    {
        perObjectData = PerObjectData.MotionVectors,
        enableDynamicBatching = false,
        enableInstancing = true,
    };
    settings.SetShaderPassName(0, k_MotionOnlyShaderTagId);
    return settings;
}
// Creates the renderer list for objects drawn by the 'XRMotionVectors' pass and
// stores its handle in the pass data.
private void InitObjectMotionRendererLists(ref PassData passData, ref CullingResults cullResults, RenderGraph renderGraph, Camera camera)
{
    var drawSettings = GetObjectMotionDrawingSettings(camera);

    // XRTODO: Extend RenderQueueRange.all to support transparent objects?
    // URP current' doesn't support this, missing motion override for transparent materials.
    var opaqueFiltering = new FilteringSettings(RenderQueueRange.opaque, camera.cullingMask)
    {
        // Also render game objects that are not moved since last frame to save depth prepass requirement for camera motion.
        forceAllMotionVectorObjects = true
    };

    var stateBlock = new RenderStateBlock(RenderStateMask.Nothing);
    RenderingUtils.CreateRendererListWithRenderStateBlock(renderGraph, ref cullResults, drawSettings, opaqueFiltering, stateBlock, ref passData.objMotionRendererList);
}
/// <summary>
/// Initialize the RenderGraph pass data.
/// </summary>
/// <param name="passData">Pass data to populate with matrices and the motion-vector material.</param>
/// <param name="cameraData">Camera data; currently unused pending late-latching support (see XRTODO below).</param>
private void InitPassData(ref PassData passData, UniversalCameraData cameraData)
{
    // XRTODO: Use XRSystem prevViewMatrix that is compatible with late latching. Currently blocked due to late latching engine side issue.
    //var gpuP0 = GL.GetGPUProjectionMatrix(cameraData.xr.GetProjMatrix(0), false);
    //var gpuP1 = GL.GetGPUProjectionMatrix(cameraData.xr.GetProjMatrix(1), false);
    //passData.viewProjectionStereo[0] = gpuP0 * cameraData.xr.GetViewMatrix(0);
    //passData.viewProjectionStereo[1] = gpuP1 * cameraData.xr.GetViewMatrix(1);
    //passData.previousViewProjectionStereo[0] = gpuP0 * cameraData.xr.GetPrevViewMatrix(0);
    //passData.previousViewProjectionStereo[1] = gpuP0 * cameraData.xr.GetPrevViewMatrix(1);
    // NOTE(review): the disabled line above uses gpuP0 for view 1 — looks like it should be gpuP1; verify before re-enabling.

    // Setup matrices and shader
    // NOTE: these assignments alias the pass-owned arrays (the arrays allocated by
    // PassData's initializers are replaced, not copied into).
    passData.previousViewProjectionStereo = m_PreviousViewProjection;
    passData.viewProjectionStereo = m_ViewProjection;

    // Setup camera motion material
    passData.xrMotionVector = m_XRMotionVectorMaterial;
}
/// <summary>
/// Import the XR motion color and depth targets into the RenderGraph.
/// </summary>
/// <param name="renderGraph"> RenderGraph receiving the imported textures. </param>
/// <param name="cameraData"> UniversalCameraData that holds XR pass data. </param>
private void ImportXRMotionColorAndDepth(RenderGraph renderGraph, UniversalCameraData cameraData)
{
    // Wrap the engine-provided motion vector render target in an RTHandle, reusing
    // the cached wrapper when the underlying target has not changed.
    var rtMotionId = cameraData.xr.motionVectorRenderTarget;
    if (m_XRMotionVectorColor == null)
        m_XRMotionVectorColor = RTHandles.Alloc(rtMotionId);
    else if (m_XRMotionVectorColor.nameID != rtMotionId)
        RTHandleStaticHelpers.SetRTHandleUserManagedWrapper(ref m_XRMotionVectorColor, rtMotionId);

    // ID is the same since a RenderTexture encapsulates all the attachments, including both color+depth.
    var depthId = cameraData.xr.motionVectorRenderTarget;
    if (m_XRMotionVectorDepth == null)
        m_XRMotionVectorDepth = RTHandles.Alloc(depthId);
    else if (m_XRMotionVectorDepth.nameID != depthId)
        RTHandleStaticHelpers.SetRTHandleUserManagedWrapper(ref m_XRMotionVectorDepth, depthId);

    // Describe the imported targets so the graph knows their size and formats.
    var targetDesc = cameraData.xr.motionVectorRenderTargetDesc;
    var importInfo = new RenderTargetInfo
    {
        width = targetDesc.width,
        height = targetDesc.height,
        volumeDepth = targetDesc.volumeDepth,
        msaaSamples = targetDesc.msaaSamples,
        format = targetDesc.graphicsFormat
    };

    // Depth shares everything with color except the format.
    var importInfoDepth = importInfo;
    importInfoDepth.format = targetDesc.depthStencilFormat;

    // Clear on first use, keep the contents after the last use (both attachments use
    // identical parameters, so a single struct is passed for both imports).
    var importParams = new ImportResourceParams
    {
        clearOnFirstUse = true,
        clearColor = Color.black,
        discardOnLastUse = false
    };

    xrMotionVectorColor = renderGraph.ImportTexture(m_XRMotionVectorColor, importInfo, importParams);
    xrMotionVectorDepth = renderGraph.ImportTexture(m_XRMotionVectorDepth, importInfoDepth, importParams);

    m_XRSpaceWarpRightHandedNDC = cameraData.xr.spaceWarpRightHandedNDC;
}
#region Recording
/// <summary>
/// Records the XR motion vector pass: imports the XR motion targets, draws per-object
/// motion vectors, then fills remaining pixels with camera motion.
/// </summary>
/// <param name="renderGraph"> RenderGraph used to record the pass. </param>
/// <param name="frameData"> Frame data providing rendering and camera state. </param>
internal void Render(RenderGraph renderGraph, ContextContainer frameData)
{
    UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
    UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();

    // XR should be enabled and single pass should be enabled.
    if (!cameraData.xr.enabled || !cameraData.xr.singlePassEnabled)
    {
        Debug.LogWarning("XRDepthMotionPass::Render is skipped because either XR is not enabled or singlepass rendering is not enabled.");
        return;
    }

    // XR motion vector pass should be enabled.
    if (!cameraData.xr.hasMotionVectorPass)
    {
        Debug.LogWarning("XRDepthMotionPass::Render is skipped because XR motion vector is not enabled for the current XRPass.");
        return;
    }

    // First, import XR motion color and depth targets into the RenderGraph.
    ImportXRMotionColorAndDepth(renderGraph, cameraData);

    // These flags are still required in SRP or the engine won't compute previous model matrices...
    // If the flag hasn't been set yet on this camera, motion vectors will skip a frame.
    cameraData.camera.depthTextureMode |= DepthTextureMode.MotionVectors | DepthTextureMode.Depth;

    // Start recording the pass.
    using (var builder = renderGraph.AddRasterRenderPass<PassData>("XR Motion Pass", out var passData, base.profilingSampler))
    {
        builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering);

        // Setup Color and Depth attachments.
        builder.SetRenderAttachment(xrMotionVectorColor, 0, AccessFlags.Write);
        builder.SetRenderAttachmentDepth(xrMotionVectorDepth, AccessFlags.Write);

        // Setup RendererList.
        InitObjectMotionRendererLists(ref passData, ref renderingData.cullResults, renderGraph, cameraData.camera);
        builder.UseRendererList(passData.objMotionRendererList);

        // Allow setting up the global matrix arrays.
        builder.AllowGlobalStateModification(true);

        // Setup rest of the passData.
        InitPassData(ref passData, cameraData);

        builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
        {
            // Setup camera stereo buffers.
            context.cmd.SetGlobalMatrixArray(ShaderPropertyId.previousViewProjectionNoJitterStereo, data.previousViewProjectionStereo);
            context.cmd.SetGlobalMatrixArray(ShaderPropertyId.viewProjectionNoJitterStereo, data.viewProjectionStereo);

            // SpaceWarp is only available on Vulkan, so these values are always true. This is to support 2 versions of spacewarp:
            // one expects OpenGL NDC space motion vectors, the other expects Vulkan NDC space.
            // NOTE(review): m_XRSpaceWarpRightHandedNDC is captured from the enclosing instance;
            // consider carrying it in PassData so the render func only reads `data` — TODO confirm
            // PassData can be extended (its declaration is elsewhere in this file).
            context.cmd.SetGlobalFloat(k_SpaceWarpNDCModifier, m_XRSpaceWarpRightHandedNDC ? -1.0f : 1.0f);

            // Object motion for both static and dynamic objects; fills stencil for mv-filled pixels.
            // Use the `data` parameter rather than capturing the recording-time local `passData`
            // in the closure (the rest of this lambda already reads through `data`).
            context.cmd.DrawRendererList(data.objMotionRendererList);

            // Fill mv texture with camera motion for pixels that don't have the mv stencil bit.
            context.cmd.DrawProcedural(Matrix4x4.identity, data.xrMotionVector, 0, MeshTopology.Triangles, 3, 1);
        });
    }
}
#endregion
// Resets the cached current/previous view-projection matrices to identity and
// invalidates the cached frame index so Update() recomputes on its next call.
private void ResetMotionData()
{
    for (int viewIndex = 0; viewIndex < k_XRViewCount; ++viewIndex)
    {
        m_PreviousViewProjection[viewIndex] = Matrix4x4.identity;
        m_ViewProjection[viewIndex] = Matrix4x4.identity;
    }

    m_LastFrameIndex = -1;
}
/// <summary>
/// Update XRDepthMotionPass to use the camera's view and projection matrices for motion vector calculation.
/// </summary>
/// <param name="cameraData"> The cameraData used for rendering to XR motion textures. </param>
public void Update(ref UniversalCameraData cameraData)
{
    if (!cameraData.xr.enabled || !cameraData.xr.singlePassEnabled)
    {
        Debug.LogWarning("XRDepthMotionPass::Update is skipped because either XR is not enabled or singlepass rendering is not enabled.");
        return;
    }

    // Only refresh the matrices once per frame, even if called multiple times.
    if (m_LastFrameIndex == Time.frameCount)
        return;

    // Shift the current matrices into the previous-frame slots, then recompute the
    // no-jitter view-projection for each eye using the GPU projection convention.
    m_PreviousViewProjection[0] = m_ViewProjection[0];
    m_PreviousViewProjection[1] = m_ViewProjection[1];
    m_ViewProjection[0] = GL.GetGPUProjectionMatrix(cameraData.GetProjectionMatrixNoJitter(0), renderIntoTexture: false) * cameraData.GetViewMatrix(0);
    m_ViewProjection[1] = GL.GetGPUProjectionMatrix(cameraData.GetProjectionMatrixNoJitter(1), renderIntoTexture: false) * cameraData.GetViewMatrix(1);

    m_LastFrameIndex = Time.frameCount;
}
/// <summary>
/// Cleans up resources used by the pass: the wrapped color/depth RTHandles and the
/// camera-motion material.
/// </summary>
public void Dispose()
{
    if (m_XRMotionVectorColor != null)
        m_XRMotionVectorColor.Release();

    if (m_XRMotionVectorDepth != null)
        m_XRMotionVectorDepth.Release();

    CoreUtils.Destroy(m_XRMotionVectorMaterial);
}
}
}
#endif

View File

@@ -0,0 +1,2 @@
fileFormatVersion: 2
guid: 56d37d33776d0f44586207d08936dd58

View File

@@ -0,0 +1,89 @@
#if ENABLE_VR && ENABLE_XR_MODULE
using System;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Experimental.Rendering;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Draw the XR occlusion mesh into the current depth buffer when XR is enabled.
/// </summary>
/// <summary>
/// Draw the XR occlusion mesh into the current depth buffer when XR is enabled.
/// </summary>
public class XROcclusionMeshPass : ScriptableRenderPass
{
    PassData m_PassData;

    /// <summary>
    /// Used to indicate if the active target of the pass is the back buffer
    /// </summary>
    public bool m_IsActiveTargetBackBuffer; // TODO: Remove this when we remove non-RG path

    /// <summary>
    /// Creates the occlusion mesh pass.
    /// </summary>
    /// <param name="evt"> Point in the frame at which the pass executes. </param>
    public XROcclusionMeshPass(RenderPassEvent evt)
    {
        profilingSampler = new ProfilingSampler("Draw XR Occlusion Mesh");
        renderPassEvent = evt;
        m_PassData = new PassData();
        m_IsActiveTargetBackBuffer = false;
    }

    // Draws the XR occlusion mesh when the current XRPass has a valid one.
    private static void ExecutePass(RasterCommandBuffer cmd, PassData data)
    {
        if (data.xr.hasValidOcclusionMesh)
        {
            // When rendering straight to the back buffer, restrict drawing to the XR viewport.
            if (data.isActiveTargetBackBuffer)
                cmd.SetViewport(data.xr.GetViewport());

            data.xr.RenderOcclusionMesh(cmd, renderIntoTexture: !data.isActiveTargetBackBuffer);
        }
    }

    /// <inheritdoc/>
    [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        m_PassData.xr = renderingData.cameraData.xr;
        m_PassData.isActiveTargetBackBuffer = m_IsActiveTargetBackBuffer;
        ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer), m_PassData);
    }

    private class PassData
    {
        internal XRPass xr;
        // NOTE(review): the two attachment handles below are stored but never read by
        // ExecutePass; presumably kept for render-graph resource tracking — confirm before removing.
        internal TextureHandle cameraColorAttachment;
        internal TextureHandle cameraDepthAttachment;
        internal bool isActiveTargetBackBuffer;
    }

    // Records the occlusion mesh pass into the render graph, binding the given color and
    // depth attachments as render targets.
    internal void Render(RenderGraph renderGraph, ContextContainer frameData, in TextureHandle cameraColorAttachment, in TextureHandle cameraDepthAttachment)
    {
        UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
        UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();

        using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, out var passData, profilingSampler))
        {
            passData.xr = cameraData.xr;
            passData.cameraColorAttachment = cameraColorAttachment;
            builder.SetRenderAttachment(cameraColorAttachment, 0);
            passData.cameraDepthAttachment = cameraDepthAttachment;
            builder.SetRenderAttachmentDepth(cameraDepthAttachment, AccessFlags.Write);
            passData.isActiveTargetBackBuffer = resourceData.isActiveTargetBackBuffer;

            builder.AllowGlobalStateModification(true);

            if (cameraData.xr.enabled)
            {
                // Foveation is only enabled when the pass targets the back buffer or the XR
                // implementation allows foveating intermediate passes.
                bool passSupportsFoveation = cameraData.xrUniversal.canFoveateIntermediatePasses || resourceData.isActiveTargetBackBuffer;
                builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && passSupportsFoveation);
            }

            builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
            {
                ExecutePass(context.cmd, data);
            });
            // (A redundant trailing `return;` was removed here — control falls out of the
            // using block regardless, and the statement only obscured the pass recording.)
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fc46a25201ce7e743bfdca7d07707357
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: