Singularity/Library/PackageCache/com.unity.render-pipelines..../Runtime/Passes/PostProcessPass.cs

using System.Runtime.CompilerServices;
using UnityEngine.Experimental.Rendering;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Interface implemented by volume components that drive post-processing effects in URP.
/// </summary>
public interface IPostProcessComponent
{
/// <summary>Returns true when the component's current settings produce a visible effect and the pass should run it.</summary>
bool IsActive();
/// <summary>Returns true when the effect is compatible with on-tile rendering (no fullscreen resolve required).</summary>
bool IsTileCompatible();
}
}
namespace UnityEngine.Rendering.Universal.Internal
{
// TODO: TAA
// TODO: Motion blur
/// <summary>
/// Renders the post-processing effect stack.
/// </summary>
public class PostProcessPass : ScriptableRenderPass
{
RenderTextureDescriptor m_Descriptor;
RenderTargetIdentifier m_Source;
RenderTargetHandle m_Destination;
RenderTargetHandle m_Depth;
RenderTargetHandle m_InternalLut;
const string k_RenderPostProcessingTag = "Render PostProcessing Effects";
const string k_RenderFinalPostProcessingTag = "Render Final PostProcessing Pass";
private static readonly ProfilingSampler m_ProfilingRenderPostProcessing = new ProfilingSampler(k_RenderPostProcessingTag);
private static readonly ProfilingSampler m_ProfilingRenderFinalPostProcessing = new ProfilingSampler(k_RenderFinalPostProcessingTag);
MaterialLibrary m_Materials;
PostProcessData m_Data;
// Builtin effects settings
DepthOfField m_DepthOfField;
MotionBlur m_MotionBlur;
PaniniProjection m_PaniniProjection;
Bloom m_Bloom;
LensDistortion m_LensDistortion;
ChromaticAberration m_ChromaticAberration;
Vignette m_Vignette;
ColorLookup m_ColorLookup;
ColorAdjustments m_ColorAdjustments;
Tonemapping m_Tonemapping;
FilmGrain m_FilmGrain;
// Misc
const int k_MaxPyramidSize = 16;
readonly GraphicsFormat m_DefaultHDRFormat;
bool m_UseRGBM;
readonly GraphicsFormat m_SMAAEdgeFormat;
readonly GraphicsFormat m_GaussianCoCFormat;
bool m_ResetHistory;
int m_DitheringTextureIndex;
RenderTargetIdentifier[] m_MRT2;
Vector4[] m_BokehKernel;
int m_BokehHash;
// Needed if the device changes its render target width/height (e.g. mobile platforms allow orientation changes)
float m_BokehMaxRadius;
float m_BokehRCPAspect;
// True when this is the very last pass in the pipeline
bool m_IsFinalPass;
// If there's a final post process pass after this pass.
// If yes, Film Grain and Dithering are set up in the final pass; otherwise they are set up in this pass.
bool m_HasFinalPass;
// Some Android devices do not support sRGB backbuffer
// We need to do the conversion manually on those
bool m_EnableSRGBConversionIfNeeded;
// Option to use procedural draw instead of cmd.blit
bool m_UseDrawProcedural;
// Use Fast conversions between SRGB and Linear
bool m_UseFastSRGBLinearConversion;
// Blit to screen or color frontbuffer at the end
bool m_ResolveToScreen;
// Renderer is using swapbuffer system
bool m_UseSwapBuffer;
Material m_BlitMaterial;
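/// <summary>
/// Creates the post-processing pass: builds the material library from <paramref name="data"/>,
/// pre-selects render texture formats (HDR, SMAA edge, gaussian CoC) and pre-allocates the bloom mip shader property IDs.
/// </summary>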
public PostProcessPass(RenderPassEvent evt, PostProcessData data, Material blitMaterial)
{
base.profilingSampler = new ProfilingSampler(nameof(PostProcessPass));
renderPassEvent = evt;
m_Data = data;
m_Materials = new MaterialLibrary(data);
m_BlitMaterial = blitMaterial;
// Texture format pre-lookup
if (SystemInfo.IsFormatSupported(GraphicsFormat.B10G11R11_UFloatPack32, FormatUsage.Linear | FormatUsage.Render))
{
m_DefaultHDRFormat = GraphicsFormat.B10G11R11_UFloatPack32;
m_UseRGBM = false;
}
else
{
m_DefaultHDRFormat = QualitySettings.activeColorSpace == ColorSpace.Linear
? GraphicsFormat.R8G8B8A8_SRGB
: GraphicsFormat.R8G8B8A8_UNorm;
m_UseRGBM = true;
}
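// RGBM packs HDR values into an 8-bit RGBA target by storing a shared range multiplier in the alpha
// channel; it's the fallback encoding used when no proper HDR color format is available.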
// Only two components are needed for edge render texture, but on some vendors four components may be faster.
if (SystemInfo.IsFormatSupported(GraphicsFormat.R8G8_UNorm, FormatUsage.Render) && SystemInfo.graphicsDeviceVendor.ToLowerInvariant().Contains("arm"))
m_SMAAEdgeFormat = GraphicsFormat.R8G8_UNorm;
else
m_SMAAEdgeFormat = GraphicsFormat.R8G8B8A8_UNorm;
if (SystemInfo.IsFormatSupported(GraphicsFormat.R16_UNorm, FormatUsage.Linear | FormatUsage.Render))
m_GaussianCoCFormat = GraphicsFormat.R16_UNorm;
else if (SystemInfo.IsFormatSupported(GraphicsFormat.R16_SFloat, FormatUsage.Linear | FormatUsage.Render))
m_GaussianCoCFormat = GraphicsFormat.R16_SFloat;
else // Expect CoC banding
m_GaussianCoCFormat = GraphicsFormat.R8_UNorm;
// Bloom pyramid shader ids - can't use a simple stackalloc in the bloom function as we
// unfortunately need to allocate strings
ShaderConstants._BloomMipUp = new int[k_MaxPyramidSize];
ShaderConstants._BloomMipDown = new int[k_MaxPyramidSize];
for (int i = 0; i < k_MaxPyramidSize; i++)
{
ShaderConstants._BloomMipUp[i] = Shader.PropertyToID("_BloomMipUp" + i);
ShaderConstants._BloomMipDown[i] = Shader.PropertyToID("_BloomMipDown" + i);
}
m_MRT2 = new RenderTargetIdentifier[2];
m_ResetHistory = true;
base.useNativeRenderPass = false;
}
public void Cleanup() => m_Materials.Cleanup();
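/// <summary>
/// Configures the pass for the swap-buffer path: input is read from the renderer's color buffer and,
/// when <paramref name="resolveToScreen"/> is true, the result is resolved to the camera target.
/// </summary>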
public void Setup(in RenderTextureDescriptor baseDescriptor, in RenderTargetHandle source, bool resolveToScreen, in RenderTargetHandle depth, in RenderTargetHandle internalLut, bool hasFinalPass, bool enableSRGBConversion)
{
m_Descriptor = baseDescriptor;
m_Descriptor.useMipMap = false;
m_Descriptor.autoGenerateMips = false;
m_Source = source.id;
m_Depth = depth;
m_InternalLut = internalLut;
m_IsFinalPass = false;
m_HasFinalPass = hasFinalPass;
m_EnableSRGBConversionIfNeeded = enableSRGBConversion;
m_ResolveToScreen = resolveToScreen;
m_Destination = RenderTargetHandle.CameraTarget;
m_UseSwapBuffer = true;
}
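/// <summary>
/// Configures the pass for the explicit-destination path (no swap buffer): the result is written to
/// <paramref name="destination"/>, or to a temporary target allocated in OnCameraSetup when needed.
/// </summary>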
public void Setup(in RenderTextureDescriptor baseDescriptor, in RenderTargetHandle source, RenderTargetHandle destination, in RenderTargetHandle depth, in RenderTargetHandle internalLut, bool hasFinalPass, bool enableSRGBConversion)
{
m_Descriptor = baseDescriptor;
m_Descriptor.useMipMap = false;
m_Descriptor.autoGenerateMips = false;
m_Source = source.id;
m_Destination = destination;
m_Depth = depth;
m_InternalLut = internalLut;
m_IsFinalPass = false;
m_HasFinalPass = hasFinalPass;
m_EnableSRGBConversionIfNeeded = enableSRGBConversion;
m_UseSwapBuffer = false;
}
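/// <summary>
/// Configures the pass as the very last pass in the frame: only film grain, dithering, sRGB conversion,
/// FXAA and upscaling are applied before the blit to the camera target.
/// </summary>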
public void SetupFinalPass(in RenderTargetHandle source, bool useSwapBuffer = false)
{
m_Source = source.id;
m_Destination = RenderTargetHandle.CameraTarget;
m_IsFinalPass = true;
m_HasFinalPass = false;
m_EnableSRGBConversionIfNeeded = true;
m_UseSwapBuffer = useSwapBuffer;
}
/// <inheritdoc/>
public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
{
overrideCameraTarget = true;
if (m_Destination == RenderTargetHandle.CameraTarget)
return;
// If RenderTargetHandle already has a valid internal render target identifier, we shouldn't request a temp
if (m_Destination.HasInternalRenderTargetId())
return;
var desc = GetCompatibleDescriptor();
desc.depthBufferBits = 0;
cmd.GetTemporaryRT(m_Destination.id, desc, FilterMode.Point);
}
/// <inheritdoc/>
public override void OnCameraCleanup(CommandBuffer cmd)
{
if (m_Destination == RenderTargetHandle.CameraTarget)
return;
// Logic here matches the if check in OnCameraSetup
if (m_Destination.HasInternalRenderTargetId())
return;
cmd.ReleaseTemporaryRT(m_Destination.id);
}
public void ResetHistory()
{
m_ResetHistory = true;
}
public bool CanRunOnTile()
{
// Check builtin & user effects here
return false;
}
/// <inheritdoc/>
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
// Start by pre-fetching all builtin effect settings we need
// Some of the color-grading settings are only used in the color grading lut pass
var stack = VolumeManager.instance.stack;
m_DepthOfField = stack.GetComponent<DepthOfField>();
m_MotionBlur = stack.GetComponent<MotionBlur>();
m_PaniniProjection = stack.GetComponent<PaniniProjection>();
m_Bloom = stack.GetComponent<Bloom>();
m_LensDistortion = stack.GetComponent<LensDistortion>();
m_ChromaticAberration = stack.GetComponent<ChromaticAberration>();
m_Vignette = stack.GetComponent<Vignette>();
m_ColorLookup = stack.GetComponent<ColorLookup>();
m_ColorAdjustments = stack.GetComponent<ColorAdjustments>();
m_Tonemapping = stack.GetComponent<Tonemapping>();
m_FilmGrain = stack.GetComponent<FilmGrain>();
m_UseDrawProcedural = renderingData.cameraData.xr.enabled;
m_UseFastSRGBLinearConversion = renderingData.postProcessingData.useFastSRGBLinearConversion;
if (m_IsFinalPass)
{
var cmd = CommandBufferPool.Get();
using (new ProfilingScope(cmd, m_ProfilingRenderFinalPostProcessing))
{
RenderFinalPass(cmd, ref renderingData);
}
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
else if (CanRunOnTile())
{
// TODO: Add a fast render path if only on-tile compatible effects are used and we're actually running on a platform that supports it
// Note: we can still work on-tile if FXAA is enabled, it'd be part of the final pass
}
else
{
// Regular render path (not on-tile) - we do everything in a single command buffer as it
// makes it easier to manage temporary targets' lifetime
var cmd = CommandBufferPool.Get();
using (new ProfilingScope(cmd, m_ProfilingRenderPostProcessing))
{
Render(cmd, ref renderingData);
}
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
m_ResetHistory = false;
}
RenderTextureDescriptor GetCompatibleDescriptor()
=> GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, m_Descriptor.graphicsFormat);
RenderTextureDescriptor GetCompatibleDescriptor(int width, int height, GraphicsFormat format, int depthBufferBits = 0)
{
var desc = m_Descriptor;
desc.depthBufferBits = depthBufferBits;
desc.msaaSamples = 1;
desc.width = width;
desc.height = height;
desc.graphicsFormat = format;
return desc;
}
bool RequireSRGBConversionBlitToBackBuffer(CameraData cameraData)
{
return cameraData.requireSrgbConversion && m_EnableSRGBConversionIfNeeded;
}
private new void Blit(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, Material material, int passIndex = 0)
{
cmd.SetGlobalTexture(ShaderPropertyId.sourceTex, source);
if (m_UseDrawProcedural)
{
Vector4 scaleBias = new Vector4(1, 1, 0, 0);
cmd.SetGlobalVector(ShaderPropertyId.scaleBias, scaleBias);
cmd.SetRenderTarget(new RenderTargetIdentifier(destination, 0, CubemapFace.Unknown, -1),
RenderBufferLoadAction.Load, RenderBufferStoreAction.Store, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store);
cmd.DrawProcedural(Matrix4x4.identity, material, passIndex, MeshTopology.Quads, 4, 1, null);
}
else
{
cmd.Blit(source, destination, material, passIndex);
}
}
private void DrawFullscreenMesh(CommandBuffer cmd, Material material, int passIndex)
{
if (m_UseDrawProcedural)
{
Vector4 scaleBias = new Vector4(1, 1, 0, 0);
cmd.SetGlobalVector(ShaderPropertyId.scaleBias, scaleBias);
cmd.DrawProcedural(Matrix4x4.identity, material, passIndex, MeshTopology.Quads, 4, 1, null);
}
else
{
cmd.DrawMesh(RenderingUtils.fullscreenMesh, Matrix4x4.identity, material, 0, passIndex);
}
}
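// Main (non-final) render path: runs the standalone effects (stop-NaN, SMAA, depth of field, motion blur,
// Panini projection, lens flares) by ping-ponging between color targets, then composites bloom, lens
// distortion, chromatic aberration, vignette, color grading, grain and dithering in a single uber-shader blit.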
void Render(CommandBuffer cmd, ref RenderingData renderingData)
{
ref CameraData cameraData = ref renderingData.cameraData;
ref ScriptableRenderer renderer = ref cameraData.renderer;
bool isSceneViewCamera = cameraData.isSceneViewCamera;
// Check the number of swaps we have to do.
// We blit back and forth without MSAA until the last blit.
bool useStopNan = cameraData.isStopNaNEnabled && m_Materials.stopNaN != null;
bool useSubPixelMorphAA = cameraData.antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing && SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;
var dofMaterial = m_DepthOfField.mode.value == DepthOfFieldMode.Gaussian ? m_Materials.gaussianDepthOfField : m_Materials.bokehDepthOfField;
bool useDepthOfField = m_DepthOfField.IsActive() && !isSceneViewCamera && dofMaterial != null;
bool useLensFlare = !LensFlareCommonSRP.Instance.IsEmpty();
bool useMotionBlur = m_MotionBlur.IsActive() && !isSceneViewCamera;
bool usePaniniProjection = m_PaniniProjection.IsActive() && !isSceneViewCamera;
int amountOfPassesRemaining = (useStopNan ? 1 : 0) + (useSubPixelMorphAA ? 1 : 0) + (useDepthOfField ? 1 : 0) + (useLensFlare ? 1 : 0) + (useMotionBlur ? 1 : 0) + (usePaniniProjection ? 1 : 0);
if (m_UseSwapBuffer && amountOfPassesRemaining > 0)
{
renderer.EnableSwapBufferMSAA(false);
}
// Don't use these directly unless you have a good reason to; use GetSource() and
// GetDestination() instead
bool tempTargetUsed = false;
bool tempTarget2Used = false;
RenderTargetIdentifier source = m_UseSwapBuffer ? renderer.cameraColorTarget : m_Source;
RenderTargetIdentifier destination = m_UseSwapBuffer ? renderer.GetCameraColorFrontBuffer(cmd) : -1;
RenderTargetIdentifier GetSource() => source;
RenderTargetIdentifier GetDestination()
{
if (m_UseSwapBuffer)
return destination;
else
{
if (destination == -1)
{
cmd.GetTemporaryRT(ShaderConstants._TempTarget, GetCompatibleDescriptor(), FilterMode.Bilinear);
destination = ShaderConstants._TempTarget;
tempTargetUsed = true;
}
else if (destination == m_Source && m_Descriptor.msaaSamples > 1)
{
// Avoid using m_Source.id as new destination, it may come with a depth buffer that we don't want, may have MSAA that we don't want etc
cmd.GetTemporaryRT(ShaderConstants._TempTarget2, GetCompatibleDescriptor(), FilterMode.Bilinear);
destination = ShaderConstants._TempTarget2;
tempTarget2Used = true;
}
return destination;
}
}
void Swap(ref ScriptableRenderer r)
{
--amountOfPassesRemaining;
if (m_UseSwapBuffer)
{
// We want the last blit to write to the MSAA-enabled swap buffer
if (amountOfPassesRemaining == 0 && !m_HasFinalPass)
{
r.EnableSwapBufferMSAA(true);
}
r.SwapColorBuffer(cmd);
source = r.cameraColorTarget;
destination = r.GetCameraColorFrontBuffer(cmd);
}
else
{
CoreUtils.Swap(ref source, ref destination);
}
}
// Setup projection matrix for cmd.DrawMesh()
cmd.SetGlobalMatrix(ShaderConstants._FullscreenProjMat, GL.GetGPUProjectionMatrix(Matrix4x4.identity, true));
// Optional NaN killer before post-processing kicks in
// stopNaN may be null on Adreno 3xx: those GPUs report SystemInfo.graphicsShaderLevel as 35 but don't fully support shader level 3.5.
if (useStopNan)
{
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.StopNaNs)))
{
RenderingUtils.Blit(
cmd, GetSource(), GetDestination(), m_Materials.stopNaN, 0, m_UseDrawProcedural,
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store,
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
Swap(ref renderer);
}
}
// Anti-aliasing
if (useSubPixelMorphAA)
{
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.SMAA)))
{
DoSubpixelMorphologicalAntialiasing(ref cameraData, cmd, GetSource(), GetDestination());
Swap(ref renderer);
}
}
// Depth of Field
// Adreno 3xx SystemInfo.graphicsShaderLevel is 35, but instancing support is disabled due to buggy drivers.
// DOF shader uses #pragma target 3.5 which adds requirement for instancing support, thus marking the shader unsupported on those devices.
if (useDepthOfField)
{
var markerName = m_DepthOfField.mode.value == DepthOfFieldMode.Gaussian
? URPProfileId.GaussianDepthOfField
: URPProfileId.BokehDepthOfField;
using (new ProfilingScope(cmd, ProfilingSampler.Get(markerName)))
{
DoDepthOfField(cameraData.camera, cmd, GetSource(), GetDestination(), cameraData.pixelRect);
Swap(ref renderer);
}
}
// Motion blur
if (useMotionBlur)
{
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.MotionBlur)))
{
DoMotionBlur(cameraData, cmd, GetSource(), GetDestination());
Swap(ref renderer);
}
}
// Panini projection is done as a fullscreen pass after all depth-based effects are done
// and before bloom kicks in
if (usePaniniProjection)
{
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.PaniniProjection)))
{
DoPaniniProjection(cameraData.camera, cmd, GetSource(), GetDestination());
Swap(ref renderer);
}
}
// Lens Flare
if (useLensFlare)
{
bool usePanini;
float paniniDistance;
float paniniCropToFit;
if (m_PaniniProjection.IsActive())
{
usePanini = true;
paniniDistance = m_PaniniProjection.distance.value;
paniniCropToFit = m_PaniniProjection.cropToFit.value;
}
else
{
usePanini = false;
paniniDistance = 1.0f;
paniniCropToFit = 1.0f;
}
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.LensFlareDataDriven)))
{
DoLensFlareDatadriven(cameraData.camera, cmd, GetSource(), usePanini, paniniDistance, paniniCropToFit);
}
}
// Combined post-processing stack
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.UberPostProcess)))
{
// Reset uber keywords
m_Materials.uber.shaderKeywords = null;
// Bloom goes first
bool bloomActive = m_Bloom.IsActive();
if (bloomActive)
{
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.Bloom)))
SetupBloom(cmd, GetSource(), m_Materials.uber);
}
// Setup other effects constants
SetupLensDistortion(m_Materials.uber, isSceneViewCamera);
SetupChromaticAberration(m_Materials.uber);
SetupVignette(m_Materials.uber);
SetupColorGrading(cmd, ref renderingData, m_Materials.uber);
// Only apply dithering & grain if there isn't a final pass.
SetupGrain(cameraData, m_Materials.uber);
SetupDithering(cameraData, m_Materials.uber);
if (RequireSRGBConversionBlitToBackBuffer(cameraData))
m_Materials.uber.EnableKeyword(ShaderKeywordStrings.LinearToSRGBConversion);
if (m_UseFastSRGBLinearConversion)
{
m_Materials.uber.EnableKeyword(ShaderKeywordStrings.UseFastSRGBLinearConversion);
}
GetActiveDebugHandler(renderingData)?.UpdateShaderGlobalPropertiesForFinalValidationPass(cmd, ref cameraData, !m_HasFinalPass);
// Done with Uber, blit it
cmd.SetGlobalTexture(ShaderPropertyId.sourceTex, GetSource());
var colorLoadAction = RenderBufferLoadAction.DontCare;
if (m_Destination == RenderTargetHandle.CameraTarget && !cameraData.isDefaultViewport)
colorLoadAction = RenderBufferLoadAction.Load;
RenderTargetIdentifier targetDestination = m_UseSwapBuffer ? destination : m_Destination.id;
// Note: When rendering to the "camera target" we need to get cameraData.targetTexture, as this gives the targetTexture of the camera stack.
// Overlay cameras need to output to the target described in the base camera while doing camera stack.
RenderTargetHandle cameraTargetHandle = RenderTargetHandle.GetCameraTarget(cameraData.xr);
RenderTargetIdentifier cameraTarget = (cameraData.targetTexture != null && !cameraData.xr.enabled) ? new RenderTargetIdentifier(cameraData.targetTexture) : cameraTargetHandle.Identifier();
// With camera stacking we don't always resolve post-processing to the final screen, as we might run post-processing in the middle of the stack.
if (m_UseSwapBuffer)
{
cameraTarget = (m_ResolveToScreen) ? cameraTarget : targetDestination;
}
else
{
cameraTarget = (m_Destination == RenderTargetHandle.CameraTarget) ? cameraTarget : m_Destination.Identifier();
m_ResolveToScreen = cameraData.resolveFinalTarget || (m_Destination == cameraTargetHandle || m_HasFinalPass == true);
}
#if ENABLE_VR && ENABLE_XR_MODULE
if (cameraData.xr.enabled)
{
cmd.SetRenderTarget(new RenderTargetIdentifier(cameraTarget, 0, CubemapFace.Unknown, -1),
colorLoadAction, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
bool isRenderToBackBufferTarget = cameraTarget == cameraData.xr.renderTarget && !cameraData.xr.renderTargetIsRenderTexture;
if (isRenderToBackBufferTarget)
cmd.SetViewport(cameraData.pixelRect);
// We y-flip if
// 1) we are blitting from a render texture to the back buffer and
// 2) the render texture starts UV at the top
bool yflip = isRenderToBackBufferTarget && SystemInfo.graphicsUVStartsAtTop;
Vector4 scaleBias = yflip ? new Vector4(1, -1, 0, 1) : new Vector4(1, 1, 0, 0);
cmd.SetGlobalVector(ShaderPropertyId.scaleBias, scaleBias);
cmd.DrawProcedural(Matrix4x4.identity, m_Materials.uber, 0, MeshTopology.Quads, 4, 1, null);
//TODO: Implement swapbuffer in 2DRenderer so we can remove this
// For now, when rendering post-processing in the middle of the camera stack (not resolving to screen)
// we do an extra blit to ping-pong results back to the color texture. In the future we should allow a swap of the current active color texture
// in the pipeline to avoid this extra blit.
if (!m_ResolveToScreen && !m_UseSwapBuffer)
{
cmd.SetGlobalTexture(ShaderPropertyId.sourceTex, cameraTarget);
cmd.SetRenderTarget(new RenderTargetIdentifier(m_Source, 0, CubemapFace.Unknown, -1),
colorLoadAction, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
scaleBias = new Vector4(1, 1, 0, 0);
cmd.SetGlobalVector(ShaderPropertyId.scaleBias, scaleBias);
cmd.DrawProcedural(Matrix4x4.identity, m_BlitMaterial, 0, MeshTopology.Quads, 4, 1, null);
}
}
else
#endif
{
cmd.SetRenderTarget(cameraTarget, colorLoadAction, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
cameraData.renderer.ConfigureCameraTarget(cameraTarget, cameraTarget);
cmd.SetViewProjectionMatrices(Matrix4x4.identity, Matrix4x4.identity);
if ((m_Destination == RenderTargetHandle.CameraTarget && !m_UseSwapBuffer) || (m_ResolveToScreen && m_UseSwapBuffer))
cmd.SetViewport(cameraData.pixelRect);
cmd.DrawMesh(RenderingUtils.fullscreenMesh, Matrix4x4.identity, m_Materials.uber);
// TODO: Implement swapbuffer in 2DRenderer so we can remove this
// For now, when rendering post-processing in the middle of the camera stack (not resolving to screen)
// we do an extra blit to ping-pong results back to the color texture. In the future we should allow a swap of the current active color texture
// in the pipeline to avoid this extra blit.
if (!m_ResolveToScreen && !m_UseSwapBuffer)
{
cmd.SetGlobalTexture(ShaderPropertyId.sourceTex, cameraTarget);
cmd.SetRenderTarget(m_Source, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
cmd.DrawMesh(RenderingUtils.fullscreenMesh, Matrix4x4.identity, m_BlitMaterial);
}
cmd.SetViewProjectionMatrices(cameraData.camera.worldToCameraMatrix, cameraData.camera.projectionMatrix);
}
if (m_UseSwapBuffer && !m_ResolveToScreen)
{
renderer.SwapColorBuffer(cmd);
}
// Cleanup
if (bloomActive)
cmd.ReleaseTemporaryRT(ShaderConstants._BloomMipUp[0]);
if (tempTargetUsed)
cmd.ReleaseTemporaryRT(ShaderConstants._TempTarget);
if (tempTarget2Used)
cmd.ReleaseTemporaryRT(ShaderConstants._TempTarget2);
}
}
private BuiltinRenderTextureType BlitDstDiscardContent(CommandBuffer cmd, RenderTargetIdentifier rt)
{
// We set depth to DontCare because rt might be the source of PostProcessing used as a temporary target
// Source typically comes with a depth buffer and right now we don't have a way to only bind the color attachment of a RenderTargetIdentifier
cmd.SetRenderTarget(new RenderTargetIdentifier(rt, 0, CubemapFace.Unknown, -1),
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store,
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
return BuiltinRenderTextureType.CurrentActive;
}
#region Sub-pixel Morphological Anti-aliasing
void DoSubpixelMorphologicalAntialiasing(ref CameraData cameraData, CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination)
{
var camera = cameraData.camera;
var pixelRect = cameraData.pixelRect;
var material = m_Materials.subpixelMorphologicalAntialiasing;
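// Edge pixels are tagged with this stencil bit by the edge-detection pass so the blend-weight pass
// can be restricted to them through stencil testing.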
const int kStencilBit = 64;
// Globals
material.SetVector(ShaderConstants._Metrics, new Vector4(1f / m_Descriptor.width, 1f / m_Descriptor.height, m_Descriptor.width, m_Descriptor.height));
material.SetTexture(ShaderConstants._AreaTexture, m_Data.textures.smaaAreaTex);
material.SetTexture(ShaderConstants._SearchTexture, m_Data.textures.smaaSearchTex);
material.SetFloat(ShaderConstants._StencilRef, (float)kStencilBit);
material.SetFloat(ShaderConstants._StencilMask, (float)kStencilBit);
// Quality presets
material.shaderKeywords = null;
switch (cameraData.antialiasingQuality)
{
case AntialiasingQuality.Low:
material.EnableKeyword(ShaderKeywordStrings.SmaaLow);
break;
case AntialiasingQuality.Medium:
material.EnableKeyword(ShaderKeywordStrings.SmaaMedium);
break;
case AntialiasingQuality.High:
material.EnableKeyword(ShaderKeywordStrings.SmaaHigh);
break;
}
// Intermediate targets
RenderTargetIdentifier stencil; // We would only need stencil, no depth. But Unity doesn't support that.
int tempDepthBits;
if (m_Depth == RenderTargetHandle.CameraTarget || m_Descriptor.msaaSamples > 1)
{
// In case m_Depth is CameraTarget it may refer to the backbuffer and we can't use that as an attachment on all platforms
stencil = ShaderConstants._EdgeTexture;
tempDepthBits = 24;
}
else
{
stencil = m_Depth.Identifier();
tempDepthBits = 0;
}
cmd.GetTemporaryRT(ShaderConstants._EdgeTexture, GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, m_SMAAEdgeFormat, tempDepthBits), FilterMode.Bilinear);
cmd.GetTemporaryRT(ShaderConstants._BlendTexture, GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, GraphicsFormat.R8G8B8A8_UNorm), FilterMode.Point);
// Prepare for manual blit
cmd.SetViewProjectionMatrices(Matrix4x4.identity, Matrix4x4.identity);
cmd.SetViewport(pixelRect);
// Pass 1: Edge detection
cmd.SetRenderTarget(new RenderTargetIdentifier(ShaderConstants._EdgeTexture, 0, CubemapFace.Unknown, -1),
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, stencil,
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store);
cmd.ClearRenderTarget(RTClearFlags.ColorStencil, Color.clear, 1.0f, 0);
cmd.SetGlobalTexture(ShaderConstants._ColorTexture, source);
DrawFullscreenMesh(cmd, material, 0);
// Pass 2: Blend weights
cmd.SetRenderTarget(new RenderTargetIdentifier(ShaderConstants._BlendTexture, 0, CubemapFace.Unknown, -1),
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, stencil,
RenderBufferLoadAction.Load, RenderBufferStoreAction.DontCare);
cmd.ClearRenderTarget(false, true, Color.clear);
cmd.SetGlobalTexture(ShaderConstants._ColorTexture, ShaderConstants._EdgeTexture);
DrawFullscreenMesh(cmd, material, 1);
// Pass 3: Neighborhood blending
cmd.SetRenderTarget(new RenderTargetIdentifier(destination, 0, CubemapFace.Unknown, -1),
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store,
RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
cmd.SetGlobalTexture(ShaderConstants._ColorTexture, source);
cmd.SetGlobalTexture(ShaderConstants._BlendTexture, ShaderConstants._BlendTexture);
DrawFullscreenMesh(cmd, material, 2);
// Cleanup
cmd.ReleaseTemporaryRT(ShaderConstants._EdgeTexture);
cmd.ReleaseTemporaryRT(ShaderConstants._BlendTexture);
cmd.SetViewProjectionMatrices(camera.worldToCameraMatrix, camera.projectionMatrix);
}
#endregion
#region Depth Of Field
// TODO: CoC reprojection once TAA gets in LW
// TODO: Proper LDR/gamma support
void DoDepthOfField(Camera camera, CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, Rect pixelRect)
{
if (m_DepthOfField.mode.value == DepthOfFieldMode.Gaussian)
DoGaussianDepthOfField(camera, cmd, source, destination, pixelRect);
else if (m_DepthOfField.mode.value == DepthOfFieldMode.Bokeh)
DoBokehDepthOfField(cmd, source, destination, pixelRect);
}
void DoGaussianDepthOfField(Camera camera, CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, Rect pixelRect)
{
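// Pipeline: compute a full-resolution CoC, downscale & prefilter color + CoC to half resolution (MRT),
// run a two-pass separable blur (ping/pong), then composite the blurred result back over the source
// using the full-resolution CoC.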
int downSample = 2;
var material = m_Materials.gaussianDepthOfField;
int wh = m_Descriptor.width / downSample;
int hh = m_Descriptor.height / downSample;
float farStart = m_DepthOfField.gaussianStart.value;
float farEnd = Mathf.Max(farStart, m_DepthOfField.gaussianEnd.value);
// Assumes a radius of 1 is 1 at 1080p
// Past a certain radius our gaussian kernel will look very bad so we'll clamp it for
// very high resolutions (4K+).
float maxRadius = m_DepthOfField.gaussianMaxRadius.value * (wh / 1080f);
maxRadius = Mathf.Min(maxRadius, 2f);
CoreUtils.SetKeyword(material, ShaderKeywordStrings.HighQualitySampling, m_DepthOfField.highQualitySampling.value);
material.SetVector(ShaderConstants._CoCParams, new Vector3(farStart, farEnd, maxRadius));
// Temporary textures
cmd.GetTemporaryRT(ShaderConstants._FullCoCTexture, GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, m_GaussianCoCFormat), FilterMode.Bilinear);
cmd.GetTemporaryRT(ShaderConstants._HalfCoCTexture, GetCompatibleDescriptor(wh, hh, m_GaussianCoCFormat), FilterMode.Bilinear);
cmd.GetTemporaryRT(ShaderConstants._PingTexture, GetCompatibleDescriptor(wh, hh, m_DefaultHDRFormat), FilterMode.Bilinear);
cmd.GetTemporaryRT(ShaderConstants._PongTexture, GetCompatibleDescriptor(wh, hh, m_DefaultHDRFormat), FilterMode.Bilinear);
// Note: fresh temporary RTs don't require an explicit RenderBufferLoadAction.DontCare; it's only needed when they are reused (such as PingTexture)
PostProcessUtils.SetSourceSize(cmd, m_Descriptor);
cmd.SetGlobalVector(ShaderConstants._DownSampleScaleFactor, new Vector4(1.0f / downSample, 1.0f / downSample, downSample, downSample));
// Compute CoC
Blit(cmd, source, ShaderConstants._FullCoCTexture, material, 0);
// Downscale & prefilter color + coc
m_MRT2[0] = ShaderConstants._HalfCoCTexture;
m_MRT2[1] = ShaderConstants._PingTexture;
cmd.SetViewProjectionMatrices(Matrix4x4.identity, Matrix4x4.identity);
cmd.SetViewport(pixelRect);
cmd.SetGlobalTexture(ShaderConstants._ColorTexture, source);
cmd.SetGlobalTexture(ShaderConstants._FullCoCTexture, ShaderConstants._FullCoCTexture);
cmd.SetRenderTarget(m_MRT2, ShaderConstants._HalfCoCTexture, 0, CubemapFace.Unknown, -1);
DrawFullscreenMesh(cmd, material, 1);
cmd.SetViewProjectionMatrices(camera.worldToCameraMatrix, camera.projectionMatrix);
// Blur
cmd.SetGlobalTexture(ShaderConstants._HalfCoCTexture, ShaderConstants._HalfCoCTexture);
Blit(cmd, ShaderConstants._PingTexture, ShaderConstants._PongTexture, material, 2);
Blit(cmd, ShaderConstants._PongTexture, BlitDstDiscardContent(cmd, ShaderConstants._PingTexture), material, 3);
// Composite
cmd.SetGlobalTexture(ShaderConstants._ColorTexture, ShaderConstants._PingTexture);
cmd.SetGlobalTexture(ShaderConstants._FullCoCTexture, ShaderConstants._FullCoCTexture);
Blit(cmd, source, BlitDstDiscardContent(cmd, destination), material, 4);
// Cleanup
cmd.ReleaseTemporaryRT(ShaderConstants._FullCoCTexture);
cmd.ReleaseTemporaryRT(ShaderConstants._HalfCoCTexture);
cmd.ReleaseTemporaryRT(ShaderConstants._PingTexture);
cmd.ReleaseTemporaryRT(ShaderConstants._PongTexture);
}
void PrepareBokehKernel(float maxRadius, float rcpAspect)
{
const int kRings = 4;
const int kPointsPerRing = 7;
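// Rings 1..3 hold 7, 14 and 21 samples respectively (ring * kPointsPerRing), which is why 42 entries
// are allocated for m_BokehKernel below.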
// Check the existing array
if (m_BokehKernel == null)
m_BokehKernel = new Vector4[42];
// Fill in sample points (concentric circles transformed to rotated N-Gon)
int idx = 0;
float bladeCount = m_DepthOfField.bladeCount.value;
float curvature = 1f - m_DepthOfField.bladeCurvature.value;
float rotation = m_DepthOfField.bladeRotation.value * Mathf.Deg2Rad;
const float PI = Mathf.PI;
const float TWO_PI = Mathf.PI * 2f;
for (int ring = 1; ring < kRings; ring++)
{
float bias = 1f / kPointsPerRing;
float radius = (ring + bias) / (kRings - 1f + bias);
int points = ring * kPointsPerRing;
for (int point = 0; point < points; point++)
{
// Angle on ring
float phi = 2f * PI * point / points;
// Transform to rotated N-Gon
// Adapted from "CryEngine 3 Graphics Gems" [Sousa13]
float nt = Mathf.Cos(PI / bladeCount);
float dt = Mathf.Cos(phi - (TWO_PI / bladeCount) * Mathf.Floor((bladeCount * phi + Mathf.PI) / TWO_PI));
float r = radius * Mathf.Pow(nt / dt, curvature);
float u = r * Mathf.Cos(phi - rotation);
float v = r * Mathf.Sin(phi - rotation);
float uRadius = u * maxRadius;
float vRadius = v * maxRadius;
float uRadiusPowTwo = uRadius * uRadius;
float vRadiusPowTwo = vRadius * vRadius;
float kernelLength = Mathf.Sqrt((uRadiusPowTwo + vRadiusPowTwo));
float uRCP = uRadius * rcpAspect;
m_BokehKernel[idx] = new Vector4(uRadius, vRadius, kernelLength, uRCP);
idx++;
}
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static float GetMaxBokehRadiusInPixels(float viewportHeight)
{
// Estimate the maximum radius of bokeh (empirically derived from the ring count)
const float kRadiusInPixels = 14f;
return Mathf.Min(0.05f, kRadiusInPixels / viewportHeight);
}
void DoBokehDepthOfField(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, Rect pixelRect)
{
int downSample = 2;
var material = m_Materials.bokehDepthOfField;
int wh = m_Descriptor.width / downSample;
int hh = m_Descriptor.height / downSample;
// "A Lens and Aperture Camera Model for Synthetic Image Generation" [Potmesil81]
float F = m_DepthOfField.focalLength.value / 1000f;
float A = m_DepthOfField.focalLength.value / m_DepthOfField.aperture.value;
float P = m_DepthOfField.focusDistance.value;
float maxCoC = (A * F) / (P - F);
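// Thin-lens model: F is the focal length in meters, A the aperture diameter (focal length / f-number),
// P the focus distance; maxCoC is the circle of confusion reached by a point at infinity.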
float maxRadius = GetMaxBokehRadiusInPixels(m_Descriptor.height);
float rcpAspect = 1f / (wh / (float)hh);
CoreUtils.SetKeyword(material, ShaderKeywordStrings.UseFastSRGBLinearConversion, m_UseFastSRGBLinearConversion);
cmd.SetGlobalVector(ShaderConstants._CoCParams, new Vector4(P, maxCoC, maxRadius, rcpAspect));
// Prepare the bokeh kernel constant buffer
int hash = m_DepthOfField.GetHashCode();
if (hash != m_BokehHash || maxRadius != m_BokehMaxRadius || rcpAspect != m_BokehRCPAspect)
{
m_BokehHash = hash;
m_BokehMaxRadius = maxRadius;
m_BokehRCPAspect = rcpAspect;
PrepareBokehKernel(maxRadius, rcpAspect);
}
cmd.SetGlobalVectorArray(ShaderConstants._BokehKernel, m_BokehKernel);
// Temporary textures
cmd.GetTemporaryRT(ShaderConstants._FullCoCTexture, GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, GraphicsFormat.R8_UNorm), FilterMode.Bilinear);
cmd.GetTemporaryRT(ShaderConstants._PingTexture, GetCompatibleDescriptor(wh, hh, GraphicsFormat.R16G16B16A16_SFloat), FilterMode.Bilinear);
cmd.GetTemporaryRT(ShaderConstants._PongTexture, GetCompatibleDescriptor(wh, hh, GraphicsFormat.R16G16B16A16_SFloat), FilterMode.Bilinear);
PostProcessUtils.SetSourceSize(cmd, m_Descriptor);
cmd.SetGlobalVector(ShaderConstants._DownSampleScaleFactor, new Vector4(1.0f / downSample, 1.0f / downSample, downSample, downSample));
float uvMargin = (1.0f / m_Descriptor.height) * downSample;
cmd.SetGlobalVector(ShaderConstants._BokehConstants, new Vector4(uvMargin, uvMargin * 2.0f));
// Compute CoC
Blit(cmd, source, ShaderConstants._FullCoCTexture, material, 0);
cmd.SetGlobalTexture(ShaderConstants._FullCoCTexture, ShaderConstants._FullCoCTexture);
// Downscale & prefilter color + coc
Blit(cmd, source, ShaderConstants._PingTexture, material, 1);
// Bokeh blur
Blit(cmd, ShaderConstants._PingTexture, ShaderConstants._PongTexture, material, 2);
// Post-filtering
Blit(cmd, ShaderConstants._PongTexture, BlitDstDiscardContent(cmd, ShaderConstants._PingTexture), material, 3);
// Composite
cmd.SetGlobalTexture(ShaderConstants._DofTexture, ShaderConstants._PingTexture);
Blit(cmd, source, BlitDstDiscardContent(cmd, destination), material, 4);
// Cleanup
cmd.ReleaseTemporaryRT(ShaderConstants._FullCoCTexture);
cmd.ReleaseTemporaryRT(ShaderConstants._PingTexture);
cmd.ReleaseTemporaryRT(ShaderConstants._PongTexture);
}
#endregion
#region LensFlareDataDriven
static float GetLensFlareLightAttenuation(Light light, Camera cam, Vector3 wo)
{
// Must always be true
if (light != null)
{
switch (light.type)
{
case LightType.Directional:
return LensFlareCommonSRP.ShapeAttenuationDirLight(light.transform.forward, wo);
case LightType.Point:
return LensFlareCommonSRP.ShapeAttenuationPointLight();
case LightType.Spot:
return LensFlareCommonSRP.ShapeAttenuationSpotConeLight(light.transform.forward, wo, light.spotAngle, light.innerSpotAngle / 180.0f);
default:
return 1.0f;
}
}
return 1.0f;
}
void DoLensFlareDatadriven(Camera camera, CommandBuffer cmd, RenderTargetIdentifier source, bool usePanini, float paniniDistance, float paniniCropToFit)
{
var gpuView = camera.worldToCameraMatrix;
var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, true);
// Zero out the translation component.
gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
var gpuVP = gpuNonJitteredProj * camera.worldToCameraMatrix;
LensFlareCommonSRP.DoLensFlareDataDrivenCommon(m_Materials.lensFlareDataDriven, LensFlareCommonSRP.Instance, camera, (float)m_Descriptor.width, (float)m_Descriptor.height,
usePanini, paniniDistance, paniniCropToFit,
true,
camera.transform.position,
gpuVP,
cmd, source,
(Light light, Camera cam, Vector3 wo) => { return GetLensFlareLightAttenuation(light, cam, wo); },
ShaderConstants._FlareOcclusionTex, ShaderConstants._FlareOcclusionIndex,
ShaderConstants._FlareTex, ShaderConstants._FlareColorValue,
ShaderConstants._FlareData0, ShaderConstants._FlareData1, ShaderConstants._FlareData2, ShaderConstants._FlareData3, ShaderConstants._FlareData4,
false);
}
#endregion
#region Motion Blur
static readonly int kShaderPropertyId_ViewProjM = Shader.PropertyToID("_ViewProjM");
static readonly int kShaderPropertyId_PrevViewProjM = Shader.PropertyToID("_PrevViewProjM");
static readonly int kShaderPropertyId_ViewProjMStereo = Shader.PropertyToID("_ViewProjMStereo");
static readonly int kShaderPropertyId_PrevViewProjMStereo = Shader.PropertyToID("_PrevViewProjMStereo");
void UpdateMotionBlurMatrices(ref Material material, Camera camera, XRPass xr)
{
MotionVectorsPersistentData motionData = null;
if (camera.TryGetComponent<UniversalAdditionalCameraData>(out var additionalCameraData))
motionData = additionalCameraData.motionVectorsPersistentData;
if (motionData == null)
return;
#if ENABLE_VR && ENABLE_XR_MODULE
if (xr.enabled && xr.singlePassEnabled)
{
material.SetMatrixArray(kShaderPropertyId_ViewProjMStereo, motionData.viewProjectionStereo);
if (m_ResetHistory)
material.SetMatrixArray(kShaderPropertyId_PrevViewProjMStereo, motionData.viewProjectionStereo);
else
material.SetMatrixArray(kShaderPropertyId_PrevViewProjMStereo, motionData.previousViewProjectionStereo);
}
else
#endif
{
int prevViewProjMIdx = 0;
#if ENABLE_VR && ENABLE_XR_MODULE
if (xr.enabled)
prevViewProjMIdx = xr.multipassId;
#endif
// This is needed because Blit will reset viewproj matrices to identity and UniversalRP currently
// relies on SetupCameraProperties instead of handling its own matrices.
// TODO: We need to get rid of SetupCameraProperties and set up the camera matrices in Universal
material.SetMatrix(kShaderPropertyId_ViewProjM, motionData.viewProjection);
if (m_ResetHistory)
material.SetMatrix(kShaderPropertyId_PrevViewProjM, motionData.viewProjection);
else
material.SetMatrix(kShaderPropertyId_PrevViewProjM, motionData.previousViewProjection);
}
}
void DoMotionBlur(CameraData cameraData, CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination)
{
var material = m_Materials.cameraMotionBlur;
UpdateMotionBlurMatrices(ref material, cameraData.camera, cameraData.xr);
material.SetFloat("_Intensity", m_MotionBlur.intensity.value);
material.SetFloat("_Clamp", m_MotionBlur.clamp.value);
PostProcessUtils.SetSourceSize(cmd, m_Descriptor);
Blit(cmd, source, BlitDstDiscardContent(cmd, destination), material, (int)m_MotionBlur.quality.value);
}
#endregion
#region Panini Projection
// Back-ported & adapted from the work of the Stockholm demo team - thanks Lasse!
void DoPaniniProjection(Camera camera, CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination)
{
float distance = m_PaniniProjection.distance.value;
var viewExtents = CalcViewExtents(camera);
var cropExtents = CalcCropExtents(camera, distance);
float scaleX = cropExtents.x / viewExtents.x;
float scaleY = cropExtents.y / viewExtents.y;
float scaleF = Mathf.Min(scaleX, scaleY);
float paniniD = distance;
float paniniS = Mathf.Lerp(1f, Mathf.Clamp01(scaleF), m_PaniniProjection.cropToFit.value);
var material = m_Materials.paniniProjection;
material.SetVector(ShaderConstants._Params, new Vector4(viewExtents.x, viewExtents.y, paniniD, paniniS));
material.EnableKeyword(
1f - Mathf.Abs(paniniD) > float.Epsilon
? ShaderKeywordStrings.PaniniGeneric : ShaderKeywordStrings.PaniniUnitDistance
);
Blit(cmd, source, BlitDstDiscardContent(cmd, destination), material);
}
Vector2 CalcViewExtents(Camera camera)
{
float fovY = camera.fieldOfView * Mathf.Deg2Rad;
float aspect = m_Descriptor.width / (float)m_Descriptor.height;
float viewExtY = Mathf.Tan(0.5f * fovY);
float viewExtX = aspect * viewExtY;
return new Vector2(viewExtX, viewExtY);
}
Vector2 CalcCropExtents(Camera camera, float d)
{
// (The original source contains an ASCII diagram of this construction: the eye O sits one unit
// below the view plane, with the Panini projection centre P a further distance d below O; given
// the view-plane extent X, find the corresponding crop extent E.)
// have X
// want to find E
float viewDist = 1f + d;
var projPos = CalcViewExtents(camera);
var projHyp = Mathf.Sqrt(projPos.x * projPos.x + 1f);
float cylDistMinusD = 1f / projHyp;
float cylDist = cylDistMinusD + d;
var cylPos = projPos * cylDistMinusD;
return cylPos * (viewDist / cylDist);
}
#endregion
#region Bloom
void SetupBloom(CommandBuffer cmd, RenderTargetIdentifier source, Material uberMaterial)
{
// Start at half-res
int tw = m_Descriptor.width >> 1;
int th = m_Descriptor.height >> 1;
// Determine the iteration count
int maxSize = Mathf.Max(tw, th);
int iterations = Mathf.FloorToInt(Mathf.Log(maxSize, 2f) - 1);
iterations -= m_Bloom.skipIterations.value;
int mipCount = Mathf.Clamp(iterations, 1, k_MaxPyramidSize);
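// Example: a 1920x1080 source gives tw = 960, th = 540, so maxSize = 960 and
// iterations = floor(log2(960) - 1) = 8 mips before skipIterations is subtracted.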
// Pre-filtering parameters
float clamp = m_Bloom.clamp.value;
float threshold = Mathf.GammaToLinearSpace(m_Bloom.threshold.value);
float thresholdKnee = threshold * 0.5f; // Hardcoded soft knee
// Material setup
float scatter = Mathf.Lerp(0.05f, 0.95f, m_Bloom.scatter.value);
var bloomMaterial = m_Materials.bloom;
bloomMaterial.SetVector(ShaderConstants._Params, new Vector4(scatter, clamp, threshold, thresholdKnee));
CoreUtils.SetKeyword(bloomMaterial, ShaderKeywordStrings.BloomHQ, m_Bloom.highQualityFiltering.value);
CoreUtils.SetKeyword(bloomMaterial, ShaderKeywordStrings.UseRGBM, m_UseRGBM);
// Prefilter
var desc = GetCompatibleDescriptor(tw, th, m_DefaultHDRFormat);
cmd.GetTemporaryRT(ShaderConstants._BloomMipDown[0], desc, FilterMode.Bilinear);
cmd.GetTemporaryRT(ShaderConstants._BloomMipUp[0], desc, FilterMode.Bilinear);
Blit(cmd, source, ShaderConstants._BloomMipDown[0], bloomMaterial, 0);
// Downsample - gaussian pyramid
int lastDown = ShaderConstants._BloomMipDown[0];
for (int i = 1; i < mipCount; i++)
{
tw = Mathf.Max(1, tw >> 1);
th = Mathf.Max(1, th >> 1);
int mipDown = ShaderConstants._BloomMipDown[i];
int mipUp = ShaderConstants._BloomMipUp[i];
desc.width = tw;
desc.height = th;
cmd.GetTemporaryRT(mipDown, desc, FilterMode.Bilinear);
cmd.GetTemporaryRT(mipUp, desc, FilterMode.Bilinear);
// Classic two pass gaussian blur - use mipUp as a temporary target
// First pass does 2x downsampling + 9-tap gaussian
// Second pass does 9-tap gaussian using a 5-tap filter + bilinear filtering
Blit(cmd, lastDown, mipUp, bloomMaterial, 1);
Blit(cmd, mipUp, mipDown, bloomMaterial, 2);
lastDown = mipDown;
}
// Upsample (bilinear by default, HQ filtering does bicubic instead)
for (int i = mipCount - 2; i >= 0; i--)
{
int lowMip = (i == mipCount - 2) ? ShaderConstants._BloomMipDown[i + 1] : ShaderConstants._BloomMipUp[i + 1];
int highMip = ShaderConstants._BloomMipDown[i];
int dst = ShaderConstants._BloomMipUp[i];
cmd.SetGlobalTexture(ShaderConstants._SourceTexLowMip, lowMip);
Blit(cmd, highMip, BlitDstDiscardContent(cmd, dst), bloomMaterial, 3);
}
// Cleanup
for (int i = 0; i < mipCount; i++)
{
cmd.ReleaseTemporaryRT(ShaderConstants._BloomMipDown[i]);
if (i > 0) cmd.ReleaseTemporaryRT(ShaderConstants._BloomMipUp[i]);
}
// Setup bloom on uber
var tint = m_Bloom.tint.value.linear;
var luma = ColorUtils.Luminance(tint);
tint = luma > 0f ? tint * (1f / luma) : Color.white;
var bloomParams = new Vector4(m_Bloom.intensity.value, tint.r, tint.g, tint.b);
uberMaterial.SetVector(ShaderConstants._Bloom_Params, bloomParams);
uberMaterial.SetFloat(ShaderConstants._Bloom_RGBM, m_UseRGBM ? 1f : 0f);
cmd.SetGlobalTexture(ShaderConstants._Bloom_Texture, ShaderConstants._BloomMipUp[0]);
// Setup lens dirtiness on uber
// Keep the aspect ratio correct & center the dirt texture, we don't want it to be
// stretched or squashed
var dirtTexture = m_Bloom.dirtTexture.value == null ? Texture2D.blackTexture : m_Bloom.dirtTexture.value;
float dirtRatio = dirtTexture.width / (float)dirtTexture.height;
float screenRatio = m_Descriptor.width / (float)m_Descriptor.height;
var dirtScaleOffset = new Vector4(1f, 1f, 0f, 0f);
float dirtIntensity = m_Bloom.dirtIntensity.value;
if (dirtRatio > screenRatio)
{
dirtScaleOffset.x = screenRatio / dirtRatio;
dirtScaleOffset.z = (1f - dirtScaleOffset.x) * 0.5f;
}
else if (screenRatio > dirtRatio)
{
dirtScaleOffset.y = dirtRatio / screenRatio;
dirtScaleOffset.w = (1f - dirtScaleOffset.y) * 0.5f;
}
uberMaterial.SetVector(ShaderConstants._LensDirt_Params, dirtScaleOffset);
uberMaterial.SetFloat(ShaderConstants._LensDirt_Intensity, dirtIntensity);
uberMaterial.SetTexture(ShaderConstants._LensDirt_Texture, dirtTexture);
// Keyword setup - a bit convoluted as we're trying to save some variants in Uber...
if (m_Bloom.highQualityFiltering.value)
uberMaterial.EnableKeyword(dirtIntensity > 0f ? ShaderKeywordStrings.BloomHQDirt : ShaderKeywordStrings.BloomHQ);
else
uberMaterial.EnableKeyword(dirtIntensity > 0f ? ShaderKeywordStrings.BloomLQDirt : ShaderKeywordStrings.BloomLQ);
}
#endregion
#region Lens Distortion
void SetupLensDistortion(Material material, bool isSceneView)
{
float amount = 1.6f * Mathf.Max(Mathf.Abs(m_LensDistortion.intensity.value * 100f), 1f);
float theta = Mathf.Deg2Rad * Mathf.Min(160f, amount);
float sigma = 2f * Mathf.Tan(theta * 0.5f);
var center = m_LensDistortion.center.value * 2f - Vector2.one;
var p1 = new Vector4(
center.x,
center.y,
Mathf.Max(m_LensDistortion.xMultiplier.value, 1e-4f),
Mathf.Max(m_LensDistortion.yMultiplier.value, 1e-4f)
);
var p2 = new Vector4(
m_LensDistortion.intensity.value >= 0f ? theta : 1f / theta,
sigma,
1f / m_LensDistortion.scale.value,
m_LensDistortion.intensity.value * 100f
);
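// p1 packs the distortion center and per-axis multipliers; p2 packs the angular term (theta for positive
// intensity, 1/theta for negative), sigma, the inverse scale and the signed intensity (x100).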
material.SetVector(ShaderConstants._Distortion_Params1, p1);
material.SetVector(ShaderConstants._Distortion_Params2, p2);
if (m_LensDistortion.IsActive() && !isSceneView)
material.EnableKeyword(ShaderKeywordStrings.Distortion);
}
#endregion
#region Chromatic Aberration
void SetupChromaticAberration(Material material)
{
material.SetFloat(ShaderConstants._Chroma_Params, m_ChromaticAberration.intensity.value * 0.05f);
if (m_ChromaticAberration.IsActive())
material.EnableKeyword(ShaderKeywordStrings.ChromaticAberration);
}
#endregion
#region Vignette
void SetupVignette(Material material)
{
var color = m_Vignette.color.value;
var center = m_Vignette.center.value;
var aspectRatio = m_Descriptor.width / (float)m_Descriptor.height;
var v1 = new Vector4(
color.r, color.g, color.b,
m_Vignette.rounded.value ? aspectRatio : 1f
);
var v2 = new Vector4(
center.x, center.y,
m_Vignette.intensity.value * 3f,
m_Vignette.smoothness.value * 5f
);
material.SetVector(ShaderConstants._Vignette_Params1, v1);
material.SetVector(ShaderConstants._Vignette_Params2, v2);
}
#endregion
#region Color Grading
void SetupColorGrading(CommandBuffer cmd, ref RenderingData renderingData, Material material)
{
ref var postProcessingData = ref renderingData.postProcessingData;
bool hdr = postProcessingData.gradingMode == ColorGradingMode.HighDynamicRange;
int lutHeight = postProcessingData.lutSize;
int lutWidth = lutHeight * lutHeight;
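// The internal grading LUT is a 2D "strip" texture: an N-sized 3D LUT laid out as N slices side by side,
// so the texture is (N * N) pixels wide and N pixels tall.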
// Source material setup
float postExposureLinear = Mathf.Pow(2f, m_ColorAdjustments.postExposure.value);
cmd.SetGlobalTexture(ShaderConstants._InternalLut, m_InternalLut.Identifier());
material.SetVector(ShaderConstants._Lut_Params, new Vector4(1f / lutWidth, 1f / lutHeight, lutHeight - 1f, postExposureLinear));
material.SetTexture(ShaderConstants._UserLut, m_ColorLookup.texture.value);
material.SetVector(ShaderConstants._UserLut_Params, !m_ColorLookup.IsActive()
? Vector4.zero
: new Vector4(1f / m_ColorLookup.texture.value.width,
1f / m_ColorLookup.texture.value.height,
m_ColorLookup.texture.value.height - 1f,
m_ColorLookup.contribution.value)
);
if (hdr)
{
material.EnableKeyword(ShaderKeywordStrings.HDRGrading);
}
else
{
switch (m_Tonemapping.mode.value)
{
case TonemappingMode.Neutral: material.EnableKeyword(ShaderKeywordStrings.TonemapNeutral); break;
case TonemappingMode.ACES: material.EnableKeyword(ShaderKeywordStrings.TonemapACES); break;
default: break; // None
}
}
}
#endregion
#region Film Grain
void SetupGrain(in CameraData cameraData, Material material)
{
if (!m_HasFinalPass && m_FilmGrain.IsActive())
{
material.EnableKeyword(ShaderKeywordStrings.FilmGrain);
PostProcessUtils.ConfigureFilmGrain(
m_Data,
m_FilmGrain,
cameraData.pixelWidth, cameraData.pixelHeight,
material
);
}
}
#endregion
#region 8-bit Dithering
void SetupDithering(in CameraData cameraData, Material material)
{
if (!m_HasFinalPass && cameraData.isDitheringEnabled)
{
material.EnableKeyword(ShaderKeywordStrings.Dithering);
m_DitheringTextureIndex = PostProcessUtils.ConfigureDithering(
m_Data,
m_DitheringTextureIndex,
cameraData.pixelWidth, cameraData.pixelHeight,
material
);
}
}
#endregion
#region Final pass
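// Final pass: applies film grain, dithering, optional linear-to-sRGB conversion, FXAA and the
// point/linear/FSR upscaling path, then blits to the camera (or XR) target.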
void RenderFinalPass(CommandBuffer cmd, ref RenderingData renderingData)
{
ref var cameraData = ref renderingData.cameraData;
var material = m_Materials.finalPass;
material.shaderKeywords = null;
PostProcessUtils.SetSourceSize(cmd, cameraData.cameraTargetDescriptor);
SetupGrain(cameraData, material);
SetupDithering(cameraData, material);
if (RequireSRGBConversionBlitToBackBuffer(cameraData))
material.EnableKeyword(ShaderKeywordStrings.LinearToSRGBConversion);
GetActiveDebugHandler(renderingData)?.UpdateShaderGlobalPropertiesForFinalValidationPass(cmd, ref cameraData, m_IsFinalPass);
if (!m_UseSwapBuffer)
{
cmd.SetGlobalTexture(ShaderPropertyId.sourceTex, m_Source);
}
else if (m_Source == cameraData.renderer.GetCameraColorFrontBuffer(cmd))
{
m_Source = cameraData.renderer.cameraColorTarget;
}
cmd.SetGlobalTexture(ShaderPropertyId.sourceTex, m_Source);
var colorLoadAction = cameraData.isDefaultViewport ? RenderBufferLoadAction.DontCare : RenderBufferLoadAction.Load;
bool isScalingSetupUsed = false;
bool isUpscaledTextureUsed = false;
bool isFxaaEnabled = (cameraData.antialiasing == AntialiasingMode.FastApproximateAntialiasing);
if (cameraData.imageScalingMode != ImageScalingMode.None)
{
// FSR is only considered "enabled" when we're performing upscaling. (downscaling uses a linear filter unconditionally)
bool isFsrEnabled = ((cameraData.imageScalingMode == ImageScalingMode.Upscaling) && (cameraData.upscalingFilter == ImageUpscalingFilter.FSR));
// When FXAA is enabled in scaled renders, we execute it in a separate blit since it's not designed to be used in
// situations where the input and output resolutions do not match.
// When FSR is active, we always need an additional pass since it has a very particular color encoding requirement.
// NOTE: An ideal implementation could inline this color conversion logic into the UberPost pass, but the current code structure would make
// this process very complex. Specifically, we'd need to guarantee that the uber post output is always written to a UNORM format render
// target in order to preserve the precision of specially encoded color data.
bool isSetupRequired = (isFxaaEnabled || isFsrEnabled);
// Make sure to remove any MSAA and attached depth buffers from the temporary render targets
var tempRtDesc = cameraData.cameraTargetDescriptor;
tempRtDesc.msaaSamples = 1;
tempRtDesc.depthBufferBits = 0;
// Select a UNORM format since we've already performed tonemapping. (Values are in 0-1 range)
// This improves precision and is required if we want to avoid excessive banding when FSR is in use.
tempRtDesc.graphicsFormat = UniversalRenderPipeline.MakeUnormRenderTextureGraphicsFormat();
m_Materials.scalingSetup.shaderKeywords = null;
var sourceRtId = m_Source;
if (isSetupRequired)
{
if (isFxaaEnabled)
{
m_Materials.scalingSetup.EnableKeyword(ShaderKeywordStrings.Fxaa);
}
if (isFsrEnabled)
{
m_Materials.scalingSetup.EnableKeyword(ShaderKeywordStrings.Gamma20);
}
cmd.GetTemporaryRT(ShaderConstants._ScalingSetupTexture, tempRtDesc, FilterMode.Point);
isScalingSetupUsed = true;
Blit(cmd, m_Source, ShaderConstants._ScalingSetupTexture, m_Materials.scalingSetup);
cmd.SetGlobalTexture(ShaderPropertyId.sourceTex, ShaderConstants._ScalingSetupTexture);
sourceRtId = ShaderConstants._ScalingSetupTexture;
}
switch (cameraData.imageScalingMode)
{
case ImageScalingMode.Upscaling:
{
// In the upscaling case, set material keywords based on the selected upscaling filter
// Note: If FSR is enabled, we go down this path regardless of the current render scale. We do this because
// FSR still provides visual benefits at 100% scale. This will also make the transition between 99% and 100%
// scale less obvious for cases where FSR is used with dynamic resolution scaling.
switch (cameraData.upscalingFilter)
{
case ImageUpscalingFilter.Point:
{
material.EnableKeyword(ShaderKeywordStrings.PointSampling);
break;
}
case ImageUpscalingFilter.Linear:
{
// Do nothing as linear is the default filter in the shader
break;
}
case ImageUpscalingFilter.FSR:
{
m_Materials.easu.shaderKeywords = null;
var upscaleRtDesc = tempRtDesc;
upscaleRtDesc.width = cameraData.pixelWidth;
upscaleRtDesc.height = cameraData.pixelHeight;
// EASU
cmd.GetTemporaryRT(ShaderConstants._UpscaledTexture, upscaleRtDesc, FilterMode.Point);
isUpscaledTextureUsed = true;
var fsrInputSize = new Vector2(cameraData.cameraTargetDescriptor.width, cameraData.cameraTargetDescriptor.height);
var fsrOutputSize = new Vector2(cameraData.pixelWidth, cameraData.pixelHeight);
FSRUtils.SetEasuConstants(cmd, fsrInputSize, fsrInputSize, fsrOutputSize);
Blit(cmd, sourceRtId, ShaderConstants._UpscaledTexture, m_Materials.easu);
// RCAS
// Use the override value if it's available, otherwise use the default.
float sharpness = cameraData.fsrOverrideSharpness ? cameraData.fsrSharpness : FSRUtils.kDefaultSharpnessLinear;
// Set up the parameters for the RCAS pass unless the sharpness value indicates that it won't have any effect.
if (cameraData.fsrSharpness > 0.0f)
{
// RCAS is performed during the final post blit, but we set up the parameters here for better logical grouping.
material.EnableKeyword(ShaderKeywordStrings.Rcas);
FSRUtils.SetRcasConstantsLinear(cmd, sharpness);
}
// Update the source texture for the next operation
cmd.SetGlobalTexture(ShaderPropertyId.sourceTex, ShaderConstants._UpscaledTexture);
PostProcessUtils.SetSourceSize(cmd, upscaleRtDesc);
break;
}
}
break;
}
case ImageScalingMode.Downscaling:
{
// In the downscaling case, we don't perform any sort of filter override logic since we always want linear filtering
// and it's already the default option in the shader.
break;
}
}
}
else if (isFxaaEnabled)
{
// In unscaled renders, FXAA can be safely performed in the FinalPost shader
material.EnableKeyword(ShaderKeywordStrings.Fxaa);
}
RenderTargetHandle cameraTargetHandle = RenderTargetHandle.GetCameraTarget(cameraData.xr);
#if ENABLE_VR && ENABLE_XR_MODULE
if (cameraData.xr.enabled)
{
RenderTargetIdentifier cameraTarget = cameraTargetHandle.Identifier();
//Blit(cmd, m_Source.Identifier(), BuiltinRenderTextureType.CurrentActive, material);
bool isRenderToBackBufferTarget = cameraTarget == cameraData.xr.renderTarget && !cameraData.xr.renderTargetIsRenderTexture;
// We y-flip if
// 1) we are bliting from render texture to back buffer and
// 2) renderTexture starts UV at top
bool yflip = isRenderToBackBufferTarget && SystemInfo.graphicsUVStartsAtTop;
Vector4 scaleBias = yflip ? new Vector4(1, -1, 0, 1) : new Vector4(1, 1, 0, 0);
cmd.SetRenderTarget(new RenderTargetIdentifier(cameraTarget, 0, CubemapFace.Unknown, -1),
colorLoadAction, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
cmd.SetViewport(cameraData.pixelRect);
cmd.SetGlobalVector(ShaderPropertyId.scaleBias, scaleBias);
cmd.DrawProcedural(Matrix4x4.identity, material, 0, MeshTopology.Quads, 4, 1, null);
}
else
#endif
{
// Note: We need to get the cameraData.targetTexture as this will get the targetTexture of the camera stack.
// Overlay cameras need to output to the target described in the base camera while doing camera stack.
RenderTargetIdentifier cameraTarget = (cameraData.targetTexture != null) ? new RenderTargetIdentifier(cameraData.targetTexture) : cameraTargetHandle.Identifier();
cmd.SetRenderTarget(cameraTarget, colorLoadAction, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
cmd.SetViewProjectionMatrices(Matrix4x4.identity, Matrix4x4.identity);
cmd.SetViewport(cameraData.pixelRect);
cmd.DrawMesh(RenderingUtils.fullscreenMesh, Matrix4x4.identity, material);
cmd.SetViewProjectionMatrices(cameraData.camera.worldToCameraMatrix, cameraData.camera.projectionMatrix);
cameraData.renderer.ConfigureCameraTarget(cameraTarget, cameraTarget);
}
if (isUpscaledTextureUsed)
{
cmd.ReleaseTemporaryRT(ShaderConstants._UpscaledTexture);
}
if (isScalingSetupUsed)
{
cmd.ReleaseTemporaryRT(ShaderConstants._ScalingSetupTexture);
}
}
#endregion
#region Internal utilities
class MaterialLibrary
{
public readonly Material stopNaN;
public readonly Material subpixelMorphologicalAntialiasing;
public readonly Material gaussianDepthOfField;
public readonly Material bokehDepthOfField;
public readonly Material cameraMotionBlur;
public readonly Material paniniProjection;
public readonly Material bloom;
public readonly Material scalingSetup;
public readonly Material easu;
public readonly Material uber;
public readonly Material finalPass;
public readonly Material lensFlareDataDriven;
public MaterialLibrary(PostProcessData data)
{
stopNaN = Load(data.shaders.stopNanPS);
subpixelMorphologicalAntialiasing = Load(data.shaders.subpixelMorphologicalAntialiasingPS);
gaussianDepthOfField = Load(data.shaders.gaussianDepthOfFieldPS);
bokehDepthOfField = Load(data.shaders.bokehDepthOfFieldPS);
cameraMotionBlur = Load(data.shaders.cameraMotionBlurPS);
paniniProjection = Load(data.shaders.paniniProjectionPS);
bloom = Load(data.shaders.bloomPS);
scalingSetup = Load(data.shaders.scalingSetupPS);
easu = Load(data.shaders.easuPS);
uber = Load(data.shaders.uberPostPS);
finalPass = Load(data.shaders.finalPostPassPS);
lensFlareDataDriven = Load(data.shaders.LensFlareDataDrivenPS);
}
Material Load(Shader shader)
{
if (shader == null)
{
Debug.LogErrorFormat($"Missing shader. {GetType().DeclaringType.Name} render pass will not execute. Check for missing reference in the renderer resources.");
return null;
}
else if (!shader.isSupported)
{
return null;
}
return CoreUtils.CreateEngineMaterial(shader);
}
internal void Cleanup()
{
CoreUtils.Destroy(stopNaN);
CoreUtils.Destroy(subpixelMorphologicalAntialiasing);
CoreUtils.Destroy(gaussianDepthOfField);
CoreUtils.Destroy(bokehDepthOfField);
CoreUtils.Destroy(cameraMotionBlur);
CoreUtils.Destroy(paniniProjection);
CoreUtils.Destroy(bloom);
CoreUtils.Destroy(scalingSetup);
CoreUtils.Destroy(easu);
CoreUtils.Destroy(uber);
CoreUtils.Destroy(finalPass);
CoreUtils.Destroy(lensFlareDataDriven);
}
}
// Precomputed shader ids to save some CPU cycles (mostly affects mobile)
static class ShaderConstants
{
public static readonly int _TempTarget = Shader.PropertyToID("_TempTarget");
public static readonly int _TempTarget2 = Shader.PropertyToID("_TempTarget2");
public static readonly int _StencilRef = Shader.PropertyToID("_StencilRef");
public static readonly int _StencilMask = Shader.PropertyToID("_StencilMask");
public static readonly int _FullCoCTexture = Shader.PropertyToID("_FullCoCTexture");
public static readonly int _HalfCoCTexture = Shader.PropertyToID("_HalfCoCTexture");
public static readonly int _DofTexture = Shader.PropertyToID("_DofTexture");
public static readonly int _CoCParams = Shader.PropertyToID("_CoCParams");
public static readonly int _BokehKernel = Shader.PropertyToID("_BokehKernel");
public static readonly int _BokehConstants = Shader.PropertyToID("_BokehConstants");
public static readonly int _PongTexture = Shader.PropertyToID("_PongTexture");
public static readonly int _PingTexture = Shader.PropertyToID("_PingTexture");
public static readonly int _Metrics = Shader.PropertyToID("_Metrics");
public static readonly int _AreaTexture = Shader.PropertyToID("_AreaTexture");
public static readonly int _SearchTexture = Shader.PropertyToID("_SearchTexture");
public static readonly int _EdgeTexture = Shader.PropertyToID("_EdgeTexture");
public static readonly int _BlendTexture = Shader.PropertyToID("_BlendTexture");
public static readonly int _ColorTexture = Shader.PropertyToID("_ColorTexture");
public static readonly int _Params = Shader.PropertyToID("_Params");
public static readonly int _SourceTexLowMip = Shader.PropertyToID("_SourceTexLowMip");
public static readonly int _Bloom_Params = Shader.PropertyToID("_Bloom_Params");
public static readonly int _Bloom_RGBM = Shader.PropertyToID("_Bloom_RGBM");
public static readonly int _Bloom_Texture = Shader.PropertyToID("_Bloom_Texture");
public static readonly int _LensDirt_Texture = Shader.PropertyToID("_LensDirt_Texture");
public static readonly int _LensDirt_Params = Shader.PropertyToID("_LensDirt_Params");
public static readonly int _LensDirt_Intensity = Shader.PropertyToID("_LensDirt_Intensity");
public static readonly int _Distortion_Params1 = Shader.PropertyToID("_Distortion_Params1");
public static readonly int _Distortion_Params2 = Shader.PropertyToID("_Distortion_Params2");
public static readonly int _Chroma_Params = Shader.PropertyToID("_Chroma_Params");
public static readonly int _Vignette_Params1 = Shader.PropertyToID("_Vignette_Params1");
public static readonly int _Vignette_Params2 = Shader.PropertyToID("_Vignette_Params2");
public static readonly int _Lut_Params = Shader.PropertyToID("_Lut_Params");
public static readonly int _UserLut_Params = Shader.PropertyToID("_UserLut_Params");
public static readonly int _InternalLut = Shader.PropertyToID("_InternalLut");
public static readonly int _UserLut = Shader.PropertyToID("_UserLut");
public static readonly int _DownSampleScaleFactor = Shader.PropertyToID("_DownSampleScaleFactor");
public static readonly int _FlareOcclusionTex = Shader.PropertyToID("_FlareOcclusionTex");
public static readonly int _FlareOcclusionIndex = Shader.PropertyToID("_FlareOcclusionIndex");
public static readonly int _FlareTex = Shader.PropertyToID("_FlareTex");
public static readonly int _FlareColorValue = Shader.PropertyToID("_FlareColorValue");
public static readonly int _FlareData0 = Shader.PropertyToID("_FlareData0");
public static readonly int _FlareData1 = Shader.PropertyToID("_FlareData1");
public static readonly int _FlareData2 = Shader.PropertyToID("_FlareData2");
public static readonly int _FlareData3 = Shader.PropertyToID("_FlareData3");
public static readonly int _FlareData4 = Shader.PropertyToID("_FlareData4");
public static readonly int _FlareData5 = Shader.PropertyToID("_FlareData5");
public static readonly int _FullscreenProjMat = Shader.PropertyToID("_FullscreenProjMat");
public static readonly int _ScalingSetupTexture = Shader.PropertyToID("_ScalingSetupTexture");
public static readonly int _UpscaledTexture = Shader.PropertyToID("_UpscaledTexture");
public static int[] _BloomMipUp;
public static int[] _BloomMipDown;
}
#endregion
}
}