using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine.Assertions;

namespace UnityEngine.Rendering.PostProcessing
{
#if (ENABLE_VR_MODULE && ENABLE_VR)
    using XRSettings = UnityEngine.XR.XRSettings;
#endif

    /// <summary>
    /// This is the component responsible for rendering post-processing effects. It must be put on
    /// every camera you want post-processing to be applied to.
    /// </summary>
#if UNITY_2018_3_OR_NEWER
    [ExecuteAlways]
#else
    [ExecuteInEditMode]
#endif
    [DisallowMultipleComponent, ImageEffectAllowedInSceneView]
    [AddComponentMenu("Rendering/Post-process Layer", 1000)]
    [RequireComponent(typeof(Camera))]
    public sealed class PostProcessLayer : MonoBehaviour
    {
        /// <summary>
        /// Builtin anti-aliasing methods.
        /// </summary>
        public enum Antialiasing
        {
            /// <summary>
            /// No anti-aliasing.
            /// </summary>
            None,

            /// <summary>
            /// Fast Approximate Anti-aliasing (FXAA). Fast but low quality.
            /// </summary>
            FastApproximateAntialiasing,

            /// <summary>
            /// Subpixel Morphological Anti-aliasing (SMAA). Slower but higher quality than FXAA.
            /// </summary>
            SubpixelMorphologicalAntialiasing,

            /// <summary>
            /// Temporal Anti-aliasing (TAA). As fast as SMAA but generally higher quality. Because
            /// of it's temporal nature, it can introduce ghosting artifacts on fast moving objects
            /// in highly contrasted areas.
            /// </summary>
            TemporalAntialiasing
        }

        /// <summary>
        /// This is transform that will be drive the volume blending feature. In some cases you may
        /// want to use a transform other than the camera, e.g. for a top down game you'll want the
        /// player character to drive the blending instead of the actual camera transform.
        /// Setting this field to <c>null</c> will disable local volumes for this layer (global ones
        /// will still work).
        /// </summary>
        public Transform volumeTrigger;

        /// <summary>
        /// A mask of layers to consider for volume blending. It allows you to do volume filtering
        /// and is especially useful to optimize volume traversal. You should always have your
        /// volumes in dedicated layers instead of the default one for best performances.
/// public LayerMask volumeLayer; /// /// If true, it will kill any invalid / NaN pixel and replace it with a black color /// before post-processing is applied. It's generally a good idea to keep this enabled to /// avoid post-processing artifacts cause by broken data in the scene. /// public bool stopNaNPropagation = true; /// /// If true, it will render straight to the backbuffer and save the final blit done /// by the engine. This has less overhead and will improve performance on lower-end platforms /// (like mobiles) but breaks compatibility with legacy image effect that use OnRenderImage. /// public bool finalBlitToCameraTarget = false; /// /// The anti-aliasing method to use for this camera. By default it's set to None. /// public Antialiasing antialiasingMode = Antialiasing.None; /// /// Temporal Anti-aliasing settings for this camera. /// public TemporalAntialiasing temporalAntialiasing; /// /// Subpixel Morphological Anti-aliasing settings for this camera. /// public SubpixelMorphologicalAntialiasing subpixelMorphologicalAntialiasing; /// /// Fast Approximate Anti-aliasing settings for this camera. /// public FastApproximateAntialiasing fastApproximateAntialiasing; /// /// Fog settings for this camera. /// public Fog fog; Dithering dithering; /// /// The debug layer is reponsible for rendering debugging information on the screen. It will /// only be used if this layer is referenced in a component. /// /// public PostProcessDebugLayer debugLayer; [SerializeField] PostProcessResources m_Resources; // Some juggling needed to track down reference to the resource asset when loaded from asset // bundle (guid conflict) [NonSerialized] PostProcessResources m_OldResources; // UI states [UnityEngine.Scripting.Preserve] [SerializeField] bool m_ShowToolkit; [UnityEngine.Scripting.Preserve] [SerializeField] bool m_ShowCustomSorter; /// /// If true, it will stop applying post-processing effects just before color grading /// is applied. 
This is used internally to export to EXR without color grading. /// public bool breakBeforeColorGrading = false; // Pre-ordered custom user effects // These are automatically populated and made to work properly with the serialization // system AND the editor. Modify at your own risk. /// /// A wrapper around bundles to allow their serialization in lists. /// [Serializable] public sealed class SerializedBundleRef { /// /// The assembly qualified name used for serialization as we can't serialize the types /// themselves. /// public string assemblyQualifiedName; // We only need this at init time anyway so it's fine /// /// A reference to the bundle itself. /// public PostProcessBundle bundle; // Not serialized, is set/reset when deserialization kicks in } [SerializeField] List m_BeforeTransparentBundles; [SerializeField] List m_BeforeStackBundles; [SerializeField] List m_AfterStackBundles; /// /// Pre-ordered effects mapped to available injection points. /// public Dictionary> sortedBundles { get; private set; } /// /// The current flags set on the camera for the built-in render pipeline. /// public DepthTextureMode cameraDepthFlags { get; private set; } // We need to keep track of bundle initialization because for some obscure reason, on // assembly reload a MonoBehavior's Editor OnEnable will be called BEFORE the MonoBehavior's // own OnEnable... So we'll use it to pre-init bundles if the layer inspector is opened and // the component hasn't been enabled yet. /// /// Returns true if the bundles have been initialized properly. 
/// public bool haveBundlesBeenInited { get; private set; } // Settings/Renderer bundles mapped to settings types Dictionary m_Bundles; PropertySheetFactory m_PropertySheetFactory; CommandBuffer m_LegacyCmdBufferBeforeReflections; CommandBuffer m_LegacyCmdBufferBeforeLighting; CommandBuffer m_LegacyCmdBufferOpaque; CommandBuffer m_LegacyCmdBuffer; Camera m_Camera; PostProcessRenderContext m_CurrentContext; LogHistogram m_LogHistogram; bool m_SettingsUpdateNeeded = true; bool m_IsRenderingInSceneView = false; TargetPool m_TargetPool; bool m_NaNKilled = false; // Recycled list - used to reduce GC stress when gathering active effects in a bundle list // on each frame readonly List m_ActiveEffects = new List(); readonly List m_Targets = new List(); void OnEnable() { Init(null); if (!haveBundlesBeenInited) InitBundles(); m_LogHistogram = new LogHistogram(); m_PropertySheetFactory = new PropertySheetFactory(); m_TargetPool = new TargetPool(); debugLayer.OnEnable(); if (RuntimeUtilities.scriptableRenderPipelineActive) return; InitLegacy(); } void InitLegacy() { m_LegacyCmdBufferBeforeReflections = new CommandBuffer { name = "Deferred Ambient Occlusion" }; m_LegacyCmdBufferBeforeLighting = new CommandBuffer { name = "Deferred Ambient Occlusion" }; m_LegacyCmdBufferOpaque = new CommandBuffer { name = "Opaque Only Post-processing" }; m_LegacyCmdBuffer = new CommandBuffer { name = "Post-processing" }; m_Camera = GetComponent(); #if !UNITY_2019_1_OR_NEWER // OnRenderImage (below) implies forceIntoRenderTexture m_Camera.forceIntoRenderTexture = true; // Needed when running Forward / LDR / No MSAA #endif m_Camera.AddCommandBuffer(CameraEvent.BeforeReflections, m_LegacyCmdBufferBeforeReflections); m_Camera.AddCommandBuffer(CameraEvent.BeforeLighting, m_LegacyCmdBufferBeforeLighting); m_Camera.AddCommandBuffer(CameraEvent.BeforeImageEffectsOpaque, m_LegacyCmdBufferOpaque); m_Camera.AddCommandBuffer(CameraEvent.BeforeImageEffects, m_LegacyCmdBuffer); // Internal context used if no 
SRP is set m_CurrentContext = new PostProcessRenderContext(); } #if UNITY_2019_1_OR_NEWER bool DynamicResolutionAllowsFinalBlitToCameraTarget() { return (!m_Camera.allowDynamicResolution || (ScalableBufferManager.heightScaleFactor == 1.0 && ScalableBufferManager.widthScaleFactor == 1.0)); } #endif #if UNITY_2019_1_OR_NEWER // We always use a CommandBuffer to blit to the final render target // OnRenderImage is used only to avoid the automatic blit from the RenderTexture of Camera.forceIntoRenderTexture to the actual target [ImageEffectUsesCommandBuffer] void OnRenderImage(RenderTexture src, RenderTexture dst) { if (finalBlitToCameraTarget && !m_CurrentContext.stereoActive && DynamicResolutionAllowsFinalBlitToCameraTarget()) RenderTexture.active = dst; // silence warning else Graphics.Blit(src, dst); } #endif /// /// Initializes this layer. If you create the layer via scripting you should always call /// this method. /// /// A reference to the resource asset public void Init(PostProcessResources resources) { if (resources != null) m_Resources = resources; RuntimeUtilities.CreateIfNull(ref temporalAntialiasing); RuntimeUtilities.CreateIfNull(ref subpixelMorphologicalAntialiasing); RuntimeUtilities.CreateIfNull(ref fastApproximateAntialiasing); RuntimeUtilities.CreateIfNull(ref dithering); RuntimeUtilities.CreateIfNull(ref fog); RuntimeUtilities.CreateIfNull(ref debugLayer); } /// /// Initializes all the effect bundles. This is called automatically by the framework. 
/// public void InitBundles() { if (haveBundlesBeenInited) return; // Create these lists only once, the serialization system will take over after that RuntimeUtilities.CreateIfNull(ref m_BeforeTransparentBundles); RuntimeUtilities.CreateIfNull(ref m_BeforeStackBundles); RuntimeUtilities.CreateIfNull(ref m_AfterStackBundles); // Create a bundle for each effect type m_Bundles = new Dictionary(); foreach (var type in PostProcessManager.instance.settingsTypes.Keys) { var settings = (PostProcessEffectSettings)ScriptableObject.CreateInstance(type); var bundle = new PostProcessBundle(settings); m_Bundles.Add(type, bundle); } // Update sorted lists with newly added or removed effects in the assemblies UpdateBundleSortList(m_BeforeTransparentBundles, PostProcessEvent.BeforeTransparent); UpdateBundleSortList(m_BeforeStackBundles, PostProcessEvent.BeforeStack); UpdateBundleSortList(m_AfterStackBundles, PostProcessEvent.AfterStack); // Push all sorted lists in a dictionary for easier access sortedBundles = new Dictionary>(new PostProcessEventComparer()) { { PostProcessEvent.BeforeTransparent, m_BeforeTransparentBundles }, { PostProcessEvent.BeforeStack, m_BeforeStackBundles }, { PostProcessEvent.AfterStack, m_AfterStackBundles } }; // Done haveBundlesBeenInited = true; } void UpdateBundleSortList(List sortedList, PostProcessEvent evt) { // First get all effects associated with the injection point var effects = m_Bundles.Where(kvp => kvp.Value.attribute.eventType == evt && !kvp.Value.attribute.builtinEffect) .Select(kvp => kvp.Value) .ToList(); // Remove types that don't exist anymore sortedList.RemoveAll(x => { string searchStr = x.assemblyQualifiedName; return !effects.Exists(b => b.settings.GetType().AssemblyQualifiedName == searchStr); }); // Add new ones foreach (var effect in effects) { string typeName = effect.settings.GetType().AssemblyQualifiedName; if (!sortedList.Exists(b => b.assemblyQualifiedName == typeName)) { var sbr = new SerializedBundleRef { 
assemblyQualifiedName = typeName }; sortedList.Add(sbr); } } // Link internal references foreach (var effect in sortedList) { string typeName = effect.assemblyQualifiedName; var bundle = effects.Find(b => b.settings.GetType().AssemblyQualifiedName == typeName); effect.bundle = bundle; } } void OnDisable() { // Have to check for null camera in case the user is doing back'n'forth between SRP and // legacy if (m_Camera != null) { if (m_LegacyCmdBufferBeforeReflections != null) m_Camera.RemoveCommandBuffer(CameraEvent.BeforeReflections, m_LegacyCmdBufferBeforeReflections); if (m_LegacyCmdBufferBeforeLighting != null) m_Camera.RemoveCommandBuffer(CameraEvent.BeforeLighting, m_LegacyCmdBufferBeforeLighting); if (m_LegacyCmdBufferOpaque != null) m_Camera.RemoveCommandBuffer(CameraEvent.BeforeImageEffectsOpaque, m_LegacyCmdBufferOpaque); if (m_LegacyCmdBuffer != null) m_Camera.RemoveCommandBuffer(CameraEvent.BeforeImageEffects, m_LegacyCmdBuffer); } temporalAntialiasing.Release(); m_LogHistogram.Release(); foreach (var bundle in m_Bundles.Values) bundle.Release(); m_Bundles.Clear(); m_PropertySheetFactory.Release(); if (debugLayer != null) debugLayer.OnDisable(); // Might be an issue if several layers are blending in the same frame... TextureLerper.instance.Clear(); haveBundlesBeenInited = false; } // Called everytime the user resets the component from the inspector and more importantly // the first time it's added to a GameObject. As we don't have added/removed event for // components, this will do fine void Reset() { volumeTrigger = transform; } void OnPreCull() { // Unused in scriptable render pipelines if (RuntimeUtilities.scriptableRenderPipelineActive) return; if (m_Camera == null || m_CurrentContext == null) InitLegacy(); // Postprocessing does tweak load/store actions when it uses render targets. 
// But when using builtin render pipeline, Camera will silently apply viewport when setting render target, // meaning that Postprocessing might think that it is rendering to fullscreen RT // and use LoadAction.DontCare freely, which will ruin the RT if we are using viewport. // It should actually check for having tiled architecture but this is not exposed to script, // so we are checking for mobile as a good substitute #if UNITY_2019_3_OR_NEWER if (SystemInfo.usesLoadStoreActions) #else if (Application.isMobilePlatform) #endif { Rect r = m_Camera.rect; if (Mathf.Abs(r.x) > 1e-6f || Mathf.Abs(r.y) > 1e-6f || Mathf.Abs(1.0f - r.width) > 1e-6f || Mathf.Abs(1.0f - r.height) > 1e-6f) { Debug.LogWarning("When used with builtin render pipeline, Postprocessing package expects to be used on a fullscreen Camera.\nPlease note that using Camera viewport may result in visual artefacts or some things not working.", m_Camera); } } // Resets the projection matrix from previous frame in case TAA was enabled. // We also need to force reset the non-jittered projection matrix here as it's not done // when ResetProjectionMatrix() is called and will break transparent rendering if TAA // is switched off and the FOV or any other camera property changes. 
if (m_CurrentContext.IsTemporalAntialiasingActive()) { #if UNITY_2018_2_OR_NEWER if (!m_Camera.usePhysicalProperties) #endif { m_Camera.ResetProjectionMatrix(); m_Camera.nonJitteredProjectionMatrix = m_Camera.projectionMatrix; #if (ENABLE_VR_MODULE && ENABLE_VR) if (m_Camera.stereoEnabled) { m_Camera.ResetStereoProjectionMatrices(); if (m_Camera.stereoActiveEye == Camera.MonoOrStereoscopicEye.Right) { m_Camera.CopyStereoDeviceProjectionMatrixToNonJittered(Camera.StereoscopicEye.Right); m_Camera.projectionMatrix = m_Camera.GetStereoNonJitteredProjectionMatrix(Camera.StereoscopicEye.Right); m_Camera.nonJitteredProjectionMatrix = m_Camera.projectionMatrix; m_Camera.SetStereoProjectionMatrix(Camera.StereoscopicEye.Right, m_Camera.GetStereoProjectionMatrix(Camera.StereoscopicEye.Right)); } else if (m_Camera.stereoActiveEye == Camera.MonoOrStereoscopicEye.Left || m_Camera.stereoActiveEye == Camera.MonoOrStereoscopicEye.Mono) { m_Camera.CopyStereoDeviceProjectionMatrixToNonJittered(Camera.StereoscopicEye.Left); // device to unjittered m_Camera.projectionMatrix = m_Camera.GetStereoNonJitteredProjectionMatrix(Camera.StereoscopicEye.Left); m_Camera.nonJitteredProjectionMatrix = m_Camera.projectionMatrix; m_Camera.SetStereoProjectionMatrix(Camera.StereoscopicEye.Left, m_Camera.GetStereoProjectionMatrix(Camera.StereoscopicEye.Left)); } } #endif } } #if (ENABLE_VR_MODULE && ENABLE_VR) if (m_Camera.stereoEnabled) { Shader.SetGlobalFloat(ShaderIDs.RenderViewportScaleFactor, XRSettings.renderViewportScale); } else #endif { Shader.SetGlobalFloat(ShaderIDs.RenderViewportScaleFactor, 1.0f); } BuildCommandBuffers(); } void OnPreRender() { // Unused in scriptable render pipelines // Only needed for multi-pass stereo right eye if (RuntimeUtilities.scriptableRenderPipelineActive || (m_Camera.stereoActiveEye != Camera.MonoOrStereoscopicEye.Right)) return; BuildCommandBuffers(); } static bool RequiresInitialBlit(Camera camera, PostProcessRenderContext context) { // 
[ImageEffectUsesCommandBuffer] is currently broken, FIXME return true; /* #if UNITY_2019_1_OR_NEWER if (camera.allowMSAA) // this shouldn't be necessary, but until re-tested on older Unity versions just do the blits return true; if (RuntimeUtilities.scriptableRenderPipelineActive) // Should never be called from SRP return true; return false; #else return true; #endif */ } void UpdateSrcDstForOpaqueOnly(ref int src, ref int dst, PostProcessRenderContext context, RenderTargetIdentifier cameraTarget, int opaqueOnlyEffectsRemaining) { if (src > -1) context.command.ReleaseTemporaryRT(src); context.source = context.destination; src = dst; if (opaqueOnlyEffectsRemaining == 1) { context.destination = cameraTarget; } else { dst = m_TargetPool.Get(); context.destination = dst; context.GetScreenSpaceTemporaryRT(context.command, dst, 0, context.sourceFormat); } } void BuildCommandBuffers() { var context = m_CurrentContext; var sourceFormat = m_Camera.targetTexture ? m_Camera.targetTexture.format : (m_Camera.allowHDR ? 
RuntimeUtilities.defaultHDRRenderTextureFormat : RenderTextureFormat.Default); if (!RuntimeUtilities.isFloatingPointFormat(sourceFormat)) m_NaNKilled = true; context.Reset(); context.camera = m_Camera; context.sourceFormat = sourceFormat; // TODO: Investigate retaining command buffers on XR multi-pass right eye m_LegacyCmdBufferBeforeReflections.Clear(); m_LegacyCmdBufferBeforeLighting.Clear(); m_LegacyCmdBufferOpaque.Clear(); m_LegacyCmdBuffer.Clear(); SetupContext(context); context.command = m_LegacyCmdBufferOpaque; TextureLerper.instance.BeginFrame(context); UpdateVolumeSystem(context.camera, context.command); // Lighting & opaque-only effects var aoBundle = GetBundle(); var aoSettings = aoBundle.CastSettings(); var aoRenderer = aoBundle.CastRenderer(); bool aoSupported = aoSettings.IsEnabledAndSupported(context); bool aoAmbientOnly = aoRenderer.IsAmbientOnly(context); bool isAmbientOcclusionDeferred = aoSupported && aoAmbientOnly; bool isAmbientOcclusionOpaque = aoSupported && !aoAmbientOnly; var ssrBundle = GetBundle(); var ssrSettings = ssrBundle.settings; var ssrRenderer = ssrBundle.renderer; bool isScreenSpaceReflectionsActive = ssrSettings.IsEnabledAndSupported(context); #if UNITY_2019_1_OR_NEWER if (context.stereoActive) context.UpdateSinglePassStereoState(context.IsTemporalAntialiasingActive(), aoSupported, isScreenSpaceReflectionsActive); #endif // Ambient-only AO is a special case and has to be done in separate command buffers if (isAmbientOcclusionDeferred) { var ao = aoRenderer.Get(); // Render as soon as possible - should be done async in SRPs when available context.command = m_LegacyCmdBufferBeforeReflections; ao.RenderAmbientOnly(context); // Composite with GBuffer right before the lighting pass context.command = m_LegacyCmdBufferBeforeLighting; ao.CompositeAmbientOnly(context); } else if (isAmbientOcclusionOpaque) { context.command = m_LegacyCmdBufferOpaque; aoRenderer.Get().RenderAfterOpaque(context); } bool isFogActive = 
fog.IsEnabledAndSupported(context); bool hasCustomOpaqueOnlyEffects = HasOpaqueOnlyEffects(context); int opaqueOnlyEffects = 0; opaqueOnlyEffects += isScreenSpaceReflectionsActive ? 1 : 0; opaqueOnlyEffects += isFogActive ? 1 : 0; opaqueOnlyEffects += hasCustomOpaqueOnlyEffects ? 1 : 0; // This works on right eye because it is resolved/populated at runtime var cameraTarget = new RenderTargetIdentifier(BuiltinRenderTextureType.CameraTarget); if (opaqueOnlyEffects > 0) { var cmd = m_LegacyCmdBufferOpaque; context.command = cmd; context.source = cameraTarget; context.destination = cameraTarget; int srcTarget = -1; int dstTarget = -1; UpdateSrcDstForOpaqueOnly(ref srcTarget, ref dstTarget, context, cameraTarget, opaqueOnlyEffects + 1); // + 1 for blit if (RequiresInitialBlit(m_Camera, context) || opaqueOnlyEffects == 1) { cmd.BuiltinBlit(context.source, context.destination, RuntimeUtilities.copyStdMaterial, stopNaNPropagation ? 1 : 0); UpdateSrcDstForOpaqueOnly(ref srcTarget, ref dstTarget, context, cameraTarget, opaqueOnlyEffects); } if (isScreenSpaceReflectionsActive) { ssrRenderer.RenderOrLog(context); opaqueOnlyEffects--; UpdateSrcDstForOpaqueOnly(ref srcTarget, ref dstTarget, context, cameraTarget, opaqueOnlyEffects); } if (isFogActive) { fog.Render(context); opaqueOnlyEffects--; UpdateSrcDstForOpaqueOnly(ref srcTarget, ref dstTarget, context, cameraTarget, opaqueOnlyEffects); } if (hasCustomOpaqueOnlyEffects) RenderOpaqueOnly(context); cmd.ReleaseTemporaryRT(srcTarget); } // Post-transparency stack int tempRt = -1; bool forceNanKillPass = (!m_NaNKilled && stopNaNPropagation && RuntimeUtilities.isFloatingPointFormat(sourceFormat)); bool vrSinglePassInstancingEnabled = context.stereoActive && context.numberOfEyes > 1 && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePassInstanced; if (!vrSinglePassInstancingEnabled && (RequiresInitialBlit(m_Camera, context) || forceNanKillPass)) { int width = context.width; #if 
UNITY_2019_1_OR_NEWER && ENABLE_VR_MODULE && ENABLE_VR var xrDesc = XRSettings.eyeTextureDesc; if (context.stereoActive && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePass) width = xrDesc.width; #endif tempRt = m_TargetPool.Get(); context.GetScreenSpaceTemporaryRT(m_LegacyCmdBuffer, tempRt, 0, sourceFormat, RenderTextureReadWrite.sRGB, FilterMode.Bilinear, width); m_LegacyCmdBuffer.BuiltinBlit(cameraTarget, tempRt, RuntimeUtilities.copyStdMaterial, stopNaNPropagation ? 1 : 0); if (!m_NaNKilled) m_NaNKilled = stopNaNPropagation; context.source = tempRt; } else { context.source = cameraTarget; } context.destination = cameraTarget; #if UNITY_2019_1_OR_NEWER if (finalBlitToCameraTarget && !m_CurrentContext.stereoActive && !RuntimeUtilities.scriptableRenderPipelineActive && DynamicResolutionAllowsFinalBlitToCameraTarget()) { if (m_Camera.targetTexture) { context.destination = m_Camera.targetTexture.colorBuffer; } else { context.flip = true; context.destination = Display.main.colorBuffer; } } #endif context.command = m_LegacyCmdBuffer; Render(context); if (tempRt > -1) m_LegacyCmdBuffer.ReleaseTemporaryRT(tempRt); } void OnPostRender() { // Unused in scriptable render pipelines if (RuntimeUtilities.scriptableRenderPipelineActive) return; if (m_CurrentContext.IsTemporalAntialiasingActive()) { #if UNITY_2018_2_OR_NEWER // TAA calls SetProjectionMatrix so if the camera projection mode was physical, it gets set to explicit. So we set it back to physical. if (m_CurrentContext.physicalCamera) m_Camera.usePhysicalProperties = true; else #endif { // The camera must be reset on precull and post render to avoid issues with alpha when toggling TAA. 
m_Camera.ResetProjectionMatrix(); #if (ENABLE_VR_MODULE && ENABLE_VR) if (m_CurrentContext.stereoActive) { if (RuntimeUtilities.isSinglePassStereoEnabled || m_Camera.stereoActiveEye == Camera.MonoOrStereoscopicEye.Right) { m_Camera.ResetStereoProjectionMatrices(); // copy the left eye onto the projection matrix so that we're using the correct projection matrix after calling m_Camera.ResetProjectionMatrix(); above. if (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.MultiPass) m_Camera.projectionMatrix = m_Camera.GetStereoProjectionMatrix(Camera.StereoscopicEye.Left); } } #endif } } } /// /// Grabs the bundle for the given effect type. /// /// An effect type. /// The bundle for the effect of type public PostProcessBundle GetBundle() where T : PostProcessEffectSettings { return GetBundle(typeof(T)); } /// /// Grabs the bundle for the given effect type. /// /// An effect type. /// The bundle for the effect of type public PostProcessBundle GetBundle(Type settingsType) { Assert.IsTrue(m_Bundles.ContainsKey(settingsType), "Invalid type"); return m_Bundles[settingsType]; } /// /// Gets the current settings for a given effect. /// /// The type of effect to look for /// The current state of an effect public T GetSettings() where T : PostProcessEffectSettings { return GetBundle().CastSettings(); } /// /// Utility method to bake a multi-scale volumetric obscurance map for the current camera. /// This will only work if ambient occlusion is active in the scene. /// /// The command buffer to use for rendering steps /// The camera to render ambient occlusion for /// The destination render target /// The depth map to use. If null, it will use the depth map /// from the given camera /// Should the result be inverted? /// Should use MSAA? public void BakeMSVOMap(CommandBuffer cmd, Camera camera, RenderTargetIdentifier destination, RenderTargetIdentifier? 
depthMap, bool invert, bool isMSAA = false) { var bundle = GetBundle(); var renderer = bundle.CastRenderer().GetMultiScaleVO(); renderer.SetResources(m_Resources); renderer.GenerateAOMap(cmd, camera, destination, depthMap, invert, isMSAA); } internal void OverrideSettings(List baseSettings, float interpFactor) { // Go through all settings & overriden parameters for the given volume and lerp values foreach (var settings in baseSettings) { if (!settings.active) continue; var target = GetBundle(settings.GetType()).settings; int count = settings.parameters.Count; for (int i = 0; i < count; i++) { var toParam = settings.parameters[i]; if (toParam.overrideState) { var fromParam = target.parameters[i]; fromParam.Interp(fromParam, toParam, interpFactor); } } } } // In the legacy render loop you have to explicitely set flags on camera to tell that you // need depth, depth+normals or motion vectors... This won't have any effect with most // scriptable render pipelines. void SetLegacyCameraFlags(PostProcessRenderContext context) { var flags = DepthTextureMode.None; foreach (var bundle in m_Bundles) { if (bundle.Value.settings.IsEnabledAndSupported(context)) flags |= bundle.Value.renderer.GetCameraFlags(); } // Special case for AA & lighting effects if (context.IsTemporalAntialiasingActive()) flags |= temporalAntialiasing.GetCameraFlags(); if (fog.IsEnabledAndSupported(context)) flags |= fog.GetCameraFlags(); if (debugLayer.debugOverlay != DebugOverlay.None) flags |= debugLayer.GetCameraFlags(); context.camera.depthTextureMode |= flags; cameraDepthFlags = flags; } /// /// This method should be called whenever you need to reset any temporal effect, e.g. when /// doing camera cuts. /// public void ResetHistory() { foreach (var bundle in m_Bundles) bundle.Value.ResetHistory(); temporalAntialiasing.ResetHistory(); } /// /// Checks if this layer has any active opaque-only effect. 
/// /// The current render context /// true if opaque-only effects are active, false otherwise public bool HasOpaqueOnlyEffects(PostProcessRenderContext context) { return HasActiveEffects(PostProcessEvent.BeforeTransparent, context); } /// /// Checks if this layer has any active effect at the given injection point. /// /// The injection point to look for /// The current render context /// true if any effect at the given injection point is active, false /// otherwise public bool HasActiveEffects(PostProcessEvent evt, PostProcessRenderContext context) { var list = sortedBundles[evt]; foreach (var item in list) { bool enabledAndSupported = item.bundle.settings.IsEnabledAndSupported(context); if (context.isSceneView) { if (item.bundle.attribute.allowInSceneView && enabledAndSupported) return true; } else if (enabledAndSupported) { return true; } } return false; } void SetupContext(PostProcessRenderContext context) { // Juggling required when a scene with post processing is loaded from an asset bundle // See #1148230 // Additional !RuntimeUtilities.isValidResources() to fix #1262826 // The static member s_Resources is unset by addressable. The code is ill formed as it // is not made to handle multiple scene. 
if (m_OldResources != m_Resources || !RuntimeUtilities.isValidResources()) { RuntimeUtilities.UpdateResources(m_Resources); m_OldResources = m_Resources; } m_IsRenderingInSceneView = context.camera.cameraType == CameraType.SceneView; context.isSceneView = m_IsRenderingInSceneView; context.resources = m_Resources; context.propertySheets = m_PropertySheetFactory; context.debugLayer = debugLayer; context.antialiasing = antialiasingMode; context.temporalAntialiasing = temporalAntialiasing; context.logHistogram = m_LogHistogram; #if UNITY_2018_2_OR_NEWER context.physicalCamera = context.camera.usePhysicalProperties; #endif SetLegacyCameraFlags(context); // Prepare debug overlay debugLayer.SetFrameSize(context.width, context.height); // Unsafe to keep this around but we need it for OnGUI events for debug views // Will be removed eventually m_CurrentContext = context; } /// /// Updates the state of the volume system. This should be called before any other /// post-processing method when running in a scriptable render pipeline. You don't need to /// call this method when running in one of the builtin pipelines. /// /// The currently rendering camera. /// A command buffer to fill. public void UpdateVolumeSystem(Camera cam, CommandBuffer cmd) { if (m_SettingsUpdateNeeded) { cmd.BeginSample("VolumeBlending"); PostProcessManager.instance.UpdateSettings(this, cam); cmd.EndSample("VolumeBlending"); m_TargetPool.Reset(); // TODO: fix me once VR support is in SRP // Needed in SRP so that _RenderViewportScaleFactor isn't 0 if (RuntimeUtilities.scriptableRenderPipelineActive) Shader.SetGlobalFloat(ShaderIDs.RenderViewportScaleFactor, 1f); } m_SettingsUpdateNeeded = false; } /// /// Renders effects in the bucket. You /// should call before calling this method as it won't /// automatically blit source into destination if no opaque-only effect is active. /// /// The current post-processing context. 
public void RenderOpaqueOnly(PostProcessRenderContext context)
{
    // In builtin pipelines SetupContext() has already run from the camera callbacks;
    // in an SRP this is the entry point, so set the context up here.
    if (RuntimeUtilities.scriptableRenderPipelineActive)
        SetupContext(context);

    TextureLerper.instance.BeginFrame(context);

    // Update & override layer settings first (volume blending), will only be done once per
    // frame, either here or in Render() if there isn't any opaque-only effect to render.
    // TODO: should be removed, keeping this here for older SRPs
    UpdateVolumeSystem(context.camera, context.command);

    RenderList(sortedBundles[PostProcessEvent.BeforeTransparent], context, "OpaqueOnly");
}

/// <summary>
/// Renders all effects not in the <see cref="PostProcessEvent.BeforeTransparent"/> bucket.
/// </summary>
/// <param name="context">The current post-processing context.</param>
public void Render(PostProcessRenderContext context)
{
    if (RuntimeUtilities.scriptableRenderPipelineActive)
        SetupContext(context);

    TextureLerper.instance.BeginFrame(context);
    var cmd = context.command;

    // Update & override layer settings first (volume blending) if the opaque only pass
    // hasn't been called this frame.
    // TODO: should be removed, keeping this here for older SRPs
    UpdateVolumeSystem(context.camera, context.command);

    // Do a NaN killing pass if needed
    int lastTarget = -1;

    // Remember the original source so it can be restored per-eye in stereo rendering.
    RenderTargetIdentifier cameraTexture = context.source;

#if UNITY_2019_1_OR_NEWER
    // Single-pass double-wide stereo: temporarily drop to mono so fullscreen blits cover
    // one eye at a time; restored after the eye loop below.
    if (context.stereoActive && context.numberOfEyes > 1 && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePass)
    {
        cmd.SetSinglePassStereo(SinglePassStereoMode.None);
        cmd.DisableShaderKeyword("UNITY_SINGLE_PASS_STEREO");
    }
#endif

    for (int eye = 0; eye < context.numberOfEyes; eye++)
    {
        bool preparedStereoSource = false;

        // NaN-killing pass: copies the source through a shader pass that replaces
        // invalid pixels, and doubles as the per-eye source extraction in stereo.
        if (stopNaNPropagation && !m_NaNKilled)
        {
            lastTarget = m_TargetPool.Get();
            context.GetScreenSpaceTemporaryRT(cmd, lastTarget, 0, context.sourceFormat);

            if (context.stereoActive && context.numberOfEyes > 1)
            {
                if (context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePassInstanced)
                {
                    cmd.BlitFullscreenTriangleFromTexArray(context.source, lastTarget, RuntimeUtilities.copyFromTexArraySheet, 1, false, eye);
                    preparedStereoSource = true;
                }
                else if (context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePass)
                {
                    cmd.BlitFullscreenTriangleFromDoubleWide(context.source, lastTarget, RuntimeUtilities.copyStdFromDoubleWideMaterial, 1, eye);
                    preparedStereoSource = true;
                }
            }
            else
                cmd.BlitFullscreenTriangle(context.source, lastTarget, RuntimeUtilities.copySheet, 1);

            context.source = lastTarget;
            m_NaNKilled = true;
        }

        // If the NaN pass didn't run (or didn't handle stereo), still extract the current
        // eye from the stereo source into its own temporary target.
        if (!preparedStereoSource && context.numberOfEyes > 1)
        {
            lastTarget = m_TargetPool.Get();
            context.GetScreenSpaceTemporaryRT(cmd, lastTarget, 0, context.sourceFormat);

            if (context.stereoActive)
            {
                if (context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePassInstanced)
                {
                    cmd.BlitFullscreenTriangleFromTexArray(context.source, lastTarget, RuntimeUtilities.copyFromTexArraySheet, 1, false, eye);
                    preparedStereoSource = true;
                }
                else if (context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePass)
                {
                    // Pass index picks the NaN-killing variant of the copy when requested.
                    cmd.BlitFullscreenTriangleFromDoubleWide(context.source, lastTarget, RuntimeUtilities.copyStdFromDoubleWideMaterial, stopNaNPropagation ? 1 : 0, eye);
                    preparedStereoSource = true;
                }
            }

            context.source = lastTarget;
        }

        // Do temporal anti-aliasing first
        if (context.IsTemporalAntialiasingActive())
        {
            if (!RuntimeUtilities.scriptableRenderPipelineActive)
            {
                if (context.stereoActive)
                {
                    // We only need to configure all of this once for stereo, during OnPreCull
                    if (context.camera.stereoActiveEye != Camera.MonoOrStereoscopicEye.Right)
                        temporalAntialiasing.ConfigureStereoJitteredProjectionMatrices(context);
                }
                else
                {
                    temporalAntialiasing.ConfigureJitteredProjectionMatrix(context);
                }
            }

            var taaTarget = m_TargetPool.Get();
            var finalDestination = context.destination;
            context.GetScreenSpaceTemporaryRT(cmd, taaTarget, 0, context.sourceFormat);
            context.destination = taaTarget;
            temporalAntialiasing.Render(context);
            context.source = taaTarget;
            context.destination = finalDestination;

            // TAA output replaces the previous intermediate target in the chain.
            if (lastTarget > -1)
                cmd.ReleaseTemporaryRT(lastTarget);

            lastTarget = taaTarget;
        }

        bool hasBeforeStackEffects = HasActiveEffects(PostProcessEvent.BeforeStack, context);
        bool hasAfterStackEffects = HasActiveEffects(PostProcessEvent.AfterStack, context) && !breakBeforeColorGrading;
        // A separate LDR final pass is needed for after-stack effects, FXAA or SMAA —
        // unless color grading is broken off, which skips everything past the stack.
        bool needsFinalPass = (hasAfterStackEffects
            || (antialiasingMode == Antialiasing.FastApproximateAntialiasing) || (antialiasingMode == Antialiasing.SubpixelMorphologicalAntialiasing && subpixelMorphologicalAntialiasing.IsSupported()))
            && !breakBeforeColorGrading;

        // Right before the builtin stack
        if (hasBeforeStackEffects)
            lastTarget = RenderInjectionPoint(PostProcessEvent.BeforeStack, context, "BeforeStack", lastTarget);

        // Builtin stack
        lastTarget = RenderBuiltins(context, !needsFinalPass, lastTarget, eye);

        // After the builtin stack but before the final pass (before FXAA & Dithering)
        if (hasAfterStackEffects)
            lastTarget = RenderInjectionPoint(PostProcessEvent.AfterStack, context, "AfterStack", lastTarget);

        // And close with the final pass
        if (needsFinalPass)
            RenderFinalPass(context, lastTarget, eye);

        // Restore the untouched stereo source for the next eye's iteration.
        if (context.stereoActive)
            context.source = cameraTexture;
    }

#if UNITY_2019_1_OR_NEWER
    // Restore single-pass double-wide stereo state disabled before the eye loop.
    if (context.stereoActive && context.numberOfEyes > 1 && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePass)
    {
        cmd.SetSinglePassStereo(SinglePassStereoMode.SideBySide);
        cmd.EnableShaderKeyword("UNITY_SINGLE_PASS_STEREO");
    }
#endif

    // Render debug monitors & overlay if requested
    debugLayer.RenderSpecialOverlays(context);
    debugLayer.RenderMonitors(context);

    // End frame cleanup
    TextureLerper.instance.EndFrame();
    debugLayer.EndFrame();
    m_SettingsUpdateNeeded = true;
    m_NaNKilled = false;
}

// Renders one injection point (BeforeStack/AfterStack) into a fresh temporary RT, makes
// that RT the new context source, and returns its id so the caller can release it later.
// releaseTargetAfterUse: previous temporary RT to release once this point has consumed it.
int RenderInjectionPoint(PostProcessEvent evt, PostProcessRenderContext context, string marker, int releaseTargetAfterUse = -1)
{
    int tempTarget = m_TargetPool.Get();
    var finalDestination = context.destination;

    var cmd = context.command;
    context.GetScreenSpaceTemporaryRT(cmd, tempTarget, 0, context.sourceFormat);
    context.destination = tempTarget;
    RenderList(sortedBundles[evt], context, marker);
    context.source = tempTarget;
    context.destination = finalDestination;

    if (releaseTargetAfterUse > -1)
        cmd.ReleaseTemporaryRT(releaseTargetAfterUse);

    return tempTarget;
}

// Renders a sorted bundle list by ping-ponging between at most two temporary RTs.
// NOTE(review): the `list` parameter's generic type argument appears to have been lost in
// extraction (elements expose `.bundle`) — confirm against the original file.
void RenderList(List list, PostProcessRenderContext context, string marker)
{
    var cmd = context.command;
    cmd.BeginSample(marker);

    // First gather active effects - we need this to manage render targets more efficiently
    m_ActiveEffects.Clear();
    for (int i = 0; i < list.Count; i++)
    {
        var effect = list[i].bundle;
        if (effect.settings.IsEnabledAndSupported(context))
        {
            if (!context.isSceneView || (context.isSceneView && effect.attribute.allowInSceneView))
                m_ActiveEffects.Add(effect.renderer);
        }
    }

    int count = m_ActiveEffects.Count;

    // If there's only one active effect, we can simply execute it and skip the rest
    if (count == 1)
    {
        m_ActiveEffects[0].RenderOrLog(context);
    }
    else
    {
        // Else create the target chain
        m_Targets.Clear();
        m_Targets.Add(context.source); // First target is always source

        int tempTarget1 = m_TargetPool.Get();
        int tempTarget2 = m_TargetPool.Get();

        // Alternate the two temporaries between effects (ping-pong).
        for (int i = 0; i < count - 1; i++)
            m_Targets.Add(i % 2 == 0 ? tempTarget1 : tempTarget2);

        m_Targets.Add(context.destination); // Last target is always destination

        // Render
        context.GetScreenSpaceTemporaryRT(cmd, tempTarget1, 0, context.sourceFormat);
        // The second temporary is only needed when three or more effects are chained.
        if (count > 2)
            context.GetScreenSpaceTemporaryRT(cmd, tempTarget2, 0, context.sourceFormat);

        for (int i = 0; i < count; i++)
        {
            context.source = m_Targets[i];
            context.destination = m_Targets[i + 1];
            m_ActiveEffects[i].RenderOrLog(context);
        }

        cmd.ReleaseTemporaryRT(tempTarget1);
        if (count > 2)
            cmd.ReleaseTemporaryRT(tempTarget2);
    }

    cmd.EndSample(marker);
}

// Applies the UV transform for the final blit: identity when the context requests a flip
// outside the scene view, otherwise the platform default (see ApplyDefaultFlip).
void ApplyFlip(PostProcessRenderContext context, MaterialPropertyBlock properties)
{
    if (context.flip && !context.isSceneView)
        properties.SetVector(ShaderIDs.UVTransform, new Vector4(1.0f, 1.0f, 0.0f, 0.0f));
    else
        ApplyDefaultFlip(properties);
}

// Default UV transform: vertically flipped on platforms where UV starts at the top.
void ApplyDefaultFlip(MaterialPropertyBlock properties)
{
    properties.SetVector(ShaderIDs.UVTransform, SystemInfo.graphicsUVStartsAtTop ? new Vector4(1.0f, -1.0f, 0.0f, 1.0f) : new Vector4(1.0f, 1.0f, 0.0f, 0.0f));
}

// Renders the builtin effect stack through the Uber shader. When isFinalPass is false the
// result goes to an intermediate RT whose id is returned (-1 when writing straight to the
// destination); the caller is responsible for releasing a returned target.
// NOTE(review): the RenderEffect(...) calls below presumably carried explicit generic
// effect-type arguments (DoF, motion blur, auto exposure, etc.) that appear to have been
// stripped in extraction — confirm against the original file.
int RenderBuiltins(PostProcessRenderContext context, bool isFinalPass, int releaseTargetAfterUse = -1, int eye = -1)
{
    var uberSheet = context.propertySheets.Get(context.resources.shaders.uber);
    uberSheet.ClearKeywords();
    uberSheet.properties.Clear();
    context.uberSheet = uberSheet;
    context.autoExposureTexture = RuntimeUtilities.whiteTexture;
    context.bloomBufferNameID = -1;

    if (isFinalPass && context.stereoActive && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePassInstanced)
        uberSheet.EnableKeyword("STEREO_INSTANCING_ENABLED");

    var cmd = context.command;
    cmd.BeginSample("BuiltinStack");

    int tempTarget = -1;
    var finalDestination = context.destination;

    if (!isFinalPass)
    {
        // Render to an intermediate target as this won't be the final pass
        tempTarget = m_TargetPool.Get();
        context.GetScreenSpaceTemporaryRT(cmd, tempTarget, 0, context.sourceFormat);
        context.destination = tempTarget;

        // Handle FXAA's keep alpha mode
        if (antialiasingMode == Antialiasing.FastApproximateAntialiasing && !fastApproximateAntialiasing.keepAlpha && RuntimeUtilities.hasAlpha(context.sourceFormat))
            uberSheet.properties.SetFloat(ShaderIDs.LumaInAlpha, 1f);
    }

    // Depth of field final combination pass used to be done in Uber which led to artifacts
    // when used at the same time as Bloom (because both effects used the same source, so
    // the stronger bloom was, the more DoF was eaten away in out of focus areas)
    int depthOfFieldTarget = RenderEffect(context, true);

    // Motion blur is a separate pass - could potentially be done after DoF depending on the
    // kind of results you're looking for...
    int motionBlurTarget = RenderEffect(context, true);

    // Prepare exposure histogram if needed
    if (ShouldGenerateLogHistogram(context))
        m_LogHistogram.Generate(context);

    // Uber effects
    // 1336238: override xrActiveEye in multipass with the currently rendered eye to fix flickering issue.
    int xrActiveEyeBackup = context.xrActiveEye;
    if (context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.MultiPass)
        context.xrActiveEye = eye;
    RenderEffect(context);
    context.xrActiveEye = xrActiveEyeBackup; // restore the eye

    uberSheet.properties.SetTexture(ShaderIDs.AutoExposureTex, context.autoExposureTexture);
    RenderEffect(context);
    RenderEffect(context);
    RenderEffect(context);
    RenderEffect(context);
    RenderEffect(context);

    if (!breakBeforeColorGrading)
        RenderEffect(context);

    if (isFinalPass)
    {
        uberSheet.EnableKeyword("FINALPASS");
        dithering.Render(context);
        ApplyFlip(context, uberSheet.properties);
    }
    else
    {
        ApplyDefaultFlip(uberSheet.properties);
    }

    // Pick the blit flavor matching the active stereo rendering mode.
    if (context.stereoActive && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePassInstanced)
    {
        uberSheet.properties.SetFloat(ShaderIDs.DepthSlice, eye);
        cmd.BlitFullscreenTriangleToTexArray(context.source, context.destination, uberSheet, 0, false, eye);
    }
    else if (isFinalPass && context.stereoActive && context.numberOfEyes > 1 && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePass)
    {
        cmd.BlitFullscreenTriangleToDoubleWide(context.source, context.destination, uberSheet, 0, eye);
    }
#if LWRP_1_0_0_OR_NEWER || UNIVERSAL_1_0_0_OR_NEWER
    else if (isFinalPass)
        cmd.BlitFullscreenTriangle(context.source, context.destination, uberSheet, 0, false, context.camera.pixelRect);
#endif
    else
        cmd.BlitFullscreenTriangle(context.source, context.destination, uberSheet, 0);

    context.source = context.destination;
    context.destination = finalDestination;

    // Release every temporary consumed by this stack.
    if (releaseTargetAfterUse > -1) cmd.ReleaseTemporaryRT(releaseTargetAfterUse);
    if (motionBlurTarget > -1) cmd.ReleaseTemporaryRT(motionBlurTarget);
    if (depthOfFieldTarget > -1) cmd.ReleaseTemporaryRT(depthOfFieldTarget);
    if (context.bloomBufferNameID > -1) cmd.ReleaseTemporaryRT(context.bloomBufferNameID);

    cmd.EndSample("BuiltinStack");

    return tempTarget;
}

// This pass will have to be disabled for HDR screen output as it's an LDR pass
// Final LDR pass: either a plain alpha-discarding copy (when color grading is broken off)
// or FXAA/SMAA + dithering through the finalPass shader.
void RenderFinalPass(PostProcessRenderContext context, int releaseTargetAfterUse = -1, int eye = -1)
{
    var cmd = context.command;
    cmd.BeginSample("FinalPass");

    if (breakBeforeColorGrading)
    {
        var sheet = context.propertySheets.Get(context.resources.shaders.discardAlpha);
        if (context.stereoActive && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePassInstanced)
            sheet.EnableKeyword("STEREO_INSTANCING_ENABLED");

        if (context.stereoActive && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePassInstanced)
        {
            sheet.properties.SetFloat(ShaderIDs.DepthSlice, eye);
            cmd.BlitFullscreenTriangleToTexArray(context.source, context.destination, sheet, 0, false, eye);
        }
        else if (context.stereoActive && context.numberOfEyes > 1 && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePass)
        {
            cmd.BlitFullscreenTriangleToDoubleWide(context.source, context.destination, sheet, 0, eye);
        }
        else
            cmd.BlitFullscreenTriangle(context.source, context.destination, sheet, 0);
    }
    else
    {
        var uberSheet = context.propertySheets.Get(context.resources.shaders.finalPass);
        uberSheet.ClearKeywords();
        uberSheet.properties.Clear();
        context.uberSheet = uberSheet;
        int tempTarget = -1;

        if (context.stereoActive && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePassInstanced)
            uberSheet.EnableKeyword("STEREO_INSTANCING_ENABLED");

        if (antialiasingMode == Antialiasing.FastApproximateAntialiasing)
        {
            uberSheet.EnableKeyword(fastApproximateAntialiasing.fastMode ? "FXAA_LOW" : "FXAA");

            // Alpha handling: keep it, consume the luma stored in it, or skip it entirely
            // when the source format has no alpha channel.
            if (RuntimeUtilities.hasAlpha(context.sourceFormat))
            {
                if (fastApproximateAntialiasing.keepAlpha)
                    uberSheet.EnableKeyword("FXAA_KEEP_ALPHA");
            }
            else
                uberSheet.EnableKeyword("FXAA_NO_ALPHA");
        }
        else if (antialiasingMode == Antialiasing.SubpixelMorphologicalAntialiasing && subpixelMorphologicalAntialiasing.IsSupported())
        {
            // SMAA renders into its own temporary, which then feeds the finalPass blit.
            tempTarget = m_TargetPool.Get();
            var finalDestination = context.destination;
            context.GetScreenSpaceTemporaryRT(context.command, tempTarget, 0, context.sourceFormat);
            context.destination = tempTarget;
            subpixelMorphologicalAntialiasing.Render(context);
            context.source = tempTarget;
            context.destination = finalDestination;
        }

        dithering.Render(context);

        ApplyFlip(context, uberSheet.properties);
        if (context.stereoActive && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePassInstanced)
        {
            uberSheet.properties.SetFloat(ShaderIDs.DepthSlice, eye);
            cmd.BlitFullscreenTriangleToTexArray(context.source, context.destination, uberSheet, 0, false, eye);
        }
        else if (context.stereoActive && context.numberOfEyes > 1 && context.stereoRenderingMode == PostProcessRenderContext.StereoRenderingMode.SinglePass)
        {
            cmd.BlitFullscreenTriangleToDoubleWide(context.source, context.destination, uberSheet, 0, eye);
        }
        else
#if LWRP_1_0_0_OR_NEWER || UNIVERSAL_1_0_0_OR_NEWER
            cmd.BlitFullscreenTriangle(context.source, context.destination, uberSheet, 0, false, context.camera.pixelRect);
#else
            cmd.BlitFullscreenTriangle(context.source, context.destination, uberSheet, 0);
#endif

        if (tempTarget > -1)
            cmd.ReleaseTemporaryRT(tempTarget);
    }

    if (releaseTargetAfterUse > -1)
        cmd.ReleaseTemporaryRT(releaseTargetAfterUse);

    cmd.EndSample("FinalPass");
}

// Renders a single builtin effect. Returns -1 when the effect was skipped or rendered
// in place; otherwise returns the id of the temporary RT the effect rendered into (the
// caller must release it). The new temporary also becomes the context source.
// NOTE(review): the generic parameter list appears to have been lost in extraction — the
// `where T : PostProcessEffectSettings` constraint implies this was declared as
// RenderEffect<T>(...), and GetBundle() was presumably GetBundle<T>(). Confirm against
// the original file before compiling.
int RenderEffect(PostProcessRenderContext context, bool useTempTarget = false) where T : PostProcessEffectSettings
{
    var effect = GetBundle();

    if (!effect.settings.IsEnabledAndSupported(context))
        return -1;

    if (m_IsRenderingInSceneView && !effect.attribute.allowInSceneView)
        return -1;

    if (!useTempTarget)
    {
        effect.renderer.RenderOrLog(context);
        return -1;
    }

    var finalDestination = context.destination;
    var tempTarget = m_TargetPool.Get();
    context.GetScreenSpaceTemporaryRT(context.command, tempTarget, 0, context.sourceFormat);
    context.destination = tempTarget;
    effect.renderer.RenderOrLog(context);
    context.source = tempTarget;
    context.destination = finalDestination;
    return tempTarget;
}

// The exposure histogram is needed when auto-exposure is active or when the debug light
// meter requests it.
// NOTE(review): GetBundle() was presumably GetBundle<AutoExposure>() before extraction
// stripped the generic argument — confirm against the original file.
bool ShouldGenerateLogHistogram(PostProcessRenderContext context)
{
    bool autoExpo = GetBundle().settings.IsEnabledAndSupported(context);
    bool lightMeter = debugLayer.lightMeter.IsRequestedAndSupported(context);
    return autoExpo || lightMeter;
}
}
}