#if ENABLE_UNITY_COLLECTIONS_CHECKS
#define ENABLE_UNITY_ALLOCATION_CHECKS
#endif
#pragma warning disable 0649
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using AOT;
using Unity.Burst;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Mathematics;
using UnityEngine.Assertions;
using Unity.Jobs.LowLevel.Unsafe;
namespace Unity.Collections
{
///
/// Manages custom memory allocators.
///
public static class AllocatorManager
{
// Allocates a Block of `items` elements of `sizeOf` bytes each from allocator `t`.
// The alignment is raised to at least the cache-line size to avoid false sharing.
// Throws (when allocation checks are enabled) if the allocator reports failure.
// NOTE(review): generic type parameter lists appear to have been stripped from this
// file's declarations (this was likely `AllocateBlock<T>`) — verify against original.
internal static Block AllocateBlock(ref this T t, int sizeOf, int alignOf, int items) where T : unmanaged, IAllocator
{
CheckValid(t.Handle);
Block block = default;
block.Range.Pointer = IntPtr.Zero; // null pointer signals "allocate" to Try()
block.Range.Items = items;
block.Range.Allocator = t.Handle;
block.BytesPerItem = sizeOf;
// Make the alignment multiple of cacheline size
block.Alignment = math.max(JobsUtility.CacheLineSize, alignOf);
var error = t.Try(ref block);
CheckFailedToAllocate(error);
return block;
}
// Allocates a Block for `items` values of unmanaged type U. The dummy `u` argument
// exists only so U can be inferred at the call site; its value is ignored.
// NOTE(review): likely originally `AllocateBlock<T, U>` with `SizeOf<U>()`/`AlignOf<U>()`.
internal static Block AllocateBlock(ref this T t, U u, int items) where T : unmanaged, IAllocator where U : unmanaged
{
return AllocateBlock(ref t, UnsafeUtility.SizeOf(), UnsafeUtility.AlignOf(), items);
}
// Raw allocation convenience: allocates a block and returns only its pointer,
// discarding the Block metadata (callers must remember size/alignment to free).
internal static unsafe void* Allocate(ref this T t, int sizeOf, int alignOf, int items) where T : unmanaged, IAllocator
{
return (void*)AllocateBlock(ref t, sizeOf, alignOf, items).Range.Pointer;
}
// Typed allocation: returns a U* sized for `items` values of unmanaged type U.
// The dummy `u` argument exists only for type inference; its value is ignored.
internal static unsafe U* Allocate(ref this T t, U u, int items) where T : unmanaged, IAllocator where U : unmanaged
{
return (U*)Allocate(ref t, UnsafeUtility.SizeOf(), UnsafeUtility.AlignOf(), items);
}
// Like the typed Allocate above, but U is only constrained to `struct` (it may contain
// managed references), so the result cannot be a typed U* and is returned as void*.
internal static unsafe void* AllocateStruct(ref this T t, U u, int items) where T : unmanaged, IAllocator where U : struct
{
return (void*)Allocate(ref t, UnsafeUtility.SizeOf(), UnsafeUtility.AlignOf(), items);
}
// Frees a previously allocated block. Setting Range.Items to 0 is the protocol that
// tells Try() "deallocate" (see Try's dispatch on Pointer/Items/Bytes).
internal static unsafe void FreeBlock(ref this T t, ref Block block) where T : unmanaged, IAllocator
{
CheckValid(t.Handle);
block.Range.Items = 0; // Items == 0 with a non-null Pointer means "free"
var error = t.Try(ref block);
CheckFailedToFree(error);
}
// Frees `items` elements of `sizeOf` bytes at `pointer`. A null pointer is a no-op,
// matching C `free` semantics. AllocatedItems is filled in because some allocators
// need the original allocation size to deallocate correctly.
internal static unsafe void Free(ref this T t, void* pointer, int sizeOf, int alignOf, int items) where T : unmanaged, IAllocator
{
if (pointer == null)
return;
Block block = default;
block.AllocatedItems = items;
block.Range.Pointer = (IntPtr)pointer;
block.BytesPerItem = sizeOf;
block.Alignment = alignOf;
t.FreeBlock(ref block);
}
// Typed free: deallocates `items` values of unmanaged type U previously allocated
// with the typed Allocate above.
internal static unsafe void Free(ref this T t, U* pointer, int items) where T : unmanaged, IAllocator where U : unmanaged
{
Free(ref t, pointer, UnsafeUtility.SizeOf(), UnsafeUtility.AlignOf(), items);
}
/// <summary>
/// Allocates memory from an allocator.
/// </summary>
/// <param name="handle">A handle to the allocator.</param>
/// <param name="itemSizeInBytes">The number of bytes to allocate per item.</param>
/// <param name="alignmentInBytes">The alignment in bytes (must be a power of two).</param>
/// <param name="items">The number of values to allocate space for. Defaults to 1.</param>
/// <returns>A pointer to the allocated memory.</returns>
public unsafe static void* Allocate(AllocatorHandle handle, int itemSizeInBytes, int alignmentInBytes, int items = 1)
{
return handle.Allocate(itemSizeInBytes, alignmentInBytes, items);
}
/// <summary>
/// Allocates enough memory for an unmanaged value of a given type.
/// </summary>
/// <param name="handle">A handle to the allocator.</param>
/// <param name="items">The number of values to allocate space for. Defaults to 1.</param>
/// <returns>A pointer to the allocated memory.</returns>
// NOTE(review): the generic parameter list (`<T>`) appears stripped from this
// declaration — T is used in the signature but not declared here.
public unsafe static T* Allocate(AllocatorHandle handle, int items = 1) where T : unmanaged
{
return handle.Allocate(default(T), items);
}
/// <summary>
/// Frees an allocation.
/// </summary>
/// <remarks>For some allocators, the size of the allocation must be known to properly deallocate.
/// Other allocators only need the pointer when deallocating and so will ignore `itemSizeInBytes`, `alignmentInBytes` and `items`.</remarks>
/// <param name="handle">A handle to the allocator.</param>
/// <param name="pointer">A pointer to the allocated memory.</param>
/// <param name="itemSizeInBytes">The size in bytes of each value.</param>
/// <param name="alignmentInBytes">The alignment in bytes (must be a power of two).</param>
/// <param name="items">The number of values that the memory was allocated for.</param>
public unsafe static void Free(AllocatorHandle handle, void* pointer, int itemSizeInBytes, int alignmentInBytes, int items = 1)
{
handle.Free(pointer, itemSizeInBytes, alignmentInBytes, items);
}
/// <summary>
/// Frees an allocation whose size is not known to the caller.
/// </summary>
/// <param name="handle">A handle to the allocator.</param>
/// <param name="pointer">A pointer to the allocated memory.</param>
public unsafe static void Free(AllocatorHandle handle, void* pointer)
{
// Treats the allocation as a single byte; only valid for allocators that do not
// need the original size to deallocate (see the overload above).
handle.Free((byte*)pointer, 1);
}
/// <summary>
/// Frees an allocation of values of a given type.
/// </summary>
/// <remarks>For some allocators, the size of the allocation must be known to properly deallocate.
/// Other allocators only need the pointer when deallocating and so will ignore `T` and `items`.</remarks>
/// <param name="handle">A handle to the allocator.</param>
/// <param name="pointer">A pointer to the allocated memory.</param>
/// <param name="items">The number of values that the memory was allocated for.</param>
// NOTE(review): the generic parameter list (`<T>`) appears stripped from this declaration.
public unsafe static void Free(AllocatorHandle handle, T* pointer, int items = 1) where T : unmanaged
{
handle.Free(pointer, items);
}
// Handles for the built-in Unity allocators. Their Index values mirror the members of
// the classic Allocator enum; indices below FirstUserIndex bypass the custom-allocator
// function table (see Try / TryLegacy).
/// <summary>Corresponds to Allocator.Invalid.</summary>
/// <value>Corresponds to Allocator.Invalid.</value>
public static readonly AllocatorHandle Invalid = new AllocatorHandle { Index = 0 };
/// <summary>Corresponds to Allocator.None.</summary>
/// <value>Corresponds to Allocator.None.</value>
public static readonly AllocatorHandle None = new AllocatorHandle { Index = 1 };
/// <summary>Corresponds to Allocator.Temp.</summary>
/// <value>Corresponds to Allocator.Temp.</value>
public static readonly AllocatorHandle Temp = new AllocatorHandle { Index = 2 };
/// <summary>Corresponds to Allocator.TempJob.</summary>
/// <value>Corresponds to Allocator.TempJob.</value>
public static readonly AllocatorHandle TempJob = new AllocatorHandle { Index = 3 };
/// <summary>Corresponds to Allocator.Persistent.</summary>
/// <value>Corresponds to Allocator.Persistent.</value>
public static readonly AllocatorHandle Persistent = new AllocatorHandle { Index = 4 };
/// <summary>Corresponds to Allocator.AudioKernel.</summary>
/// <value>Corresponds to Allocator.AudioKernel.</value>
public static readonly AllocatorHandle AudioKernel = new AllocatorHandle { Index = 5 };
/// <summary>
/// Used for calling an allocator function.
/// </summary>
/// <param name="allocatorState">Opaque pointer to the allocator's own state struct.</param>
/// <param name="block">The block to allocate, deallocate, or reallocate.</param>
/// <returns>0 on success; otherwise an allocator-specific error code.</returns>
public delegate int TryFunction(IntPtr allocatorState, ref Block block);
/// <summary>
/// Represents the allocator function used within an allocator.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct AllocatorHandle : IAllocator
{
// NOTE(review): generic type arguments appear to have been stripped throughout this
// struct (e.g. the `UnsafeList` and `SharedStatic` usages below lack their `<T>`
// lists) — verify against the original Unity.Collections source.
internal ref TableEntry TableEntry => ref SharedStatics.TableEntry.Ref.Data.ElementAt(Index);
// One "installed" bit per allocator index, packed 64 to a word: Index>>6 selects the
// word, Index&63 the bit within it.
internal bool IsInstalled => ((SharedStatics.IsInstalled.Ref.Data.ElementAt(Index>>6) >> (Index&63)) & 1) != 0;
// Bumps this allocator's official version, invalidating handles stamped with the old one.
internal void IncrementVersion()
{
#if ENABLE_UNITY_ALLOCATION_CHECKS
if (IsInstalled && IsCurrent)
{
// When allocator version is larger than 0x7FFF, allocator.ToAllocator
// returns a negative value which causes problem when comparing to Allocator.None.
// So only lower 15 bits of version is valid.
Version = OfficialVersion = (ushort)(++OfficialVersion & 0x7FFF);
}
#endif
}
// Invalidates all child safety handles / child allocators, then bumps the version so
// stale copies of this handle stop validating.
internal void Rewind()
{
#if ENABLE_UNITY_ALLOCATION_CHECKS
InvalidateDependents();
IncrementVersion();
#endif
}
// Installs this allocator's function/state entry into the global table.
internal void Install(TableEntry tableEntry)
{
#if ENABLE_UNITY_ALLOCATION_CHECKS
// if this allocator has never been visited before, then the unsafelists for its child allocators
// and child safety handles are uninitialized, which means their allocator is Allocator.Invalid.
// rectify that here.
if(ChildSafetyHandles.Allocator.Value != (int)Allocator.Persistent)
{
ChildSafetyHandles = new UnsafeList(0, Allocator.Persistent);
ChildAllocators = new UnsafeList(0, Allocator.Persistent);
}
#endif
Rewind();
TableEntry = tableEntry;
}
#if ENABLE_UNITY_ALLOCATION_CHECKS
// Per-index bookkeeping lives in global SharedStatic tables so the handle itself
// stays a plain 4-byte value type.
internal ref ushort OfficialVersion => ref SharedStatics.Version.Ref.Data.ElementAt(Index);
internal ref UnsafeList ChildSafetyHandles => ref SharedStatics.ChildSafetyHandles.Ref.Data.ElementAt(Index);
internal ref UnsafeList ChildAllocators => ref SharedStatics.ChildAllocators.Ref.Data.ElementAt(Index);
internal ref AllocatorHandle Parent => ref SharedStatics.Parent.Ref.Data.ElementAt(Index);
internal ref int IndexInParent => ref SharedStatics.IndexInParent.Ref.Data.ElementAt(Index);
// Version 0 conventionally means "whatever the current version is" (see Try(), which
// patches 0 to OfficialVersion before dispatch).
internal bool IsCurrent => (Version == 0) || (Version == OfficialVersion);
// Built-in allocators (below FirstUserIndex) are always considered valid.
internal bool IsValid => (Index < FirstUserIndex) || (IsInstalled && IsCurrent);
/// <summary>
/// Determines if the handle is still valid, because we intend to release it if it is.
/// </summary>
/// <param name="handle">Safety handle.</param>
internal static unsafe bool CheckExists(AtomicSafetyHandle handle)
{
bool res = false;
#if UNITY_DOTSRUNTIME
// In DOTS Runtime, AtomicSaftyHandle version is at 8 bytes offset of nodePtr
int* versionNode = (int*)((byte *)handle.nodePtr + sizeof(void *));
res = (handle.version == (*versionNode & AtomicSafetyNodeVersionMask.ReadWriteDisposeUnprotect));
#else
int* versionNode = (int*) (void*) handle.versionNode;
res = (handle.version == (*versionNode & AtomicSafetyHandle.ReadWriteDisposeCheck));
#endif
return res;
}
// Two safety handles are the same iff both their version and their node pointer match.
internal static unsafe bool AreTheSame(AtomicSafetyHandle a, AtomicSafetyHandle b)
{
if(a.version != b.version)
return false;
#if UNITY_DOTSRUNTIME
if(a.nodePtr != b.nodePtr)
#else
if(a.versionNode != b.versionNode)
#endif
return false;
return true;
}
// Two allocator handles are the same iff both Index and Version match.
internal static bool AreTheSame(AllocatorHandle a, AllocatorHandle b)
{
if(a.Index != b.Index)
return false;
if(a.Version != b.Version)
return false;
return true;
}
// Use-after-free tracking is only performed for valid allocators whose bookkeeping
// lists have been initialized (their backing allocator is Persistent; see Install).
internal bool NeedsUseAfterFreeTracking()
{
if(IsValid == false)
return false;
if(ChildSafetyHandles.Allocator.Value != (int)Allocator.Persistent)
return false;
return true;
}
/// <summary>
/// For internal use only.
/// </summary>
/// <value>For internal use only.</value>
public const int InvalidChildSafetyHandleIndex = -1;
// Registers a safety handle as a child of this allocator; returns its slot index,
// or InvalidChildSafetyHandleIndex when tracking is disabled.
internal int AddSafetyHandle(AtomicSafetyHandle handle)
{
if(!NeedsUseAfterFreeTracking())
return InvalidChildSafetyHandleIndex;
var result = ChildSafetyHandles.Length;
ChildSafetyHandles.Add(handle);
return result;
}
// Removes a previously added safety handle. `safetyHandleIndex` is only a hint:
// RemoveAtSwapBack may have moved the handle, so we scan backward from the hint.
internal bool TryRemoveSafetyHandle(AtomicSafetyHandle handle, int safetyHandleIndex)
{
if(!NeedsUseAfterFreeTracking())
return false;
if(safetyHandleIndex == InvalidChildSafetyHandleIndex)
return false;
safetyHandleIndex = math.min(safetyHandleIndex, ChildSafetyHandles.Length - 1);
while(safetyHandleIndex >= 0)
{
unsafe
{
var safetyHandle = ChildSafetyHandles.Ptr + safetyHandleIndex;
if(AreTheSame(*safetyHandle, handle))
{
ChildSafetyHandles.RemoveAtSwapBack(safetyHandleIndex);
return true;
}
}
--safetyHandleIndex;
}
return false;
}
/// <summary>
/// For internal use only.
/// </summary>
/// <value>For internal use only.</value>
public const int InvalidChildAllocatorIndex = -1;
// Registers `handle` as a child allocator of this one and records the back-references
// (Parent / IndexInParent write through to the global tables via ref properties).
internal int AddChildAllocator(AllocatorHandle handle)
{
if(!NeedsUseAfterFreeTracking())
return InvalidChildAllocatorIndex;
var result = ChildAllocators.Length;
ChildAllocators.Add(handle);
handle.Parent = this;
handle.IndexInParent = result;
return result;
}
// Removes a child allocator; like TryRemoveSafetyHandle, the index is a hint and we
// scan backward because swap-back removal may have moved the entry.
internal bool TryRemoveChildAllocator(AllocatorHandle handle, int childAllocatorIndex)
{
if(!NeedsUseAfterFreeTracking())
return false;
if(childAllocatorIndex == InvalidChildAllocatorIndex)
return false;
childAllocatorIndex = math.min(childAllocatorIndex, ChildAllocators.Length - 1);
while(childAllocatorIndex >= 0)
{
unsafe
{
var allocatorHandle = ChildAllocators.Ptr + childAllocatorIndex;
if(AreTheSame(*allocatorHandle, handle))
{
ChildAllocators.RemoveAtSwapBack(childAllocatorIndex);
return true;
}
}
--childAllocatorIndex;
}
return false;
}
// when you rewind an allocator, it invalidates and unregisters all of its child allocators - allocators that use as
// backing memory, memory that was allocated from this (parent) allocator. the rewind operation was itself unmanaged,
// until we added a managed global table of delegates alongside the unmanaged global table of function pointers. once
// this table was added, the "unregister" extension function became managed, because it manipulates a managed array of
// delegates.
// a workaround (UnmanagedUnregister) was found that makes it possible for rewind to become unmanaged again: only in
// the case that we rewind an allocator and invalidate all of its child allocators, we then unregister the child
// allocators without touching the managed array of delegates as well.
// this can "leak" delegates - it's possible for this to cause us to hold onto a GC reference to a delegate until
// the end of the program, long after the delegate is no longer needed. but, there are only 65,536 such slots to
// burn, and delegates are small data structures, and the leak ends when a delegate slot is reused, and most importantly,
// when we've rewound an allocator while child allocators remain registered, we are likely before long to encounter
// a use-before-free crash or a safety handle violation, both of which are likely to terminate the session before
// anything can leak.
[NotBurstCompatible]
internal void InvalidateDependents()
{
if(!NeedsUseAfterFreeTracking())
return;
// Release every still-live child safety handle.
for(var i = 0; i < ChildSafetyHandles.Length; ++i)
{
unsafe
{
AtomicSafetyHandle* handle = ChildSafetyHandles.Ptr + i;
if(CheckExists(*handle))
AtomicSafetyHandle.Release(*handle);
}
}
ChildSafetyHandles.Clear();
// Detach from our own parent before tearing down our children.
if(Parent.IsValid)
Parent.TryRemoveChildAllocator(this, IndexInParent);
Parent = default;
IndexInParent = InvalidChildAllocatorIndex;
for(var i = 0; i < ChildAllocators.Length; ++i)
{
unsafe
{
AllocatorHandle* handle = (AllocatorHandle*)ChildAllocators.Ptr + i;
if(handle->IsValid)
handle->UnmanagedUnregister(); // see above comment
}
}
ChildAllocators.Clear();
}
#endif
/// <summary>
/// Returns the AllocatorHandle of an allocator.
/// </summary>
/// <param name="a">The Allocator to convert.</param>
/// <returns>The AllocatorHandle of an allocator.</returns>
// The Allocator enum packs Index in the low 16 bits and Version in the high 16 bits.
public static implicit operator AllocatorHandle(Allocator a) => new AllocatorHandle
{
Index = (ushort)((uint)a & 0xFFFF),
Version = (ushort)((uint)a >> 16)
};
/// <summary>
/// This allocator's index into the global table of allocator functions.
/// </summary>
public ushort Index;
/// <summary>
/// This allocator's version number.
/// </summary>
/// <remarks>An allocator function is uniquely identified by its *combination* of Index and Version together: each
/// index has a version number that starts at 0; the version number is incremented each time the allocator is invalidated. Only the
/// lower 15 bits of Version is in use because when allocator version is larger than 0x7FFF, allocator.ToAllocator returns a negative value
/// which causes problem when comparing to Allocator.None.</remarks>
public ushort Version;
/// <summary>
/// The Index cast to int.
/// </summary>
public int Value => Index;
/// <summary>
/// Tries to allocate a block from this allocator.
/// </summary>
/// <param name="block">Outputs the allocated block.</param>
/// <param name="items">The number of values to allocate for.</param>
/// <returns>0 if successful. Otherwise, returns the error code from the allocator function.</returns>
// NOTE(review): the generic parameter list (`<T>`) appears stripped; the alignment
// expression presumably derives from sizeof(T): 1 << min(3, trailing zeros of size),
// i.e. the largest power of two dividing the size, capped at 8 — confirm.
public int TryAllocateBlock(out Block block, int items) where T : struct
{
block = new Block
{
Range = new Range { Items = items, Allocator = this },
BytesPerItem = UnsafeUtility.SizeOf(),
Alignment = 1 << math.min(3, math.tzcnt(UnsafeUtility.SizeOf()))
};
var returnCode = Try(ref block);
return returnCode;
}
/// <summary>
/// Allocates a block with this allocator function.
/// </summary>
/// <param name="items">The number of values to allocate for.</param>
/// <returns>The allocated block.</returns>
/// <exception cref="ArgumentException">Thrown if the allocator is not valid or if the allocation failed.</exception>
public Block AllocateBlock(int items) where T : struct
{
CheckValid(this);
var error = TryAllocateBlock(out Block block, items);
CheckAllocatedSuccessfully(error);
return block;
}
// Compiled out entirely unless allocation checks are enabled.
[Conditional("ENABLE_UNITY_ALLOCATION_CHECKS")]
static void CheckAllocatedSuccessfully(int error)
{
if (error != 0)
throw new ArgumentException($"Error {error}: Failed to Allocate");
}
/// <summary>
/// For internal use only.
/// </summary>
/// <value>For internal use only.</value>
// A bare handle carries no function pointer; dispatch goes through the global table.
public TryFunction Function => default;
/// <summary>
/// Tries to allocate the block with this allocator.
/// </summary>
/// <param name="block">The block to allocate.</param>
/// <returns>0 if successful. Otherwise, returns an error code.</returns>
public int Try(ref Block block)
{
// Stamp this handle into the block, then defer to the global dispatcher.
block.Range.Allocator = this;
var error = AllocatorManager.Try(ref block);
return error;
}
/// <summary>
/// This handle.
/// </summary>
/// <value>This handle.</value>
public AllocatorHandle Handle { get { return this; } set { this = value; } }
/// <summary>
/// Retrieve the Allocator associated with this allocator handle.
/// </summary>
/// <value>The Allocator retrieved.</value>
public Allocator ToAllocator
{
get
{
// Inverse of the implicit operator above: Version in the high 16 bits.
uint lo = Index;
uint hi = Version;
uint value = (hi << 16) | lo;
return (Allocator)value;
}
}
/// <summary>
/// Check whether this allocator is a custom allocator.
/// </summary>
/// <remarks>The AllocatorHandle is a custom allocator if its Index is larger or equal to `FirstUserIndex`.</remarks>
/// <value>True if this AllocatorHandle is a custom allocator.</value>
public bool IsCustomAllocator { get { return this.Index >= FirstUserIndex; } }
/// <summary>
/// Dispose the allocator. Rewinding invalidates all outstanding allocations' safety
/// handles and child allocators.
/// </summary>
public void Dispose()
{
Rewind();
}
}
/// <summary>
/// For internal use only.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct BlockHandle
{
/// <summary>
/// Represents the handle.
/// </summary>
/// <value>Represents the handle.</value>
public ushort Value;
}
/// <summary>
/// A range of allocated memory.
/// </summary>
/// <remarks>The name is perhaps misleading: only in combination with a Block does
/// a `Range` have sufficient information to represent the number of bytes in an allocation. The reason `Range` is its own type that's separate from `Block`
/// stems from some efficiency concerns in the implementation details. In most cases, a `Range` is only used in conjunction with an associated `Block`.
/// </remarks>
[StructLayout(LayoutKind.Sequential)]
public struct Range : IDisposable
{
/// <summary>
/// Pointer to the start of this range.
/// </summary>
public IntPtr Pointer; // 0
/// <summary>
/// Number of items allocated in this range.
/// </summary>
/// <remarks>The actual allocation may be larger.</remarks>
public int Items; // 8
/// <summary>
/// The allocator function used for this range.
/// </summary>
public AllocatorHandle Allocator; // 12
/// <summary>
/// Deallocates the memory represented by this range.
/// </summary>
/// <remarks>
/// Same as disposing the Block which contains this range.
///
/// Cannot be used with allocators which need the allocation size to deallocate.
/// </remarks>
public void Dispose()
{
// Wrap in a Block (size metadata zeroed), free it, and copy back the cleared range.
Block block = new Block { Range = this };
block.Dispose();
this = block.Range;
}
}
/// <summary>
/// Represents an individual allocation within an allocator.
/// </summary>
/// <remarks>A block consists of a Range plus metadata about the type of elements for which the block was allocated.</remarks>
[StructLayout(LayoutKind.Sequential)]
public struct Block : IDisposable
{
/// <summary>
/// The range of memory encompassed by this block.
/// </summary>
public Range Range;
/// <summary>
/// Number of bytes per item.
/// </summary>
public int BytesPerItem;
/// <summary>
/// Number of items allocated for.
/// </summary>
public int AllocatedItems;
/// <summary>
/// Log2 of the byte alignment.
/// </summary>
/// <remarks>The alignment must always be power of 2. Storing the alignment as its log2 helps enforces this.</remarks>
public byte Log2Alignment;
/// <summary>
/// This field only exists to pad the `Block` struct. Ignore it.
/// </summary>
public byte Padding0;
/// <summary>
/// This field only exists to pad the `Block` struct. Ignore it.
/// </summary>
public ushort Padding1;
/// <summary>
/// This field only exists to pad the `Block` struct. Ignore it.
/// </summary>
public uint Padding2;
/// <summary>
/// Number of bytes requested for this block.
/// </summary>
/// <remarks>The actual allocation size may be larger due to alignment.</remarks>
// Fix: widen before multiplying — `int * int` overflows for requests >= 2 GiB
// even though the result is stored in a long.
public long Bytes => (long)BytesPerItem * Range.Items;
/// <summary>
/// Number of bytes allocated for this block.
/// </summary>
/// <remarks>The requested allocation size may be smaller. Any excess is due to alignment.</remarks>
// Fix: widen before multiplying, as above.
public long AllocatedBytes => (long)BytesPerItem * AllocatedItems;
/// <summary>
/// The alignment.
/// </summary>
/// <remarks>Must be a power of 2.
///
/// Set alignment *before* the allocation is made. Setting it after has no effect on the allocation.</remarks>
/// <param name="value">A new alignment. If not a power of 2, it will be rounded up to the next largest power of 2.</param>
/// <value>The alignment.</value>
public int Alignment
{
get => 1 << Log2Alignment;
// Rounds value up to the next power of two, then stores its log2.
set => Log2Alignment = (byte)(32 - math.lzcnt(math.max(1, value) - 1));
}
/// <summary>
/// Deallocates this block.
/// </summary>
/// <remarks>Same as TryFree (errors are ignored).</remarks>
public void Dispose()
{
TryFree();
}
/// <summary>
/// Attempts to allocate this block.
/// </summary>
/// <returns>0 if successful. Otherwise, returns the error code from the allocator function.</returns>
public int TryAllocate()
{
// A null pointer signals "allocate" to the dispatcher.
Range.Pointer = IntPtr.Zero;
return Try(ref this);
}
/// <summary>
/// Attempts to free this block.
/// </summary>
/// <returns>0 if successful. Otherwise, returns the error code from the allocator function.</returns>
public int TryFree()
{
// Zero items with a non-null pointer signals "free" to the dispatcher.
Range.Items = 0;
return Try(ref this);
}
/// <summary>
/// Allocates this block.
/// </summary>
/// <exception cref="ArgumentException">Thrown if safety checks are enabled and the allocation fails.</exception>
public void Allocate()
{
var error = TryAllocate();
CheckFailedToAllocate(error);
}
/// <summary>
/// Frees the block.
/// </summary>
/// <exception cref="ArgumentException">Thrown if safety checks are enabled and the deallocation fails.</exception>
public void Free()
{
var error = TryFree();
CheckFailedToFree(error);
}
// The two checks below compile out entirely unless allocation checks are enabled.
[Conditional("ENABLE_UNITY_ALLOCATION_CHECKS")]
void CheckFailedToAllocate(int error)
{
if (error != 0)
throw new ArgumentException($"Error {error}: Failed to Allocate {this}");
}
[Conditional("ENABLE_UNITY_ALLOCATION_CHECKS")]
void CheckFailedToFree(int error)
{
if (error != 0)
throw new ArgumentException($"Error {error}: Failed to Free {this}");
}
}
/// <summary>
/// An allocator function pointer.
/// </summary>
public interface IAllocator : IDisposable
{
/// <summary>
/// The allocator function. It can allocate, deallocate, or reallocate.
/// </summary>
TryFunction Function { get; }
/// <summary>
/// Invoke the allocator function.
/// </summary>
/// <param name="block">The block to allocate, deallocate, or reallocate.</param>
/// <returns>0 if successful. Otherwise, returns the error code from the allocator function.</returns>
int Try(ref Block block);
/// <summary>
/// This allocator.
/// </summary>
/// <value>This allocator.</value>
AllocatorHandle Handle { get; set; }
/// <summary>
/// Cast the Allocator index into Allocator
/// </summary>
Allocator ToAllocator { get; }
/// <summary>
/// Check whether an allocator is a custom allocator
/// </summary>
bool IsCustomAllocator { get; }
}
/// <summary>
/// Memory allocation Success status
/// </summary>
public const int kErrorNone = 0;
/// <summary>
/// Memory allocation Buffer Overflow status
/// </summary>
public const int kErrorBufferOverflow = -1;
#if !UNITY_IOS
// Burst-compiled callers skip this method entirely ([BurstDiscard]), leaving
// `useDelegate` false; regular C# callers set it true. This distinguishes the
// managed-delegate path from the function-pointer path in Try() below.
[BurstDiscard]
private static void CheckDelegate(ref bool useDelegate)
{
//@TODO: This should use BurstCompiler.IsEnabled once that is available as an efficient API.
useDelegate = true;
}
// Returns true when running as regular C# (mono/IL2CPP), false under Burst.
private static bool UseDelegate()
{
bool result = false;
CheckDelegate(ref result);
return result;
}
#endif
// Dispatches the block to its allocator's function pointer from the global table.
// NOTE(review): `FunctionPointer` presumably had a generic argument (TryFunction)
// stripped from this line — verify against original.
private static int allocate_block(ref Block block)
{
TableEntry tableEntry = default;
tableEntry = block.Range.Allocator.TableEntry;
var function = new FunctionPointer(tableEntry.function);
// this is a path for bursted caller, for non-Burst C#, it generates garbage each time we call Invoke
return function.Invoke(tableEntry.state, ref block);
}
#if !UNITY_IOS
// Managed (non-Burst) dispatch path: invokes the registered TryFunction delegate
// instead of the raw function pointer, avoiding per-call garbage from
// FunctionPointer.Invoke in mono. Discarded entirely under Burst compilation.
[BurstDiscard]
private static void forward_mono_allocate_block(ref Block block, ref int error)
{
TableEntry tableEntry = default;
tableEntry = block.Range.Allocator.TableEntry;
var index = block.Range.Allocator.Handle.Index;
if (index >= Managed.kMaxNumCustomAllocator)
{
throw new ArgumentException("Allocator index into TryFunction delegate table exceeds maximum.");
}
ref TryFunction function = ref Managed.TryFunctionDelegates[block.Range.Allocator.Handle.Index];
error = function(tableEntry.state, ref block);
}
#endif
// Maps a handle to the classic Allocator enum: built-in allocators (below
// FirstUserIndex) map directly by index, while custom allocators are reported as
// Persistent, which backs their storage.
internal static Allocator LegacyOf(AllocatorHandle handle)
{
return handle.Value < FirstUserIndex
? (Allocator)handle.Value
: Allocator.Persistent;
}
// Allocation path for the built-in (legacy) allocators, which bypass the custom
// allocator function table and go straight to Unity's unmanaged memory API.
// Protocol: null Pointer => allocate; Bytes == 0 => free; otherwise reallocate
// (unsupported here, returns -1).
static unsafe int TryLegacy(ref Block block)
{
if (block.Range.Pointer == IntPtr.Zero) // Allocate
{
block.Range.Pointer = (IntPtr)Memory.Unmanaged.Allocate(block.Bytes, block.Alignment, LegacyOf(block.Range.Allocator));
block.AllocatedItems = block.Range.Items;
return (block.Range.Pointer == IntPtr.Zero) ? -1 : 0;
}
if (block.Bytes == 0) // Free
{
// Allocator.None memory is not owned by us and must not be freed.
if (LegacyOf(block.Range.Allocator) != Allocator.None)
{
Memory.Unmanaged.Free((void*)block.Range.Pointer, LegacyOf(block.Range.Allocator));
}
block.Range.Pointer = IntPtr.Zero;
block.AllocatedItems = 0;
return 0;
}
// Reallocate (keep existing pointer and change size if possible. otherwise, allocate new thing and copy)
return -1;
}
/// <summary>
/// Invokes the allocator function of a block.
/// </summary>
/// <remarks>The allocator function is looked up from a global table.
///
/// - If the block range's Pointer is null, it will allocate.
/// - If the block range's Pointer is not null, it will reallocate.
/// - If the block range's Items is 0, it will deallocate.
/// </remarks>
/// <param name="block">The block to allocate, deallocate, or reallocate.</param>
/// <returns>0 if successful. Otherwise, returns the error code from the block's allocator function.</returns>
public static unsafe int Try(ref Block block)
{
// Built-in allocators take the fast legacy path.
if (block.Range.Allocator.Value < FirstUserIndex)
return TryLegacy(ref block);
TableEntry tableEntry = default;
tableEntry = block.Range.Allocator.TableEntry;
var function = new FunctionPointer(tableEntry.function);
#if ENABLE_UNITY_ALLOCATION_CHECKS
// if the allocator being passed in has a version of 0, that means "whatever the current version is."
// so we patch it here, with whatever the current version is...
if(block.Range.Allocator.Version == 0)
block.Range.Allocator.Version = block.Range.Allocator.OfficialVersion;
#endif
#if !UNITY_IOS
// Non-Burst callers go through the managed delegate table (no per-call garbage);
// Burst callers fall through to the function-pointer path.
if (UseDelegate())
{
int error = kErrorNone;
forward_mono_allocate_block(ref block, ref error);
return error;
}
#endif
return allocate_block(ref block);
}
/// <summary>
/// A stack allocator with no storage of its own. Uses the storage of its parent.
/// </summary>
/// <remarks>Allocations are bump-allocated from `m_storage`; only the most recent
/// allocation (the one at the top of the stack) can be freed.</remarks>
[BurstCompile(CompileSynchronously = true)]
internal struct StackAllocator : IAllocator, IDisposable
{
public AllocatorHandle Handle { get { return m_handle; } set { m_handle = value; } }
public Allocator ToAllocator { get { return m_handle.ToAllocator; } }
public bool IsCustomAllocator { get { return m_handle.IsCustomAllocator; } }
internal AllocatorHandle m_handle;
// The parent block this stack carves its allocations out of.
internal Block m_storage;
// Byte offset of the top of the stack within m_storage.
internal long m_top;
public void Initialize(Block storage)
{
m_storage = storage;
m_top = 0;
#if ENABLE_UNITY_ALLOCATION_CHECKS
// Register with the parent so rewinding the parent invalidates this allocator too.
m_storage.Range.Allocator.AddChildAllocator(Handle);
#endif
}
// Protocol: null Pointer => allocate; Bytes == 0 => free; otherwise reallocate
// (unsupported, returns -1).
public unsafe int Try(ref Block block)
{
if (block.Range.Pointer == IntPtr.Zero) // Allocate
{
if (m_top + block.Bytes > m_storage.Bytes)
{
return -1; // out of backing storage
}
block.Range.Pointer = (IntPtr)((byte*)m_storage.Range.Pointer + m_top);
block.AllocatedItems = block.Range.Items;
m_top += block.Bytes;
return 0;
}
if (block.Bytes == 0) // Free
{
// Only the topmost (most recent) allocation can be freed — strict LIFO.
if ((byte*)block.Range.Pointer - (byte*)m_storage.Range.Pointer == (long)(m_top - block.AllocatedBytes))
{
m_top -= block.AllocatedBytes;
block.Range.Pointer = IntPtr.Zero;
block.AllocatedItems = 0;
return 0;
}
return -1;
}
// Reallocate (keep existing pointer and change size if possible. otherwise, allocate new thing and copy)
return -1;
}
// Static entry point invoked through the global function table.
[BurstCompile(CompileSynchronously = true)]
[MonoPInvokeCallback(typeof(TryFunction))]
public static unsafe int Try(IntPtr allocatorState, ref Block block)
{
return ((StackAllocator*)allocatorState)->Try(ref block);
}
public TryFunction Function => Try;
public void Dispose()
{
m_handle.Rewind();
}
}
/// <summary>
/// Slab allocator with no backing storage.
/// </summary>
/// <remarks>Divides its parent's storage into fixed-size slabs tracked by an occupancy
/// bitmap (32 slabs per int). Each allocation consumes exactly one slab, so requests
/// larger than the slab size fail.</remarks>
// NOTE(review): `FixedList4096Bytes` presumably had a generic argument (likely int)
// stripped from the Occupied field — verify against original.
[BurstCompile(CompileSynchronously = true)]
internal struct SlabAllocator : IAllocator, IDisposable
{
public AllocatorHandle Handle { get { return m_handle; } set { m_handle = value; } }
public Allocator ToAllocator { get { return m_handle.ToAllocator; } }
public bool IsCustomAllocator { get { return m_handle.IsCustomAllocator; } }
internal AllocatorHandle m_handle;
internal Block Storage;
internal int Log2SlabSizeInBytes;
// Occupancy bitmap: bit set = slab in use.
internal FixedList4096Bytes Occupied;
internal long budgetInBytes;
internal long allocatedBytes;
public long BudgetInBytes => budgetInBytes;
public long AllocatedBytes => allocatedBytes;
internal int SlabSizeInBytes
{
get => 1 << Log2SlabSizeInBytes;
// Rounds value up to the next power of two, storing its log2.
set => Log2SlabSizeInBytes = (byte)(32 - math.lzcnt(math.max(1, value) - 1));
}
// Total number of slabs that fit in the backing storage.
internal int Slabs => (int)(Storage.Bytes >> Log2SlabSizeInBytes);
internal void Initialize(Block storage, int slabSizeInBytes, long budget)
{
#if ENABLE_UNITY_ALLOCATION_CHECKS
// Register with the parent so rewinding the parent invalidates this allocator too.
storage.Range.Allocator.AddChildAllocator(Handle);
#endif
Assert.IsTrue((slabSizeInBytes & (slabSizeInBytes - 1)) == 0); // must be power of two
Storage = storage;
Log2SlabSizeInBytes = 0;
Occupied = default;
budgetInBytes = budget;
allocatedBytes = 0;
SlabSizeInBytes = slabSizeInBytes;
Occupied.Length = (Slabs + 31) / 32; // one int covers 32 slabs
}
// Protocol: null Pointer => allocate one slab; Bytes == 0 => free; otherwise
// reallocate (unsupported, returns -1).
public int Try(ref Block block)
{
if (block.Range.Pointer == IntPtr.Zero) // Allocate
{
if (block.Bytes + allocatedBytes > budgetInBytes)
return -2; //over allocator budget
if (block.Bytes > SlabSizeInBytes)
return -1; // request larger than one slab
// Scan the bitmap for the first free slab.
for (var wordIndex = 0; wordIndex < Occupied.Length; ++wordIndex)
{
var word = Occupied[wordIndex];
if (word == -1)
continue; // all 32 slabs in this word are occupied
for (var bitIndex = 0; bitIndex < 32; ++bitIndex)
if ((word & (1 << bitIndex)) == 0)
{
Occupied[wordIndex] |= 1 << bitIndex;
block.Range.Pointer = Storage.Range.Pointer +
(int)(SlabSizeInBytes * (wordIndex * 32U + bitIndex));
block.AllocatedItems = SlabSizeInBytes / block.BytesPerItem;
allocatedBytes += block.Bytes;
return 0;
}
}
return -1; // no free slab
}
if (block.Bytes == 0) // Free
{
// Recover the slab index from the pointer offset, then clear its bit.
var slabIndex = ((ulong)block.Range.Pointer - (ulong)Storage.Range.Pointer) >>
Log2SlabSizeInBytes;
int wordIndex = (int)(slabIndex >> 5);
int bitIndex = (int)(slabIndex & 31);
Occupied[wordIndex] &= ~(1 << bitIndex);
block.Range.Pointer = IntPtr.Zero;
var blockSizeInBytes = block.AllocatedItems * block.BytesPerItem;
allocatedBytes -= blockSizeInBytes;
block.AllocatedItems = 0;
return 0;
}
// Reallocate (keep existing pointer and change size if possible. otherwise, allocate new thing and copy)
return -1;
}
// Static entry point invoked through the global function table.
[BurstCompile(CompileSynchronously = true)]
[MonoPInvokeCallback(typeof(TryFunction))]
public static unsafe int Try(IntPtr allocatorState, ref Block block)
{
return ((SlabAllocator*)allocatorState)->Try(ref block);
}
public TryFunction Function => Try;
public void Dispose()
{
m_handle.Rewind();
}
}
// One slot of the global allocator function table: an unmanaged function pointer plus
// an opaque pointer to the allocator's state struct.
internal struct TableEntry
{
internal IntPtr function;
internal IntPtr state;
}
// Fixed inline array of 16 elements, built from explicit fields so it can live inside
// a Burst SharedStatic. NOTE(review): the `<T>` parameter list appears stripped from
// these Array* declarations — verify against original.
internal struct Array16 where T : unmanaged
{
internal T f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13, f14, f15;
}
// 16 x 16 = 256 inline elements.
internal struct Array256 where T : unmanaged
{
internal Array16 f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13, f14, f15;
}
// 16 x 256 = 4096 inline elements.
internal struct Array4096 where T : unmanaged
{
internal Array256 f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13, f14, f15;
}
// 8 x 4096 = 32768 inline elements — one slot per possible allocator index (ushort
// Index, upper half reserved). Indexed by raw pointer arithmetic over the contiguous
// fields; note ElementAt performs no bounds checking.
internal struct Array32768 : IIndexable where T : unmanaged
{
internal Array4096 f0, f1, f2, f3, f4, f5, f6, f7;
public int Length { get { return 32768; } set {} }
public ref T ElementAt(int index)
{
unsafe { fixed(Array4096* p = &f0) { return ref UnsafeUtility.AsRef((T*)p + index); } }
}
}
/// <summary>
/// Contains arrays of the allocator function pointers.
/// </summary>
// Each nested class keys a distinct Burst SharedStatic, giving one global per-index
// table (function pointers, versions, child-tracking lists) shared between Burst and
// managed code. NOTE(review): the SharedStatic generic arguments appear stripped —
// verify against original.
internal sealed class SharedStatics
{
internal sealed class IsInstalled { internal static readonly SharedStatic Ref = SharedStatic.GetOrCreate(); }
internal sealed class TableEntry { internal static readonly SharedStatic> Ref = SharedStatic>.GetOrCreate(); }
#if ENABLE_UNITY_ALLOCATION_CHECKS
internal sealed class Version { internal static readonly SharedStatic> Ref = SharedStatic>.GetOrCreate(); }
internal sealed class ChildSafetyHandles { internal static readonly SharedStatic>> Ref = SharedStatic>>.GetOrCreate(); }
internal sealed class ChildAllocators { internal static readonly SharedStatic>> Ref = SharedStatic>>.GetOrCreate(); }
internal sealed class Parent { internal static readonly SharedStatic> Ref = SharedStatic>.GetOrCreate(); }
internal sealed class IndexInParent { internal static readonly SharedStatic> Ref = SharedStatic>.GetOrCreate(); }
#endif
}
// Managed-side mirror of the global function table: holds TryFunction delegates so
// non-Burst callers can invoke allocators without per-call marshalling garbage.
internal static class Managed
{
#if !UNITY_IOS
/// <summary>
/// Maximum number of custom allocator slots.
/// </summary>
internal const int kMaxNumCustomAllocator = 32768;
internal static TryFunction[] TryFunctionDelegates = new TryFunction[kMaxNumCustomAllocator];
#endif
/// <summary>
/// Register TryFunction delegates for managed caller to avoid garbage collections
/// </summary>
/// <param name="index">Index into the TryFunction delegates table.</param>
/// <param name="function">TryFunction delegate to be registered.</param>
[NotBurstCompatible]
public static void RegisterDelegate(int index, TryFunction function)
{
#if !UNITY_IOS
if(index >= kMaxNumCustomAllocator)
{
throw new ArgumentException("index to be registered in TryFunction delegate table exceeds maximum.");
}
// Register TryFunction delegates for managed caller to avoid garbage collections
Managed.TryFunctionDelegates[index] = function;
#endif
}
/// <summary>
/// Unregister TryFunction delegate
/// </summary>
/// <param name="index">Index into the TryFunction delegates table.</param>
[NotBurstCompatible]
public static void UnregisterDelegate(int index)
{
#if !UNITY_IOS
if (index >= kMaxNumCustomAllocator)
{
throw new ArgumentException("index to be unregistered in TryFunction delegate table exceeds maximum.");
}
Managed.TryFunctionDelegates[index] = default;
#endif
}
}
/// <summary>
/// For internal use only. Present for lifecycle symmetry with <see cref="Shutdown"/>; currently a no-op.
/// </summary>
public static void Initialize()
{
}
/// <summary>
/// Saves an allocator's function pointers at a particular index in the global function table.
/// </summary>
/// <param name="handle">The global function table index at which to install the allocator function.</param>
/// <param name="allocatorState">IntPtr to allocator's custom state.</param>
/// <param name="functionPointer">The Burst function pointer to install in the global function table.</param>
/// <param name="function">The managed allocator delegate, registered so the GC keeps it alive.</param>
internal static void Install(AllocatorHandle handle,
    IntPtr allocatorState,
    FunctionPointer<TryFunction> functionPointer, // NOTE(review): generic argument restored after extraction loss
    TryFunction function)
{
    // A null function pointer means "uninstall" rather than "install".
    if (functionPointer.Value == IntPtr.Zero)
        handle.Unregister();
    else
    {
        // Claim this slot in the installed-allocators mask before publishing the table entry.
        int error = ConcurrentMask.TryAllocate(ref SharedStatics.IsInstalled.Ref.Data, handle.Value, 1);
        if (ConcurrentMask.Succeeded(error))
        {
            handle.Install(new TableEntry { state = allocatorState, function = functionPointer.Value });
            Managed.RegisterDelegate(handle.Index, function);
        }
    }
}
/// <summary>
/// Saves an allocator's function pointers at a particular index in the global function table.
/// </summary>
/// <param name="handle">The global function table index at which to install the allocator function.</param>
/// <param name="allocatorState">IntPtr to allocator's custom state.</param>
/// <param name="function">The allocator function to install in the global function table.</param>
internal static void Install(AllocatorHandle handle, IntPtr allocatorState, TryFunction function)
{
    // Burst-compile the delegate; a null delegate becomes a null pointer, which the
    // four-argument Install treats as an unregister request.
    var functionPointer = (function == null)
        ? new FunctionPointer<TryFunction>(IntPtr.Zero) // NOTE(review): generic argument restored after extraction loss
        : BurstCompiler.CompileFunctionPointer(function);
    Install(handle, allocatorState, functionPointer, function);
}
/// <summary>
/// Saves an allocator's function pointers in a free slot of the global function table. Thread safe.
/// </summary>
/// <param name="allocatorState">IntPtr to allocator's custom state.</param>
/// <param name="functionPointer">Function pointer to create or save in the function table.</param>
/// <returns>A handle to the newly registered allocator function, or a default (invalid) handle if no slot was free.</returns>
internal static AllocatorHandle Register(IntPtr allocatorState, FunctionPointer<TryFunction> functionPointer) // NOTE(review): generic argument restored after extraction loss
{
    var tableEntry = new TableEntry { state = allocatorState, function = functionPointer.Value };
    // Atomically find and claim a free slot, starting the search at the 64-bit word
    // containing FirstUserIndex so reserved indexes are never handed out.
    var error = ConcurrentMask.TryAllocate(ref SharedStatics.IsInstalled.Ref.Data, out int offset, (FirstUserIndex+63)>>6, SharedStatics.IsInstalled.Ref.Data.Length, 1);
    AllocatorHandle handle = default;
    if (ConcurrentMask.Succeeded(error))
    {
        handle.Index = (ushort)offset;
        handle.Install(tableEntry);
#if ENABLE_UNITY_ALLOCATION_CHECKS
        handle.Version = handle.OfficialVersion;
#endif
    }
    // On failure the default handle (index 0) is returned; callers check IsValid.
    return handle;
}
/// <summary>
/// Saves an allocator's function pointers in a free slot of the global function table. Thread safe.
/// </summary>
/// <typeparam name="T">The type of allocator to register.</typeparam>
/// <param name="t">Reference to the allocator.</param>
/// <exception cref="InvalidOperationException">Thrown (checks builds only) if no valid handle could be produced.</exception>
[NotBurstCompatible]
public static unsafe void Register<T>(ref this T t) where T : unmanaged, IAllocator // NOTE(review): method generic parameter restored after extraction loss
{
    var functionPointer = (t.Function == null)
        ? new FunctionPointer<TryFunction>(IntPtr.Zero)
        : BurstCompiler.CompileFunctionPointer(t.Function);
    // The allocator's own address is used as its state pointer; the caller must keep
    // the allocator pinned/stable for as long as it is registered.
    t.Handle = Register((IntPtr)UnsafeUtility.AddressOf(ref t), functionPointer);
    Managed.RegisterDelegate(t.Handle.Index, t.Function);
#if ENABLE_UNITY_ALLOCATION_CHECKS
    if (!t.Handle.IsValid)
        throw new InvalidOperationException("Allocator registration succeeded, but failed to produce valid handle.");
#endif
}
/// <summary>
/// Removes an allocator's function pointers from the global function table, without touching
/// the managed delegate table (safe to call from unmanaged/Burst-compiled code).
/// </summary>
/// <typeparam name="T">The type of allocator to unregister.</typeparam>
/// <param name="t">Reference to the allocator.</param>
public static void UnmanagedUnregister<T>(ref this T t) where T : unmanaged, IAllocator // NOTE(review): method generic parameter restored after extraction loss
{
    if (t.Handle.IsInstalled)
    {
        // Clear the table entry first, then release the slot in the installed mask.
        t.Handle.Install(default);
        ConcurrentMask.TryFree(ref SharedStatics.IsInstalled.Ref.Data, t.Handle.Value, 1);
    }
}
/// <summary>
/// Removes an allocator's function pointers from the global function table and releases
/// the managed delegate reference held for it.
/// </summary>
/// <typeparam name="T">The type of allocator to unregister.</typeparam>
/// <param name="t">Reference to the allocator.</param>
[NotBurstCompatible]
public static void Unregister<T>(ref this T t) where T : unmanaged, IAllocator // NOTE(review): method generic parameter restored after extraction loss
{
    if (t.Handle.IsInstalled)
    {
        t.Handle.Install(default);
        ConcurrentMask.TryFree(ref SharedStatics.IsInstalled.Ref.Data, t.Handle.Value, 1);
        // Drop the strong delegate reference so it can be garbage collected.
        Managed.UnregisterDelegate(t.Handle.Index);
    }
}
/// <summary>
/// Create a custom allocator by allocating a backing storage to store the allocator and then register it.
/// </summary>
/// <typeparam name="T">The type of allocator to create.</typeparam>
/// <param name="backingAllocator">Allocator used to allocate backing storage.</param>
/// <returns>Returns reference to the newly created allocator.</returns>
[NotBurstCompatible]
internal static ref T CreateAllocator<T>(AllocatorHandle backingAllocator) // NOTE(review): method generic parameter restored after extraction loss
    where T : unmanaged, IAllocator
{
    unsafe
    {
        // 16-byte alignment for the backing storage; zero-initialize before registering.
        var allocatorPtr = (T*)Memory.Unmanaged.Allocate(UnsafeUtility.SizeOf<T>(), 16, backingAllocator);
        *allocatorPtr = default;
        ref T allocator = ref UnsafeUtility.AsRef<T>(allocatorPtr);
        Register(ref allocator);
        return ref allocator;
    }
}
/// <summary>
/// Destroy a custom allocator by unregistering the allocator and freeing its backing storage.
/// </summary>
/// <typeparam name="T">The type of allocator to destroy.</typeparam>
/// <param name="t">Reference to the allocator.</param>
/// <param name="backingAllocator">Allocator used in allocating the backing storage.</param>
[NotBurstCompatible]
internal static void DestroyAllocator<T>(ref this T t, AllocatorHandle backingAllocator) // NOTE(review): method generic parameter restored after extraction loss
    where T : unmanaged, IAllocator
{
    // Unregister first so no new allocations can route to this allocator,
    // then release the storage it lives in.
    Unregister(ref t);
    unsafe
    {
        var allocatorPtr = UnsafeUtility.AddressOf(ref t);
        Memory.Unmanaged.Free(allocatorPtr, backingAllocator);
    }
}
/// <summary>
/// For internal use only. Present for lifecycle symmetry with <see cref="Initialize"/>; currently a no-op.
/// </summary>
public static void Shutdown()
{
}
/// <summary>
/// Index in the global function table of the first user-defined allocator.
/// </summary>
/// <remarks>The indexes from 0 up to `FirstUserIndex` are reserved and so should not be used for your own allocators.</remarks>
/// <value>Index in the global function table of the first user-defined allocator.</value>
public const ushort FirstUserIndex = 64;
/// <summary>
/// Checks whether a handle refers to a user-registered (custom) allocator, i.e. one whose
/// index falls at or beyond <see cref="FirstUserIndex"/> in the global function table.
/// </summary>
/// <param name="allocator">Handle to check.</param>
/// <returns>True if the handle's index is in the user-defined range.</returns>
internal static bool IsCustomAllocator(AllocatorHandle allocator)
{
return allocator.Index >= FirstUserIndex;
}
// Throws when an allocation attempt reported an error. Calls are compiled out entirely
// unless ENABLE_UNITY_ALLOCATION_CHECKS is defined (see [Conditional]).
// error: code returned by IAllocator.Try — 0 means success.
[Conditional("ENABLE_UNITY_ALLOCATION_CHECKS")]
internal static void CheckFailedToAllocate(int error)
{
if (error != 0)
throw new ArgumentException("failed to allocate");
}
// Throws when a free attempt reported an error. Calls are compiled out entirely
// unless ENABLE_UNITY_ALLOCATION_CHECKS is defined (see [Conditional]).
// error: code returned by IAllocator.Try — 0 means success.
[Conditional("ENABLE_UNITY_ALLOCATION_CHECKS")]
internal static void CheckFailedToFree(int error)
{
if (error != 0)
throw new ArgumentException("failed to free");
}
// Validates an allocator handle before use. The [Conditional] attribute removes call sites
// unless ENABLE_UNITY_ALLOCATION_CHECKS is defined; the inner #if additionally guards the
// body because AllocatorHandle.IsValid only exists in checks-enabled builds.
[Conditional("ENABLE_UNITY_ALLOCATION_CHECKS")]
internal static void CheckValid(AllocatorHandle handle)
{
#if ENABLE_UNITY_ALLOCATION_CHECKS
if(handle.IsValid == false)
throw new ArgumentException("allocator handle is not valid.");
#endif
}
}
/// <summary>
/// Provides a wrapper for a custom allocator: allocates its backing storage and registers it
/// on construction, and unregisters/frees it on <see cref="Dispose"/>.
/// </summary>
/// <typeparam name="T">The custom allocator type.</typeparam>
[BurstCompatible(GenericTypeArguments = new[] { typeof(AllocatorManager.AllocatorHandle) })]
public unsafe struct AllocatorHelper<T> : IDisposable // NOTE(review): generic parameter restored after extraction loss
    where T : unmanaged, AllocatorManager.IAllocator
{
    // Pointer to the custom allocator's backing storage.
    readonly T* m_allocator;

    // Allocator used to allocate backing storage of T; needed again to free it in Dispose.
    AllocatorManager.AllocatorHandle m_backingAllocator;

    /// <summary>
    /// Get the custom allocator.
    /// </summary>
    public ref T Allocator => ref UnsafeUtility.AsRef<T>(m_allocator);

    /// <summary>
    /// Allocate the custom allocator from backingAllocator and register it.
    /// </summary>
    /// <param name="backingAllocator">Allocator used to allocate backing storage.</param>
    [NotBurstCompatible]
    public AllocatorHelper(AllocatorManager.AllocatorHandle backingAllocator)
    {
        ref var allocator = ref AllocatorManager.CreateAllocator<T>(backingAllocator);
        m_allocator = (T*)UnsafeUtility.AddressOf(ref allocator);
        m_backingAllocator = backingAllocator;
    }

    /// <summary>
    /// Dispose the custom allocator backing memory and unregister it.
    /// </summary>
    [NotBurstCompatible]
    public void Dispose()
    {
        ref var allocator = ref UnsafeUtility.AsRef<T>(m_allocator);
        AllocatorManager.DestroyAllocator(ref allocator, m_backingAllocator);
    }
}
}
#pragma warning restore 0649