8#include "HAL/Allocators/CachedOSPageAllocator.h"
9#include "HAL/Allocators/PooledVirtualMemoryAllocator.h"
10#include "HAL/CriticalSection.h"
11#include "HAL/LowLevelMemTracker.h"
12#include "HAL/MallocBinnedCommon.h"
13#include "HAL/MemoryBase.h"
14#include "HAL/PlatformMath.h"
15#include "HAL/PlatformMemory.h"
16#include "HAL/PlatformTLS.h"
17#include "HAL/UnrealMemory.h"
18#include "Math/NumericLimits.h"
19#include "Misc/AssertionMacros.h"
20#include "Templates/AlignmentTemplates.h"
21#include "Templates/Atomic.h"
#define USE_CACHED_PAGE_ALLOCATOR_FOR_LARGE_ALLOCS (0)
#ifndef USE_512MB_MAX_MEMORY_PER_BLOCK_SIZE
#define USE_512MB_MAX_MEMORY_PER_BLOCK_SIZE 0
#endif
#define BINNED3_BASE_PAGE_SIZE 4096
#define BINNED3_MINIMUM_ALIGNMENT_SHIFT 4
#define BINNED3_MINIMUM_ALIGNMENT 16
#ifndef BINNED3_MAX_SMALL_POOL_ALIGNMENT
#define BINNED3_MAX_SMALL_POOL_ALIGNMENT 128
#endif
#ifndef BINNED3_MAX_SMALL_POOL_SIZE
#if USE_CACHED_PAGE_ALLOCATOR_FOR_LARGE_ALLOCS
#define BINNED3_MAX_SMALL_POOL_SIZE (BINNEDCOMMON_MAX_LISTED_SMALL_POOL_SIZE)
#else
#define BINNED3_MAX_SMALL_POOL_SIZE (128 * 1024)
#endif
#endif

#define BINNED3_SMALL_POOL_COUNT (BINNEDCOMMON_NUM_LISTED_SMALL_POOLS + (BINNED3_MAX_SMALL_POOL_SIZE - BINNEDCOMMON_MAX_LISTED_SMALL_POOL_SIZE) / BINNED3_BASE_PAGE_SIZE)
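// Reading of the formula above (a sketch; the BINNEDCOMMON_* values come from
// MallocBinnedCommon.h and are not restated here): there is one pool per explicitly
// listed small block size, plus one additional pool for every BINNED3_BASE_PAGE_SIZE
// (4096-byte) step between the largest listed size and BINNED3_MAX_SMALL_POOL_SIZE.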
#if USE_512MB_MAX_MEMORY_PER_BLOCK_SIZE
#define MAX_MEMORY_PER_BLOCK_SIZE_SHIFT (29)
#else
#define MAX_MEMORY_PER_BLOCK_SIZE_SHIFT (30)
#endif

#define MAX_MEMORY_PER_BLOCK_SIZE (1ull << MAX_MEMORY_PER_BLOCK_SIZE_SHIFT)
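// Quick arithmetic check: with the default shift of 30, MAX_MEMORY_PER_BLOCK_SIZE is
// 1ull << 30 = 1 GiB of virtual address space reserved per small block size; enabling
// USE_512MB_MAX_MEMORY_PER_BLOCK_SIZE drops that to 1ull << 29 = 512 MiB.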
#if !defined(BINNED3_USE_SEPARATE_VM_PER_POOL)
	#if PLATFORM_WINDOWS
		#define BINNED3_USE_SEPARATE_VM_PER_POOL (1)
	#else
		#define BINNED3_USE_SEPARATE_VM_PER_POOL (0)
	#endif
#endif
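// When BINNED3_USE_SEPARATE_VM_PER_POOL is 0, all small pools share one contiguous VM
// reservation (Binned3BaseVMPtr) and PoolIndexFromPtr() is a single subtract-and-shift.
// When it is 1, each pool gets its own reservation (PoolBaseVMPtr[]) and the
// pointer-to-pool lookup further below falls back to a divide plus a short neighbour search.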
#define DEFAULT_GMallocBinned3PerThreadCaches 1
#define DEFAULT_GMallocBinned3BundleCount 64
#define DEFAULT_GMallocBinned3AllocExtra 32
#define BINNED3_MAX_GMallocBinned3MaxBundlesBeforeRecycle 8
#if !defined(AGGRESSIVE_MEMORY_SAVING)
	#error "AGGRESSIVE_MEMORY_SAVING must be defined"
#endif

#if AGGRESSIVE_MEMORY_SAVING
	#define DEFAULT_GMallocBinned3BundleSize 8192
#else
	#define DEFAULT_GMallocBinned3BundleSize 65536
#endif
#define BINNED3_ALLOW_RUNTIME_TWEAKING 0
#if BINNED3_ALLOW_RUNTIME_TWEAKING
	extern int32 GMallocBinned3PerThreadCaches;
	extern int32 GMallocBinned3BundleSize = DEFAULT_GMallocBinned3BundleSize;
	extern int32 GMallocBinned3BundleCount = DEFAULT_GMallocBinned3BundleCount;
	extern int32 GMallocBinned3MaxBundlesBeforeRecycle = BINNED3_MAX_GMallocBinned3MaxBundlesBeforeRecycle;
	extern int32 GMallocBinned3AllocExtra = DEFAULT_GMallocBinned3AllocExtra;
#else
	#define GMallocBinned3PerThreadCaches DEFAULT_GMallocBinned3PerThreadCaches
	#define GMallocBinned3BundleSize DEFAULT_GMallocBinned3BundleSize
	#define GMallocBinned3BundleCount DEFAULT_GMallocBinned3BundleCount
	#define GMallocBinned3MaxBundlesBeforeRecycle BINNED3_MAX_GMallocBinned3MaxBundlesBeforeRecycle
	#define GMallocBinned3AllocExtra DEFAULT_GMallocBinned3AllocExtra
#endif
#ifndef BINNED3_ALLOCATOR_STATS
	#if UE_BUILD_SHIPPING && !WITH_EDITOR
		#define BINNED3_ALLOCATOR_STATS 0
	#else
		#define BINNED3_ALLOCATOR_STATS 1
	#endif
#endif

#if BINNED3_ALLOCATOR_STATS
	#if !(UE_BUILD_SHIPPING || UE_BUILD_TEST)
		#define BINNED3_ALLOCATOR_PER_BIN_STATS 1
	#else
		#define BINNED3_ALLOCATOR_PER_BIN_STATS 0
	#endif
#else
	#define BINNED3_ALLOCATOR_PER_BIN_STATS 0
#endif
PRAGMA_DISABLE_UNSAFE_TYPECAST_WARNINGS

//
// Binned malloc that reserves a fixed range of virtual address space per small block size.
//
class FMallocBinned3 : public FMalloc
{
	// Forward declarations of internal bookkeeping types.
	struct FPoolInfoLarge;
	struct FPoolInfoSmall;
	struct PoolHashBucket;
	struct Private;
	// Header placed at the front of a free block; the allocator carves pages into
	// equally sized blocks and uses this header to hand them out and to detect stomps.
	struct FFreeBlock
	{
		enum
		{
			CANARY_VALUE = 0xe3
		};

		FORCEINLINE FFreeBlock(uint32 InPageSize, uint32 InBlockSize, uint8 InPoolIndex)
			: BlockSizeShifted(InBlockSize >> BINNED3_MINIMUM_ALIGNMENT_SHIFT)
			, PoolIndex(InPoolIndex)
			, Canary(CANARY_VALUE)
			, NextFreeIndex(MAX_uint32)
		{
			NumFreeBlocks = InPageSize / InBlockSize;
		}

		FORCEINLINE uint32 GetNumFreeRegularBlocks() const
		{
			return NumFreeBlocks;
		}

		FORCEINLINE bool IsCanaryOk() const
		{
			return Canary == FFreeBlock::CANARY_VALUE;
		}

		void CanaryFail() const;

		FORCEINLINE void* AllocateRegularBlock()
		{
			--NumFreeBlocks;
			return (uint8*)this + NumFreeBlocks * (uint32(BlockSizeShifted) << BINNED3_MINIMUM_ALIGNMENT_SHIFT);
		}

		uint16 BlockSizeShifted;	// block size >> BINNED3_MINIMUM_ALIGNMENT_SHIFT
		uint8  PoolIndex;			// index of the small pool this block belongs to
		uint8  Canary;				// constant value, used to detect memory stomps
		uint32 NumFreeBlocks;		// number of free blocks remaining in this page
		uint32 NextFreeIndex;		// index of the next free block, or MAX_uint32 if this is the last one
	};
	// Per-block-size pool table; one entry per small pool.
	struct FPoolTable
	{
		uint32 BlockSize;
		uint16 BlocksPerBlockOfBlocks;
		uint8 PagesPlatformForBlockOfBlocks;

		FBitTree BlockOfBlockAllocationBits; // one bit per block-of-blocks, set when it is allocated
		FBitTree BlockOfBlockIsExhausted;    // one bit per block-of-blocks, set when it has no free blocks

		uint32 NumEverUsedBlockOfBlocks;
		FPoolInfoSmall** PoolInfos;

		uint64 UnusedAreaOffsetLow;

#if BINNED3_ALLOCATOR_PER_BIN_STATS
		// "Head end" stats, tracked above the per-thread caches.
		TAtomic<int64> TotalRequestedAllocSize;
		TAtomic<int64> TotalAllocCount;
		TAtomic<int64> TotalFreeCount;

		FORCEINLINE void HeadEndAlloc(SIZE_T Size)
		{
			check(Size >= 0 && Size <= BlockSize);
			TotalRequestedAllocSize += Size;
			TotalAllocCount++;
		}
		FORCEINLINE void HeadEndFree()
		{
			TotalFreeCount++;
		}
#else
		FORCEINLINE void HeadEndAlloc(SIZE_T Size)
		{
		}
		FORCEINLINE void HeadEndFree()
		{
		}
#endif
	};
	struct FPtrToPoolMapping
	{
		FPtrToPoolMapping()
			: PtrToPoolPageBitShift(0)
			, HashKeyShift(0)
			, PoolMask(0)
			, MaxHashBuckets(0)
		{
		}
		explicit FPtrToPoolMapping(uint32 InPageSize, uint64 InNumPoolsPerPage, uint64 AddressLimit)
		{
			Init(InPageSize, InNumPoolsPerPage, AddressLimit);
		}

		void Init(uint32 InPageSize, uint64 InNumPoolsPerPage, uint64 AddressLimit)
		{
			uint64 PoolPageToPoolBitShift = FPlatformMath::CeilLogTwo(InNumPoolsPerPage);

			PtrToPoolPageBitShift = FPlatformMath::CeilLogTwo(InPageSize);
			HashKeyShift          = PtrToPoolPageBitShift + PoolPageToPoolBitShift;
			PoolMask              = (1ull << PoolPageToPoolBitShift) - 1;
			MaxHashBuckets        = AddressLimit >> HashKeyShift;
		}

		FORCEINLINE void GetHashBucketAndPoolIndices(const void* InPtr, uint32& OutBucketIndex, UPTRINT& OutBucketCollision, uint32& OutPoolIndex) const
		{
			OutBucketCollision = (UPTRINT)InPtr >> HashKeyShift;
			OutBucketIndex = uint32(OutBucketCollision & (MaxHashBuckets - 1));
			OutPoolIndex = ((UPTRINT)InPtr >> PtrToPoolPageBitShift) & PoolMask;
		}

		FORCEINLINE uint64 GetMaxHashBuckets() const
		{
			return MaxHashBuckets;
		}

	private:
		/** Shift to apply to a pointer to get its pool page index */
		uint64 PtrToPoolPageBitShift;
		/** Shift required to get the hash table key from a pointer */
		uint64 HashKeyShift;
		/** Mask used to isolate the pool index within a pool page */
		uint64 PoolMask;
		/** Maximum number of hash buckets, derived from the address limit */
		uint64 MaxHashBuckets;
	};
275 FPoolTable SmallPoolTables[BINNED3_SMALL_POOL_COUNT];
277 uint32 SmallPoolInfosPerPlatformPage;
279 PoolHashBucket* HashBuckets;
280 PoolHashBucket* HashBucketFreeList;
281 uint64 NumLargePoolsPerPage;
283 FCriticalSection Mutex;
	struct FBundleNode
	{
		FBundleNode* NextNodeInCurrentBundle;
		union
		{
			FBundleNode* NextBundle;
			int32 Count;
		};
	};

	struct FBundle
	{
		FORCEINLINE FBundle()
		{
			Reset();
		}

		FORCEINLINE void Reset()
		{
			Head = nullptr;
			Count = 0;
		}

		FORCEINLINE void PushHead(FBundleNode* Node)
		{
			Node->NextNodeInCurrentBundle = Head;
			Node->NextBundle = nullptr;
			Head = Node;
			Count++;
		}

		FORCEINLINE FBundleNode* PopHead()
		{
			FBundleNode* Result = Head;

			Count--;
			Head = Head->NextNodeInCurrentBundle;
			return Result;
		}

		FBundleNode* Head;
		uint32       Count;
	};

	static_assert(sizeof(FBundleNode) <= BINNED3_MINIMUM_ALIGNMENT, "Bundle nodes must fit into the smallest block size");
	struct FFreeBlockList
	{
		// return true if we actually pushed it
		FORCEINLINE bool PushToFront(void* InPtr, uint32 InPoolIndex, uint32 InBlockSize)
		{
			checkSlow(InPtr);

			if ((PartialBundle.Count >= (uint32)GMallocBinned3BundleCount) | (PartialBundle.Count * InBlockSize >= (uint32)GMallocBinned3BundleSize))
			{
				if (FullBundle.Head)
				{
					return false;
				}
				FullBundle = PartialBundle;
				PartialBundle.Reset();
			}
			PartialBundle.PushHead((FBundleNode*)InPtr);
			return true;
		}
		FORCEINLINE bool CanPushToFront(uint32 InPoolIndex, uint32 InBlockSize)
		{
			return !((!!FullBundle.Head) & ((PartialBundle.Count >= (uint32)GMallocBinned3BundleCount) | (PartialBundle.Count * InBlockSize >= (uint32)GMallocBinned3BundleSize)));
		}
		FORCEINLINE void* PopFromFront(uint32 InPoolIndex)
		{
			if ((!PartialBundle.Head) & (!!FullBundle.Head))
			{
				PartialBundle = FullBundle;
				FullBundle.Reset();
			}
			return PartialBundle.Head ? PartialBundle.PopHead() : nullptr;
		}

		// tries to recycle the full bundle; if that fails, it is returned for freeing
		FBundleNode* RecyleFull(uint32 InPoolIndex);
		bool ObtainPartial(uint32 InPoolIndex);
		FBundleNode* PopBundles(uint32 InPoolIndex);
	private:
		FBundle PartialBundle;
		FBundle FullBundle;
	};
	struct FPerThreadFreeBlockLists
	{
		FORCEINLINE static FPerThreadFreeBlockLists* Get()
		{
			return FMallocBinned3::Binned3TlsSlot ? (FPerThreadFreeBlockLists*)FPlatformTLS::GetTlsValue(FMallocBinned3::Binned3TlsSlot) : nullptr;
		}
		static void SetTLS();
		static void ClearTLS();

		FPerThreadFreeBlockLists()
#if BINNED3_ALLOCATOR_STATS
			: AllocatedMemory(0)
#endif
		{
		}

		FORCEINLINE void* Malloc(uint32 InPoolIndex)
		{
			return FreeLists[InPoolIndex].PopFromFront(InPoolIndex);
		}
		// return true if the pointer was pushed
		FORCEINLINE bool Free(void* InPtr, uint32 InPoolIndex, uint32 InBlockSize)
		{
			return FreeLists[InPoolIndex].PushToFront(InPtr, InPoolIndex, InBlockSize);
		}
		// returns true if a pointer can be pushed
		FORCEINLINE bool CanFree(uint32 InPoolIndex, uint32 InBlockSize)
		{
			return FreeLists[InPoolIndex].CanPushToFront(InPoolIndex, InBlockSize);
		}
		// returns the recycled full bundle, if any, for the caller to free to the central pools
		FBundleNode* RecycleFullBundle(uint32 InPoolIndex)
		{
			return FreeLists[InPoolIndex].RecyleFull(InPoolIndex);
		}
		bool ObtainRecycledPartial(uint32 InPoolIndex)
		{
			return FreeLists[InPoolIndex].ObtainPartial(InPoolIndex);
		}
		FBundleNode* PopBundles(uint32 InPoolIndex)
		{
			return FreeLists[InPoolIndex].PopBundles(InPoolIndex);
		}
#if BINNED3_ALLOCATOR_STATS
	public:
		int64 AllocatedMemory;
		static TAtomic<int64> ConsolidatedMemory;
#endif
	private:
		FFreeBlockList FreeLists[BINNED3_SMALL_POOL_COUNT];
	};
#if !BINNED3_USE_SEPARATE_VM_PER_POOL
	FORCEINLINE uint64 PoolIndexFromPtr(const void* Ptr)
	{
		return (UPTRINT(Ptr) - UPTRINT(Binned3BaseVMPtr)) >> MAX_MEMORY_PER_BLOCK_SIZE_SHIFT;
	}
	FORCEINLINE uint8* PoolBasePtr(uint32 InPoolIndex)
	{
		return Binned3BaseVMPtr + InPoolIndex * MAX_MEMORY_PER_BLOCK_SIZE;
	}
#else
#if BINNED3_ALLOCATOR_STATS
	void RecordPoolSearch(uint32 Tests);
#else
	FORCEINLINE void RecordPoolSearch(uint32 Tests)
	{
	}
#endif
	FORCEINLINE uint64 PoolIndexFromPtr(const void* Ptr)
	{
		if (PoolSearchDiv == 0)
		{
			return (UPTRINT(Ptr) - UPTRINT(PoolBaseVMPtr[0])) >> MAX_MEMORY_PER_BLOCK_SIZE_SHIFT;
		}
		uint64 PoolIndex = BINNED3_SMALL_POOL_COUNT;
		if (((uint8*)Ptr >= PoolBaseVMPtr[0]) & ((uint8*)Ptr < HighestPoolBaseVMPtr + MAX_MEMORY_PER_BLOCK_SIZE))
		{
			PoolIndex = uint64((uint8*)Ptr - PoolBaseVMPtr[0]) / PoolSearchDiv;
			if (PoolIndex >= BINNED3_SMALL_POOL_COUNT)
			{
				PoolIndex = BINNED3_SMALL_POOL_COUNT - 1;
			}
			uint32 Tests = 1; // count of probes, for stats only
			if ((uint8*)Ptr < PoolBaseVMPtr[PoolIndex])
			{
				// The estimate was too high; walk down to the pool that starts at or below Ptr.
				do
				{
					Tests++;
					PoolIndex--;
					check(PoolIndex < BINNED3_SMALL_POOL_COUNT);
				} while ((uint8*)Ptr < PoolBaseVMPtr[PoolIndex]);
				if ((uint8*)Ptr >= PoolBaseVMPtr[PoolIndex] + MAX_MEMORY_PER_BLOCK_SIZE)
				{
					PoolIndex = BINNED3_SMALL_POOL_COUNT; // the pointer lies in a gap between pools
				}
			}
			else if ((uint8*)Ptr >= PoolBaseVMPtr[PoolIndex] + MAX_MEMORY_PER_BLOCK_SIZE)
			{
				// The estimate was too low; walk up until Ptr falls inside the pool's range.
				do
				{
					Tests++;
					PoolIndex++;
					check(PoolIndex < BINNED3_SMALL_POOL_COUNT);
				} while ((uint8*)Ptr >= PoolBaseVMPtr[PoolIndex] + MAX_MEMORY_PER_BLOCK_SIZE);
				if ((uint8*)Ptr < PoolBaseVMPtr[PoolIndex])
				{
					PoolIndex = BINNED3_SMALL_POOL_COUNT; // the pointer lies in a gap between pools
				}
			}
			RecordPoolSearch(Tests);
		}
		return PoolIndex;
	}
	FORCEINLINE uint8* PoolBasePtr(uint32 InPoolIndex)
	{
		return PoolBaseVMPtr[InPoolIndex];
	}
#endif
	FORCEINLINE uint32 PoolIndexFromPtrChecked(const void* Ptr)
	{
		uint64 Result = PoolIndexFromPtr(Ptr);
		check(Result < BINNED3_SMALL_POOL_COUNT);
		return (uint32)Result;
	}

	FORCEINLINE bool IsOSAllocation(const void* Ptr)
	{
		return PoolIndexFromPtr(Ptr) >= BINNED3_SMALL_POOL_COUNT;
	}
	FORCEINLINE void* BlockOfBlocksPointerFromContainedPtr(const void* Ptr, uint8 PagesPlatformForBlockOfBlocks, uint32& OutBlockOfBlocksIndex)
	{
		uint32 PoolIndex = PoolIndexFromPtrChecked(Ptr);
		uint8* PoolStart = PoolBasePtr(PoolIndex);
		uint64 BlockOfBlocksIndex = (UPTRINT(Ptr) - UPTRINT(PoolStart)) / (UPTRINT(PagesPlatformForBlockOfBlocks) * UPTRINT(OsAllocationGranularity));
		OutBlockOfBlocksIndex = BlockOfBlocksIndex;

		uint8* Result = PoolStart + BlockOfBlocksIndex * UPTRINT(PagesPlatformForBlockOfBlocks) * UPTRINT(OsAllocationGranularity);

		check(Result < PoolStart + MAX_MEMORY_PER_BLOCK_SIZE);
		return Result;
	}
	FORCEINLINE uint8* BlockPointerFromIndecies(uint32 InPoolIndex, uint32 BlockOfBlocksIndex, uint32 BlockOfBlocksSize)
	{
		uint8* PoolStart = PoolBasePtr(InPoolIndex);
		uint8* Ptr = PoolStart + BlockOfBlocksIndex * uint64(BlockOfBlocksSize);
		check(Ptr + BlockOfBlocksSize <= PoolStart + MAX_MEMORY_PER_BLOCK_SIZE);
		return Ptr;
	}
	FPoolInfoSmall* PushNewPoolToFront(FPoolTable& Table, uint32 InBlockSize, uint32 InPoolIndex, uint32& OutBlockOfBlocksIndex);
	FPoolInfoSmall* GetFrontPool(FPoolTable& Table, uint32 InPoolIndex, uint32& OutBlockOfBlocksIndex);
public:
	FMallocBinned3();
	virtual ~FMallocBinned3();

	// FMalloc interface
	virtual bool IsInternallyThreadSafe() const override;
	FORCEINLINE virtual void* Malloc(SIZE_T Size, uint32 Alignment) override
	{
		void* Result = nullptr;

		// Only allocate from the small pools if the size is small enough and the alignment isn't larger
		// than the minimum alignment; everything else goes through the external (OS-backed) path.
		if ((Size <= BINNED3_MAX_SMALL_POOL_SIZE) & (Alignment <= BINNED3_MINIMUM_ALIGNMENT)) // one branch, not two
		{
			FPerThreadFreeBlockLists* Lists = GMallocBinned3PerThreadCaches ? FPerThreadFreeBlockLists::Get() : nullptr;
			if (Lists)
			{
				uint32 PoolIndex = BoundSizeToPoolIndex(Size);
				uint32 BlockSize = PoolIndexToBlockSize(PoolIndex);
				Result = Lists->Malloc(PoolIndex);
#if BINNED3_ALLOCATOR_STATS
				if (Result)
				{
					SmallPoolTables[PoolIndex].HeadEndAlloc(Size);
					Lists->AllocatedMemory += BlockSize;
				}
#endif
			}
		}
		if (Result == nullptr)
		{
			Result = MallocExternal(Size, Alignment);
		}

		return Result;
	}
	FORCEINLINE virtual void* Realloc(void* Ptr, SIZE_T NewSize, uint32 Alignment) override
	{
		if (NewSize <= BINNED3_MAX_SMALL_POOL_SIZE && Alignment <= BINNED3_MINIMUM_ALIGNMENT)
		{
			FPerThreadFreeBlockLists* Lists = GMallocBinned3PerThreadCaches ? FPerThreadFreeBlockLists::Get() : nullptr;

			uint64 PoolIndex = PoolIndexFromPtr(Ptr);
			if ((!!Lists) & ((!Ptr) | (PoolIndex < BINNED3_SMALL_POOL_COUNT)))
			{
				uint32 BlockSize = 0;

				bool bCanFree = true; // null is always "freeable"
				if (Ptr)
				{
					// Reallocate to a smaller/bigger pool if necessary
					BlockSize = PoolIndexToBlockSize(PoolIndex);
					if ((!!NewSize) & (NewSize <= BlockSize) & ((!PoolIndex) | (NewSize > PoolIndexToBlockSize(static_cast<uint32>(PoolIndex - 1)))))
					{
#if BINNED3_ALLOCATOR_STATS
						SmallPoolTables[PoolIndex].HeadEndAlloc(NewSize);
						SmallPoolTables[PoolIndex].HeadEndFree();
#endif
						return Ptr;
					}
					bCanFree = Lists->CanFree(PoolIndex, BlockSize);
				}
				if (bCanFree)
				{
					uint32 NewPoolIndex = BoundSizeToPoolIndex(NewSize);
					uint32 NewBlockSize = PoolIndexToBlockSize(NewPoolIndex);
					void* Result = NewSize ? Lists->Malloc(NewPoolIndex) : nullptr;
#if BINNED3_ALLOCATOR_STATS
					if (Result)
					{
						SmallPoolTables[NewPoolIndex].HeadEndAlloc(NewSize);
						Lists->AllocatedMemory += NewBlockSize;
					}
#endif
					if (Result || !NewSize)
					{
						if (Result && Ptr)
						{
							FMemory::Memcpy(Result, Ptr, FPlatformMath::Min<SIZE_T>(NewSize, BlockSize));
						}
						if (Ptr)
						{
							bool bDidPush = Lists->Free(Ptr, PoolIndex, BlockSize);
							checkSlow(bDidPush);
#if BINNED3_ALLOCATOR_STATS
							SmallPoolTables[PoolIndex].HeadEndFree();
							Lists->AllocatedMemory -= BlockSize;
#endif
						}

						return Result;
					}
				}
			}
		}
		void* Result = ReallocExternal(Ptr, NewSize, Alignment);
		return Result;
	}
	FORCEINLINE virtual void Free(void* Ptr) override
	{
		uint64 PoolIndex = PoolIndexFromPtr(Ptr);
		if (PoolIndex < BINNED3_SMALL_POOL_COUNT)
		{
			FPerThreadFreeBlockLists* Lists = GMallocBinned3PerThreadCaches ? FPerThreadFreeBlockLists::Get() : nullptr;
			if (Lists)
			{
				int32 BlockSize = PoolIndexToBlockSize(PoolIndex);
				if (Lists->Free(Ptr, PoolIndex, BlockSize))
				{
#if BINNED3_ALLOCATOR_STATS
					SmallPoolTables[PoolIndex].HeadEndFree();
					Lists->AllocatedMemory -= BlockSize;
#endif
					return;
				}
			}
		}
		FreeExternal(Ptr);
	}
	FORCEINLINE virtual bool GetAllocationSize(void *Ptr, SIZE_T &SizeOut) override
	{
		uint64 PoolIndex = PoolIndexFromPtr(Ptr);
		if (PoolIndex < BINNED3_SMALL_POOL_COUNT)
		{
			SizeOut = PoolIndexToBlockSize(PoolIndex);
			return true;
		}
		return GetAllocationSizeExternal(Ptr, SizeOut);
	}
	FORCEINLINE virtual SIZE_T QuantizeSize(SIZE_T Count, uint32 Alignment) override
	{
		static_assert(DEFAULT_ALIGNMENT <= BINNED3_MINIMUM_ALIGNMENT, "DEFAULT_ALIGNMENT is assumed to be zero");
		checkSlow((Alignment & (Alignment - 1)) == 0); // the alignment must be a power of two
		SIZE_T SizeOut;
		if ((Count <= BINNED3_MAX_SMALL_POOL_SIZE) & (Alignment <= BINNED3_MINIMUM_ALIGNMENT)) // one branch, not two
		{
			SizeOut = PoolIndexToBlockSize(BoundSizeToPoolIndex(Count));
		}
		else
		{
			Alignment = FPlatformMath::Max<uint32>(Alignment, OsAllocationGranularity);
			SizeOut = Align(Count, Alignment);
		}
		check(SizeOut >= Count);
		return SizeOut;
	}
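	// Usage note (illustrative): QuantizeSize() reports the size a request would actually
	// occupy, so callers can grow containers to the quantized value and use the slack for
	// free. A small request returns its bin's block size via
	// PoolIndexToBlockSize(BoundSizeToPoolIndex(Count)); a large request is rounded up to
	// the OS allocation granularity.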
	virtual bool ValidateHeap() override;
	virtual void Trim(bool bTrimThreadCaches) override;
	virtual void SetupTLSCachesOnCurrentThread() override;
	virtual void ClearAndDisableTLSCachesOnCurrentThread() override;
	virtual const TCHAR* GetDescriptiveName() override;
	// End FMalloc interface

	void FlushCurrentThreadCache();
	void* MallocExternal(SIZE_T Size, uint32 Alignment);
	void* ReallocExternal(void* Ptr, SIZE_T NewSize, uint32 Alignment);
	void FreeExternal(void *Ptr);
	bool GetAllocationSizeExternal(void* Ptr, SIZE_T& SizeOut);
#if BINNED3_ALLOCATOR_STATS
	int64 GetTotalAllocatedSmallPoolMemory() const;
#endif
	virtual void GetAllocatorStats( FGenericMemoryStats& out_Stats ) override;
	/** Dumps current allocator stats to the log. */
	virtual void DumpAllocatorStats(class FOutputDevice& Ar) override;
public:
	static uint16 SmallBlockSizesReversedShifted[BINNED3_SMALL_POOL_COUNT + 1]; // reversed so the smallest sizes share the main cache line
	static FMallocBinned3* MallocBinned3;
	static uint32 Binned3TlsSlot;
	static uint32 OsAllocationGranularity;

#if !BINNED3_USE_SEPARATE_VM_PER_POOL
	static uint8* Binned3BaseVMPtr;
	FPlatformMemory::FPlatformVirtualMemoryBlock Binned3BaseVMBlock;
#else
	static uint64 PoolSearchDiv; // if this is zero, the per-pool reservations turned out to be contiguous, so a simple subtract-and-shift is used
	static uint8* HighestPoolBaseVMPtr; // base of the highest pool reservation
	static uint8* PoolBaseVMPtr[BINNED3_SMALL_POOL_COUNT];
	FPlatformMemory::FPlatformVirtualMemoryBlock PoolBaseVMBlock[BINNED3_SMALL_POOL_COUNT];
#endif

	// Mapping of quantized sizes to small pool indices.
	static uint8 MemSizeToIndex[1 + (BINNED3_MAX_SMALL_POOL_SIZE >> BINNED3_MINIMUM_ALIGNMENT_SHIFT)];
	FORCEINLINE uint32 BoundSizeToPoolIndex(SIZE_T Size)
	{
		auto Index = ((Size + BINNED3_MINIMUM_ALIGNMENT - 1) >> BINNED3_MINIMUM_ALIGNMENT_SHIFT);
		checkSlow(Index >= 0 && Index <= (BINNED3_MAX_SMALL_POOL_SIZE >> BINNED3_MINIMUM_ALIGNMENT_SHIFT));
		uint32 PoolIndex = uint32(MemSizeToIndex[Index]);
		checkSlow(PoolIndex >= 0 && PoolIndex < BINNED3_SMALL_POOL_COUNT);
		return PoolIndex;
	}

	FORCEINLINE uint32 PoolIndexToBlockSize(uint32 PoolIndex)
	{
		return uint32(SmallBlockSizesReversedShifted[BINNED3_SMALL_POOL_COUNT - PoolIndex - 1]) << BINNED3_MINIMUM_ALIGNMENT_SHIFT;
	}
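	// Sketch of the round trip between the two helpers above: for any Size up to
	// BINNED3_MAX_SMALL_POOL_SIZE,
	//   PoolIndexToBlockSize(BoundSizeToPoolIndex(Size)) >= Size
	// i.e. MemSizeToIndex maps the 16-byte-quantized size to the first bin whose block
	// size can hold it, and the reversed block-size table converts that bin back to bytes.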
	void Commit(uint32 InPoolIndex, void *Ptr, SIZE_T Size);
	void Decommit(uint32 InPoolIndex, void *Ptr, SIZE_T Size);

	static void* AllocateMetaDataMemory(SIZE_T Size);
};
PRAGMA_RESTORE_UNSAFE_TYPECAST_WARNINGS
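// Illustrative usage sketch (assumes this allocator is the one installed as GMalloc by the
// platform's FPlatformMemory::BaseAllocator(); none of the names below are defined in this header):
//   void* P = FMemory::Malloc(24);   // small request: served from a per-thread small-block bin
//   P = FMemory::Realloc(P, 40);     // moves or stays in place depending on the target bin
//   FMemory::Free(P);                // typically just pushes the block onto the thread's bundle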
#define BINNED3_INLINE (1)

#if BINNED3_INLINE && !FORCE_ANSI_ALLOCATOR
	#define FMEMORY_INLINE_FUNCTION_DECORATOR FORCEINLINE
	#define FMEMORY_INLINE_GMalloc (FMallocBinned3::MallocBinned3)
	#include "FMemory.inl"
#endif