Ark Server API (ASA) - Wiki
GenericGrowableAllocator.h
Go to the documentation of this file.
1// Copyright Epic Games, Inc. All Rights Reserved.
2
3/*=============================================================================
4 GrowableAllocator.h: Memory allocator that allocates direct memory for pool memory
5=============================================================================*/
6
7#pragma once
8
9#include "CoreMinimal.h"
10#include "HAL/LowLevelMemTracker.h"
11#include "Misc/ScopeLock.h"
12#include "ProfilingDebugging/MemoryTrace.h"
13#include "Stats/Stats.h"
14
15//////////////
16// - Don't use a shared CriticalSection, must pass in to the GrowableAllocator
17// - Use this with GPU allocations
18// - search for @todo here and in the SwitchAudioBuffer code
19// - Move this into the FSwitchAudioDevice
20// - Pass this over some code reviews on slack or something for maybe a better way to structure instead of Template and Pure virtual
21// - Can maybe just make virtuals in FGrowableAllocationBase ??
22/////////////
23
24#define ALLOCATION_HISTOGRAM (!UE_BUILD_SHIPPING && !UE_BUILD_TEST) && 1
25#define ALLOCATION_HISTOGRAM_DETAILED (!UE_BUILD_SHIPPING && !UE_BUILD_TEST) && 0
26
27struct FGrowableAllocationBase
28{
29 uint64 Size;
30 uint32 Offset;
31 uint32 Padding;
32#if ALLOCATION_HISTOGRAM
33 uint32 OwnerType;
34#endif
35};
36
37// a base class for both the classes below, for usage tracking only
38class FGrowableMallocBase
39{
40public:
41 /** Constructor */
42 FGrowableMallocBase()
43 : TotalAllocated(0)
44 , TotalWaste(0)
45 , CurrentAllocs(0)
46 , TotalAllocs(0)
47 {}
48 virtual ~FGrowableMallocBase()
49 {}
50
51 /**
52 * Returns approximated amount of memory wasted due to allocations' alignment.
53 */
54 virtual uint64 GetWasteApproximation()
55 {
56 double Waste = (static_cast<double>(TotalWaste) / static_cast<double>(TotalAllocs)) * static_cast<double>(CurrentAllocs);
57 return (uint64)Waste;
58 }
59
60protected:
61
62 /** Total amount ever allocated */
63 uint64 TotalAllocated;
64
65 /** The total amount of memory wasted due to allocations' alignment. */
66 uint64 TotalWaste;
67
68 /** The current number of allocations. */
69 uint64 CurrentAllocs;
70
71 /** The total number of allocations. */
72 uint64 TotalAllocs;
73};
74
75
76
77class FGrowableMallocChunk : public FGrowableMallocBase
78{
79public:
80 // Abstract functions that user must implement
81
82 /**
83 * Lets the implementation allocate the backing memory for the chunk
84 *
85 * Implementation needs to pause LLM tracking for ELLMTracker::Default if it's allocating from an allocator that tracks via ELLMTracker::Default
86 *
87 * @param Size Minimum size needed for this allocation. The implementation will likely allocate more, and return that amount
88 * @return The actual size of the chunk that was allocated (could be much larger than Size)
89 */
90 virtual uint64 CreateInternalMemory(uint64 Size) = 0;
91
92 /**
93 * Destroys the backing memory for the chunk
94 *
95 * Implementation needs to pause LLM tracking for ELLMTracker::Default if it's allocating from an allocator that tracks via ELLMTracker::Default
96 *
97 */
98 virtual void DestroyInternalMemory() = 0;
99
100 /**
101 * Creates an implementation specific subclass of FGrowableAllocationBase. Does not need to be initialized (see InitializeAllocationStruct)
102 */
103 virtual FGrowableAllocationBase* CreateAllocationStruct() = 0;
104
105 /**
106 * Destroys the implementation object. By default, just deletes it
107 */
108 virtual void DestroyAllocationStruct(FGrowableAllocationBase* Allocation)
109 {
110 delete Allocation;
111 }
112
113 /**
114 * Lets the implementation fill in any specific fields of the allocation struct after the base fields are set up
115 *
116 */
117 virtual void InitializeAllocationStruct(FGrowableAllocationBase* Allocation) = 0;
118
119 /**
120 * Queries the implementation if the given allocation came from this chunk
121 */
122 virtual bool DoesChunkContainAllocation(const FGrowableAllocationBase* Allocation) = 0;
123
124 /**
125 * Queries the implementation if the given address came from this chunk
126 */
127 virtual bool DoesChunkContainAddress(const void* Address) = 0;
128
129
130 /**
131 * Constructor
132 */
133 FGrowableMallocChunk(uint64 InSize, uint32 Type, FCriticalSection* InCriticalSection)
134 : MemoryType(Type)
135 , HeapSize(InSize)
136 , UsedMemorySize(0)
138 , CriticalSection(InCriticalSection)
139 {
140 }
141
143 {
144 // create the pool object (note that this will update and return the new HeapSize for the implementation's internal aligned size - we then update how big we track)
145 HeapSize = CreateInternalMemory(HeapSize);
146 // entire chunk is free
147 FreeList = new FFreeEntry(NULL, 0, HeapSize);
149 }
150
151 /**
152 * Destructor
153 */
154 virtual ~FGrowableMallocChunk()
155 {
156 }
157
158 void Destroy()
159 {
160 checkf(IsEmpty(), TEXT("Chunk was not empty when it was destroyed!"));
161 DestroyInternalMemory();
162 }
163
164 /**
165 * Check free list for an entry big enough to fit the requested Size with Alignment
166 * @param Size - allocation size
167 * @param Alignment - allocation alignment
168 * @return true if available entry was found
169 */
170 bool CanFitEntry(uint32 Size, uint32 Alignment)
171 {
172 // Compute MaxFreeEntrySize if necessary (should only happen if this chunk was just allocated from)
173 if (MaxFreeEntrySize == MaxFreeEntrySizeDirty)
174 {
175 MaxFreeEntrySize = 0;
176 for (FFreeEntry* Entry = FreeList; Entry; Entry = Entry->Next)
177 {
178 MaxFreeEntrySize = FMath::Max(Entry->BlockSize, MaxFreeEntrySize);
179 }
180 }
181
182 // Return false if we trivially don't fit
183 if (Size > MaxFreeEntrySize)
184 {
185 return false;
186 }
187
188 // Return true if we trivially do fit
189 if (Size + Alignment - 1 <= MaxFreeEntrySize)
190 {
191 return true;
192 }
193
194 // Slow method - search the free entries for a free chunk
195 bool bResult = false;
196 for (FFreeEntry *Entry = FreeList; Entry; Entry = Entry->Next)
197 {
198 if (Entry->CanFit(Size, Alignment))
199 {
200 bResult = true;
201 break;
202 }
203 }
204 return bResult;
205 }
206 /**
207 * @return true if this chunk has no used memory
208 */
209 bool IsEmpty()
210 {
211 return UsedMemorySize == 0;
212 }
213
214 FGrowableAllocationBase* Malloc(uint32 Size, uint32 Alignment, uint32 MinAllocationSize, int32 OwnerType)
215 {
216 checkSlow(Alignment != 0);
217
218 // multi-thread protection
219 FScopeLock ScopeLock(CriticalSection);
220
221 // Alignment here is assumed to be for location and size
222 const uint32 AlignedSize = Align<uint32>(Size, Alignment);
223
224 // Update stats.
225 const uint32 WastedSize = AlignedSize - Size;
226 TotalWaste += WastedSize;
227 CurrentAllocs++;
228 TotalAllocs++;
229
230 // look for a good free chunk
231 FFreeEntry* Prev = NULL;
232 FFreeEntry* FindEntry = NULL;
233
234 for (FFreeEntry* Entry = FreeList; Entry; Entry = Entry->Next)
235 {
236 if (Entry->CanFit(AlignedSize, Alignment))
237 {
238 FindEntry = Entry;
239 break;
240 }
241 Prev = Entry;
242 }
243
244 if (FindEntry != NULL)
245 {
246 if (FindEntry->BlockSize == MaxFreeEntrySize)
247 {
248 // We're probably about to just split our largest entry, so mark the max size as dirty to indicate it needs recomputing
249 MaxFreeEntrySize = MaxFreeEntrySizeDirty;
250 }
251 // Use it, leaving over any unused space
252 UsedMemorySize += AlignedSize;
253 bool bDelete;
254 uint32 Padding;
255 uint32 Offset = FindEntry->Split(AlignedSize, Alignment, bDelete, Padding, MinAllocationSize);
256 if (bDelete)
257 {
258 FFreeEntry*& PrevRef = Prev ? Prev->Next : FreeList;
259 PrevRef = FindEntry->Next;
260 delete FindEntry;
261 }
262
263 FGrowableAllocationBase* Allocation = CreateAllocationStruct();
264 Allocation->Size = AlignedSize;
265 Allocation->Padding = Padding;
266 Allocation->Offset = Offset;
267#if ALLOCATION_HISTOGRAM
268 Allocation->OwnerType = OwnerType;
269#endif
270 LLM(FLowLevelMemTracker::Get().OnLowLevelAlloc(ELLMTracker::Default, GetAddressForTracking(Offset), Size));
271 MemoryTrace_Alloc(uint64(GetAddressForTracking(Offset)), Size, Alignment, EMemoryTraceRootHeap::SystemMemory);
272
273 // let the implementation fill in any more
274 InitializeAllocationStruct(Allocation);
275 return Allocation;
276 }
277
278 // if no suitable blocks were found, we must fail
279 FPlatformMisc::LowLevelOutputDebugStringf(TEXT("Failed to allocate GPU memory (Size: %d)"), AlignedSize);
280 return nullptr;
281 }
282
283 bool Free(FGrowableAllocationBase* Memory)
284 {
285 // multi-thread protection
286 FScopeLock ScopeLock(CriticalSection);
287
288 uint64 Padding = Memory->Padding;
289 uint64 Size = Memory->Size;
290 uint64 AllocationSize = Padding + Size;
291 uint32 Offset = Memory->Offset;
292
293 LLM(FLowLevelMemTracker::Get().OnLowLevelFree(ELLMTracker::Default, GetAddressForTracking(Offset)));
294 MemoryTrace_Free(uint64(GetAddressForTracking(Offset)), EMemoryTraceRootHeap::SystemMemory);
295
296 // we are now done with the Allocation object
297 DestroyAllocationStruct(Memory);
298
299 UsedMemorySize -= Size;
300 CurrentAllocs--;
301
302 // Search for a place to insert a new free entry
303 FFreeEntry* Prev = NULL;
304 FFreeEntry* Entry = FreeList;
305 while (Entry && Offset > Entry->Location)
306 {
307 Prev = Entry;
308 Entry = Entry->Next;
309 }
310
311 // Are we right before this free entry?
312 if (Entry && (Offset + Size) == Entry->Location)
313 {
314 // Join with chunk
315 Entry->Location -= uint32(AllocationSize);
316 Entry->BlockSize += uint32(AllocationSize);
317
318 // Can we join the two entries?
319 if (Prev && (Prev->Location + Prev->BlockSize) == Entry->Location)
320 {
321 Prev->BlockSize += Entry->BlockSize;
322 Prev->Next = Entry->Next;
323 MaxFreeEntrySize = FMath::Max(MaxFreeEntrySize, Prev->BlockSize);
324 delete Entry;
325 }
326 else
327 {
328 MaxFreeEntrySize = FMath::Max(MaxFreeEntrySize, Entry->BlockSize);
329 }
330 return true;
331 }
332
333 // Are we right after the previous free entry?
334 if (Prev && (Prev->Location + Prev->BlockSize + Padding) == Offset)
335 {
336 // Join with chunk
337 Prev->BlockSize += uint32(AllocationSize);
338
339 // Can we join the two entries?
340 if (Entry && (Prev->Location + Prev->BlockSize) == Entry->Location)
341 {
342 Prev->BlockSize += Entry->BlockSize;
343 Prev->Next = Entry->Next;
344 delete Entry;
345 }
346 MaxFreeEntrySize = FMath::Max(MaxFreeEntrySize, Prev->BlockSize);
347 return true;
348 }
349
350 // Insert a new entry.
351 FFreeEntry* NewFree = new FFreeEntry(Entry, uint32(Offset - Padding), AllocationSize);
352 FFreeEntry*& PrevRef = Prev ? Prev->Next : FreeList;
353 PrevRef = NewFree;
354 MaxFreeEntrySize = FMath::Max(MaxFreeEntrySize, NewFree->BlockSize);
355 return true;
356 }
357
358 void GetAllocationInfo(uint64& Used, uint64& Free)
359 {
360 // @todo: Validate this accounts for alignment padding in the right way
361 Used = UsedMemorySize;
362 Free = HeapSize - UsedMemorySize;
363 }
364
366 {
367 FPlatformMisc::LowLevelOutputDebugStringf(TEXT("Full Free List:\n"));
368
369 TMap<uint64, uint32> FreeBlockSizeHistogram;
370
371 uint32 Count = 0;
372 uint64 Total = 0;
373
374 FFreeEntry* Entry = FreeList;
375
376 while (Entry)
377 {
378 FPlatformMisc::LowLevelOutputDebugStringf(TEXT(" Location: %d Size: %d\n"), Entry->Location, Entry->BlockSize);
379 if (FreeBlockSizeHistogram.Contains(Entry->BlockSize))
380 {
381 FreeBlockSizeHistogram.FindChecked(Entry->BlockSize)++;
382 }
383 else
384 {
385 FreeBlockSizeHistogram.Add(Entry->BlockSize) = 1;
386 }
387
388 Count++;
389 Total += Entry->BlockSize;
390
391 Entry = Entry->Next;
392 }
393
394 FPlatformMisc::LowLevelOutputDebugStringf(TEXT("Num Blocks: %d Total Size: %d\n"), Count, Total);
395
396 for (auto It = FreeBlockSizeHistogram.CreateIterator(); It; ++It)
397 {
398 FPlatformMisc::LowLevelOutputDebugStringf(TEXT(" %d, %d\n"), It.Key(), It.Value());
399 }
400
401
402 }
403
404 // returns an address usable FOR TRACKING ONLY!
405 virtual void* GetAddressForTracking(uint32 Offset) = 0;
406
407private:
408 class FFreeEntry
409 {
410 public:
411 /** Constructor */
412 FFreeEntry(FFreeEntry *NextEntry, uint32 InLocation, uint64 InSize)
413 : Location(InLocation)
414 , BlockSize(uint32(InSize))
415 , Next(NextEntry)
416 {
417 }
418
419 /**
420 * Determine if the given allocation with this alignment and size will fit
421 * @param AlignedSize Already aligned size of an allocation
422 * @param Alignment Alignment of the allocation (location may need to increase to match alignment)
423 * @return true if the allocation will fit
424 */
425 bool CanFit(uint64 AlignedSize, uint32 Alignment)
426 {
427 // location of the aligned allocation
428 uint32 AlignedLocation = Align(Location, Alignment);
429
430 // if we fit even after moving up the location for alignment, then we are good
431 return (AlignedSize + (AlignedLocation - Location)) <= BlockSize;
432 }
433
434 /**
435 * Take a free chunk, and split it into a used chunk and a free chunk
436 * @param UsedSize The size of the used amount (anything left over becomes free chunk)
437 * @param Alignment The alignment of the allocation (location and size)
438 * @param bDelete Whether or not to delete this FreeEntry (ie no more is left over after splitting)
439 *
440 * @return The location of the free data
441 */
442 uint32 Split(uint64 UsedSize, uint32 Alignment, bool &bDelete, uint32& Padding, uint32 MinSize)
443 {
444 // make sure we are already aligned
445 check((UsedSize & (Alignment - 1)) == 0);
446
447 // this is the pointer to the free data
448 uint32 FreeLoc = Align(Location, Alignment);
449
450 // Adjust the allocated size for any alignment padding
451 Padding = FreeLoc - Location;
452 uint64 AllocationSize = UsedSize + uint64(Padding);
453
454 // see if there's enough space left over for a new free chunk (of at least a certain size)
455 if (BlockSize - AllocationSize >= MinSize)
456 {
457 // update this free entry to just point to what's left after using the UsedSize
458 Location += uint32(AllocationSize);
459 BlockSize -= uint32(AllocationSize);
460 bDelete = false;
461 }
462 // if no more room, then just remove this entry from the list of free items
463 else
464 {
465 bDelete = true;
466 }
467
468 // return a usable pointer!
469 return FreeLoc;
470 }
471
472 /** Offset in the heap */
473 uint32 Location;
474
475 /** Size of the free block */
476 uint32 BlockSize;
477
478 /** Next block of free memory. */
479 FFreeEntry* Next;
480 };
481
482//protected:
483// @todo make accessors for TGenericGrowableAllocator to call
484public:
485
486 // type of this memory, up to the subclass to define what it means
487 uint32 MemoryType;
488
489 /** Size of the heap */
490 uint64 HeapSize;
491
492 /** Size of used memory */
493 uint64 UsedMemorySize;
494
495 /** Size of the largest free entry (will be MaxFreeEntrySizeDirty if unknown) */
496 uint32 MaxFreeEntrySize;
497
498 /** List of free blocks */
499 FFreeEntry* FreeList;
500
501 /** Shared critical section */
502 FCriticalSection* CriticalSection;
503
504 static const uint32 MaxFreeEntrySizeDirty = 0xffffffff;
505};
506
507
508/**
509 * Allocator that will grow as needed with direct mapped memory for a given memory type
510 */
511template <typename ChunkAllocatorType, typename GrowableAllocationBaseType>
512class TGenericGrowableAllocator : public FGrowableMallocBase
513{
514public:
515
516 /**
517 * Constructor
518 * Internally allocates address space for use only by this allocator
519 *
520 * @param Type - The templated memory type to allocate with this allocator
521 * @param StatRegion - The region of memory this is responsible for, for updating the region max sizes
522 */
523 TGenericGrowableAllocator(uint64 InitialSize, uint32 InType, uint32 InSubAllocationAlignment, FName InStatRegionName, const FName* InOwnerTypeToStatIdMap, void* InUserData)
529 {
530 // create initial chunk
531 if (InitialSize > 0)
532 {
533 CreateAllocChunk(InitialSize);
534 }
535 }
536
537
538 /**
539 * Destructor
540 */
541 ~TGenericGrowableAllocator()
542 {
543 // remove any existing chunks
544 for (int32 Index = 0; Index < AllocChunks.Num(); Index++)
545 {
546 ChunkAllocatorType* Chunk = AllocChunks[Index];
547 if (Chunk)
548 {
549#if !UE_BUILD_SHIPPING
550 if (!Chunk->IsEmpty())
551 {
552 FPlatformMisc::LowLevelOutputDebugStringf(TEXT("Trying to free a non-empty chunk."));
554 }
555#endif
556
557 RemoveAllocChunk(Chunk);
558 }
559 }
560 }
561
562
563 GrowableAllocationBaseType* Malloc(uint32 Size, uint32 Alignment, int32 OwnerType)
564 {
565 // make sure we have some minimal alignment
566 Alignment = FMath::Max(Alignment, SubAllocationAlignment);
567
568 // multi-thread protection
569 FScopeLock ScopeLock(&CriticalSection);
570
571 ChunkAllocatorType* AvailableChunk = nullptr;
572
573 // align the size to match what Malloc does below
574 uint32 AlignedSize = Align<uint32>(Size, Alignment);
575
576 // Update stats.
581 TotalAllocs++;
582
583 // search for an existing alloc chunk with enough space
584 for (int32 ChunkIndex = 0; ChunkIndex < AllocChunks.Num(); ChunkIndex++)
585 {
586 ChunkAllocatorType* Chunk = AllocChunks[ChunkIndex];
587 if (Chunk && Chunk->CanFitEntry(AlignedSize, Alignment))
588 {
589 AvailableChunk = Chunk;
590 break;
591 }
592 }
593
594 // create a new chunk with enough space + alignment to Switch_GrowableHeapAlignment_MB and allocate out of it
595 if (AvailableChunk == nullptr)
596 {
597 AvailableChunk = CreateAllocChunk(AlignedSize);
598 }
599
600 // allocate from the space in the chunk
601 GrowableAllocationBaseType* Result = nullptr;
602 if (AvailableChunk)
603 {
604 Result = (GrowableAllocationBaseType*)AvailableChunk->Malloc(Size, Alignment, SubAllocationAlignment, OwnerType);
605 }
606
607 if (AvailableChunk == nullptr || Result == nullptr)
608 {
609 OutOfMemory(Size);
610 return nullptr;
611 }
612
616
617 // track per type allocation info
618#if ALLOCATION_HISTOGRAM
619
622#if ALLOCATION_HISTOGRAM_DETAILED
624#endif
625#endif // ALLOCATION_HISTOGRAM
626
627#if STATS
629 {
631 }
632#endif
633
634 return Result;
635 }
636
637 bool Free(GrowableAllocationBaseType* Memory)
638 {
639 if (Memory == nullptr)
640 {
641 return true;
642 }
643
644 // multi-thread protection
645 FScopeLock ScopeLock(&CriticalSection);
646
647 // starting address space idx used by the chunk containing the allocation
648 for (int32 ChunkIndex = 0; ChunkIndex < AllocChunks.Num(); ChunkIndex++)
649 {
650 ChunkAllocatorType* Chunk = AllocChunks[ChunkIndex];
651 if (Chunk && Chunk->DoesChunkContainAllocation(Memory))
652 {
655
656 // untrack per type allocation info
657#if ALLOCATION_HISTOGRAM
658
661#if ALLOCATION_HISTOGRAM_DETAILED
663#endif
664#endif // ALLOCATION_HISTOGRAM
665
666#if STATS
668 {
670 }
671#endif
672 // free space in the chunk
673 Chunk->Free(Memory);
675
676 // never toss the only chunk
677 if (Chunk->IsEmpty())// && AllocChunks.Num() > 1)
678 {
679 // if empty then unmap and decommit physical memory
680 RemoveAllocChunk(Chunk);
681 }
682
683 // return success
684 return true;
685 }
686 }
687
688 // if we got here, we failed to free the pointer
689 UE_LOG(LogCore, Fatal, TEXT("Tried to free invalid pointer"));
690 return false;
691 }
692
693
694 void GetAllocationInfo(uint32& Chunks, uint64& Used, uint64& Free)
695 {
696 // multi-thread protection
697 FScopeLock ScopeLock(&CriticalSection);
698
699 Chunks = 0;
700 // pass off to individual alloc chunks
701 for (int32 ChunkIndex = 0; ChunkIndex < AllocChunks.Num(); ChunkIndex++)
702 {
703 ChunkAllocatorType* Chunk = AllocChunks[ChunkIndex];
704 if (Chunk)
705 {
706 uint64 ChunkUsed = 0;
707 uint64 ChunkFree = 0;
708 Chunk->GetAllocationInfo(ChunkUsed, ChunkFree);
709 Used += ChunkUsed;
710 Free += ChunkFree;
711 Chunks++;
712 }
713 }
714 }
715
716 bool DoesAllocatorContainAddress(const void* Address)
717 {
718 // multi-thread protection
719 FScopeLock ScopeLock(&CriticalSection);
720
721 // loop through the chunks, query each one to see if they contain the address
722 for (int32 ChunkIndex = 0; ChunkIndex < AllocChunks.Num(); ChunkIndex++)
723 {
724 ChunkAllocatorType* Chunk = AllocChunks[ChunkIndex];
725 if (Chunk && Chunk->DoesChunkContainAddress(Address))
726 {
727 return true;
728 }
729 }
730 return false;
731 }
732
734 {
735 FPlatformMisc::LowLevelOutputDebugStringf(TEXT(" Allocator has %d chunks\n"), AllocChunks.Num());
736 FPlatformMisc::LowLevelOutputDebugStringf(TEXT(" Allocator average allocation size is %d (%lld over %lld allocs)\n"), (uint32)((float)TotalAllocated / (float)TotalAllocs), TotalAllocated, TotalAllocs);
737 FPlatformMisc::LowLevelOutputDebugStringf(TEXT(" Allocator average waste (on top of allocation) size is %d (%lld over %lld allocs)\n"), (uint32)((float)TotalWaste / (float)TotalAllocs), TotalWaste, TotalAllocs);
738 }
739
741 {
742 // multi-thread protection
743 FScopeLock ScopeLock(&CriticalSection);
744
745#if ALLOCATION_HISTOGRAM
746 FPlatformMisc::LowLevelOutputDebugStringf(TEXT("Total Allocations Histogram:\n"));
748 {
750 }
751#endif // ALLOCATION_HISTOGRAM
752
753 int32 NumChunks = 0;
754 // pass off to individual alloc chunks
755 for (int32 ChunkIndex = 0; ChunkIndex < AllocChunks.Num(); ChunkIndex++)
756 {
757 FPlatformMisc::LowLevelOutputDebugStringf(TEXT("\n-----------------\nChunk %d\n"),ChunkIndex);
758 ChunkAllocatorType* Chunk = AllocChunks[ChunkIndex];
759 if (Chunk)
760 {
762 }
763 }
764 }
765
767 {
768#if ALLOCATION_HISTOGRAM
769 // we use LowLevel here because we are usually dumping this out while
771 {
772 const AllocationInfo& Info = InfoIt.Value();
773
775 {
777 }
778 else
779 {
780 FPlatformMisc::LowLevelOutputDebugStringf(TEXT(" 'OwnerType %d': %lld Allocs: %d Frees: %d\n"), InfoIt.Key(), Info.TotalAllocated, Info.Counts.Allocations, Info.Counts.Frees);
781 }
782#if ALLOCATION_HISTOGRAM_DETAILED
784 {
786 }
787#endif
788 }
789#endif // ALLOCATION_HISTOGRAM
790 }
791
792private:
793
794 /** Updates the memory stat max sizes when chunks are added/removed */
796 {
797 }
798
799 /**
800 * Create a new allocation chunk to fit the requested size. All chunks are aligned to MIN_CHUNK_ALIGNMENT
801 *
802 * @param Size - size of chunk
803 */
804 ChunkAllocatorType* CreateAllocChunk(uint64 Size)
805 {
809
810 // add a new entry to list (reusing any empty slots)
811 int32 FirstEmptyIndex = AllocChunks.Find(nullptr);
812 if (FirstEmptyIndex != INDEX_NONE)
813 {
814 AllocChunks[FirstEmptyIndex] = NewChunk;
815 }
816 else
817 {
818 AllocChunks.Add(NewChunk);
819 }
820
821#if STATS
823#endif
824
825 return NewChunk;
826 }
827
828
829 /**
830 * Removes an existing allocated chunk. Unmaps its memory, decommits physical memory back to OS,
831 * flushes address entries associated with it, and deletes it
832 *
833 * @param Chunk - existing chunk to remove
834 */
835 void RemoveAllocChunk(ChunkAllocatorType* Chunk)
836 {
838
840
841 // remove entry
842 int32 FoundIdx = AllocChunks.Find(Chunk);
843 check(FoundIdx != INDEX_NONE);
844 AllocChunks[FoundIdx] = NULL;
845 Chunk->Destroy();
846 delete Chunk;
847
848#if STATS
850#endif
851 }
852
853
854 /** triggered during out of memory failure for this allocator */
855 void OutOfMemory(uint32 Size)
856 {
857#if !UE_BUILD_SHIPPING
858 FPlatformMisc::LowLevelOutputDebugStringf(TEXT("FGrowableAllocator: OOM allocating %dbytes %fMB"), Size, static_cast<float>(Size) / 1024.0f / 1024.0f);
859 UE_LOG(LogCore, Fatal, TEXT("FGrowableAllocator: OOM allocating %dbytes %fMB"), Size, static_cast<float>(Size) / 1024.0f / 1024.0f);
860#endif
861 }
862
863 // size must be aligned at least to this
864 uint32 SubAllocationAlignment;
865
866 /** total currently allocated from OS */
868
869 /** Just stat tracking */
872
873 /** The type of memory this allocator allocates from the kernel */
874 uint32 MemoryType;
875
876 /** The stat memory region to update max size */
877 FName StatRegionName;
878
879 /** For stats/dumping, we use this to convert OwnerType of an allocation to a printable name */
880 const FName* OwnerTypeToStatIdMap;
881
882 /** list of currently allocated chunks */
883 TArray<ChunkAllocatorType*> AllocChunks;
884
885 // extra data to pass to new Chunks
886 void* UserData;
887
888 // a critical section used to coordinate all access in this instance of the allocator and its chunks
889 FCriticalSection CriticalSection;
890
893 {
896 };
898 {
900#if ALLOCATION_HISTOGRAM_DETAILED
902#endif
904 };
905
907
911#endif // ALLOCATION_HISTOGRAM
912};
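Usage sketch (not part of the engine header). FGrowableMallocChunk above runs the free-list bookkeeping but leaves the backing memory abstract, and TGenericGrowableAllocator only needs a concrete chunk type plus a matching allocation struct as template parameters. The code below is a minimal illustration, under stated assumptions: every FExample* name is invented here, FMemory::Malloc merely stands in for whatever real backing store (GPU or pool memory) an implementation would use, the include path and the extra UserData constructor parameter are assumptions about how the allocator constructs new chunks, and no LLM pausing is shown.

#include "CoreMinimal.h"
#include "GenericGrowableAllocator.h" // the header listed above; actual path may differ

// Hypothetical allocation struct; InitializeAllocationStruct resolves Offset to a usable pointer.
struct FExampleAllocation : public FGrowableAllocationBase
{
	void* Ptr = nullptr;
};

// Hypothetical chunk that backs its pool with a plain FMemory allocation.
class FExampleMallocChunk : public FGrowableMallocChunk
{
public:
	// Assumption: the allocator constructs chunks as (Size, MemoryType, &CriticalSection, UserData).
	FExampleMallocChunk(uint64 InSize, uint32 Type, FCriticalSection* InCriticalSection, void* /*InUserData*/)
		: FGrowableMallocChunk(InSize, Type, InCriticalSection)
		, BaseAddress(nullptr)
	{
	}

	virtual uint64 CreateInternalMemory(uint64 Size) override
	{
		// A real implementation would pause ELLMTracker::Default here if its backing
		// allocator is already LLM-tracked (see the comments on this function above).
		BaseAddress = FMemory::Malloc(Size, 4096);
		return Size;
	}

	virtual void DestroyInternalMemory() override
	{
		FMemory::Free(BaseAddress);
		BaseAddress = nullptr;
	}

	virtual FGrowableAllocationBase* CreateAllocationStruct() override
	{
		return new FExampleAllocation;
	}

	virtual void InitializeAllocationStruct(FGrowableAllocationBase* Allocation) override
	{
		// map the sub-allocation's offset within this chunk back to an address
		((FExampleAllocation*)Allocation)->Ptr = (uint8*)BaseAddress + Allocation->Offset;
	}

	virtual bool DoesChunkContainAllocation(const FGrowableAllocationBase* Allocation) override
	{
		return DoesChunkContainAddress(((const FExampleAllocation*)Allocation)->Ptr);
	}

	virtual bool DoesChunkContainAddress(const void* Address) override
	{
		const uint8* Addr = (const uint8*)Address;
		const uint8* Base = (const uint8*)BaseAddress;
		return Addr >= Base && Addr < Base + HeapSize;
	}

	virtual void* GetAddressForTracking(uint32 Offset) override
	{
		return (uint8*)BaseAddress + Offset;
	}

private:
	void* BaseAddress;
};

using FExampleGrowableAllocator = TGenericGrowableAllocator<FExampleMallocChunk, FExampleAllocation>;

void ExampleUsage()
{
	// 64 MB initial chunk, memory type 0, 256-byte sub-allocation alignment,
	// no stat region name, no owner-type-to-stat map, no user data.
	FExampleGrowableAllocator Allocator(64 * 1024 * 1024, 0, 256, NAME_None, nullptr, nullptr);

	// OwnerType is only used by the optional ALLOCATION_HISTOGRAM / stats tracking.
	FExampleAllocation* Block = Allocator.Malloc(1024, 128, /*OwnerType*/ 0);
	if (Block != nullptr)
	{
		FMemory::Memzero(Block->Ptr, Block->Size);
		Allocator.Free(Block);
	}
}

The point of the split: the chunk base class owns the offset/free-list logic, while the subclass only decides where the backing bytes live and how an Offset maps back to an address for tracking and use.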