diff --git a/compiler-rt/lib/scudo/standalone/allocator_config.def b/compiler-rt/lib/scudo/standalone/allocator_config.def
index 84fcec0877d40..f427488ac4293 100644
--- a/compiler-rt/lib/scudo/standalone/allocator_config.def
+++ b/compiler-rt/lib/scudo/standalone/allocator_config.def
@@ -111,6 +111,11 @@ PRIMARY_OPTIONAL_TYPE(ConditionVariableT, ConditionVariableDummy)
 // to, in increments of a power-of-2 scale. See `CompactPtrScale` also.
 PRIMARY_OPTIONAL_TYPE(CompactPtrT, uptr)
 
+// Clears the memory slot when an allocation is returned to the allocator.
+// Operating systems that detect pages filled with zeroes will decommit the
+// memory.
+PRIMARY_OPTIONAL(const bool, ZeroOnDealloc, false)
+
 // SECONDARY_REQUIRED_TEMPLATE_TYPE(NAME)
 //
 // Defines the type of Secondary Cache to use.
diff --git a/compiler-rt/lib/scudo/standalone/flags.inc b/compiler-rt/lib/scudo/standalone/flags.inc
index ff0c28e1db7c4..2abee3744c28f 100644
--- a/compiler-rt/lib/scudo/standalone/flags.inc
+++ b/compiler-rt/lib/scudo/standalone/flags.inc
@@ -34,6 +34,10 @@ SCUDO_FLAG(bool, delete_size_mismatch, true,
 
 SCUDO_FLAG(bool, zero_contents, false, "Zero chunk contents on allocation.")
 
+SCUDO_FLAG(bool, zero_on_dealloc, false,
+           "Clears the memory slot when an allocation is returned to the "
+           "allocator.")
+
 SCUDO_FLAG(bool, pattern_fill_contents, false,
            "Pattern fill chunk contents on allocation.")
 
diff --git a/compiler-rt/lib/scudo/standalone/primary64.h b/compiler-rt/lib/scudo/standalone/primary64.h
index 25ee999199114..145ee26dd855f 100644
--- a/compiler-rt/lib/scudo/standalone/primary64.h
+++ b/compiler-rt/lib/scudo/standalone/primary64.h
@@ -53,6 +53,7 @@ template <typename Config> class SizeClassAllocator64 {
   static const uptr CompactPtrScale = Config::getCompactPtrScale();
   static const uptr RegionSizeLog = Config::getRegionSizeLog();
   static const uptr GroupSizeLog = Config::getGroupSizeLog();
+  static const bool ZeroOnDealloc = Config::getZeroOnDealloc();
   static_assert(RegionSizeLog >= GroupSizeLog,
                 "Group size shouldn't be greater than the region size");
   static const uptr GroupScale = GroupSizeLog - CompactPtrScale;
diff --git a/compiler-rt/lib/scudo/standalone/size_class_allocator.h b/compiler-rt/lib/scudo/standalone/size_class_allocator.h
index 7c7d6307f8f0a..b51ad882c28a0 100644
--- a/compiler-rt/lib/scudo/standalone/size_class_allocator.h
+++ b/compiler-rt/lib/scudo/standalone/size_class_allocator.h
@@ -9,6 +9,7 @@
 #ifndef SCUDO_SIZE_CLASS_ALLOCATOR_H_
 #define SCUDO_SIZE_CLASS_ALLOCATOR_H_
 
+#include "flags.h"
 #include "internal_defs.h"
 #include "list.h"
 #include "platform.h"
@@ -28,6 +29,7 @@ template <class SizeClassAllocator> struct SizeClassAllocatorLocalCache {
     if (LIKELY(S))
       S->link(&Stats);
     Allocator = A;
+    ZeroOnDealloc = getFlags()->zero_on_dealloc;
     initAllocator();
   }
 
@@ -59,6 +61,11 @@ template <class SizeClassAllocator> struct SizeClassAllocatorLocalCache {
 
   bool deallocate(uptr ClassId, void *P) {
     CHECK_LT(ClassId, NumClasses);
+
+    if (SizeClassAllocator::ZeroOnDealloc || ZeroOnDealloc) {
+      memset(P, 0, SizeClassAllocator::getSizeByClassId(ClassId));
+    }
+
     PerClass *C = &PerClassArray[ClassId];
     // If the cache is full, drain half of blocks back to the main allocator.
 
@@ -145,6 +152,7 @@ template <class SizeClassAllocator> struct SizeClassAllocatorLocalCache {
   PerClass PerClassArray[NumClasses] = {};
   LocalStats Stats;
   SizeClassAllocator *Allocator = nullptr;
+  bool ZeroOnDealloc = false;
 
   NOINLINE void initAllocator() {
     for (uptr I = 0; I < NumClasses; I++) {
@@ -188,6 +196,7 @@ template <class SizeClassAllocator> struct SizeClassAllocatorNoCache {
     if (LIKELY(S))
       S->link(&Stats);
     Allocator = A;
+    ZeroOnDealloc = getFlags()->zero_on_dealloc;
     initAllocator();
   }
 
@@ -211,6 +220,10 @@ template <class SizeClassAllocator> struct SizeClassAllocatorNoCache {
 
   bool deallocate(uptr ClassId, void *P) {
     CHECK_LT(ClassId, NumClasses);
+    if (SizeClassAllocator::ZeroOnDealloc || ZeroOnDealloc) {
+      memset(P, 0, SizeClassAllocator::getSizeByClassId(ClassId));
+    }
+
     if (ClassId == BatchClassId)
       return deallocateBatchClassBlock(P);
 
@@ -288,6 +301,7 @@ template <class SizeClassAllocator> struct SizeClassAllocatorNoCache {
   CompactPtrT BatchClassStorage[SizeClassMap::MaxNumCachedHint] = {};
   LocalStats Stats;
   SizeClassAllocator *Allocator = nullptr;
+  bool ZeroOnDealloc = false;
 
   bool deallocateBatchClassBlock(void *P) {
     PerClass *C = &PerClassArray[BatchClassId];
diff --git a/compiler-rt/lib/scudo/standalone/tests/primary_test.cpp b/compiler-rt/lib/scudo/standalone/tests/primary_test.cpp
index 7dc38c2ede8ca..47bfc3a9fc242 100644
--- a/compiler-rt/lib/scudo/standalone/tests/primary_test.cpp
+++ b/compiler-rt/lib/scudo/standalone/tests/primary_test.cpp
@@ -150,6 +150,27 @@ template <typename SizeClassMapT> struct TestConfig5 {
   };
 };
 
+// Enable `ZeroOnDealloc`.
+template <typename SizeClassMapT> struct TestConfig6 {
+  static const bool MaySupportMemoryTagging = false;
+  template <typename> using TSDRegistryT = void;
+  template <typename> using PrimaryT = void;
+  template <typename> using SecondaryT = void;
+
+  struct Primary {
+    using SizeClassMap = SizeClassMapT;
+    static const scudo::uptr RegionSizeLog = 18U;
+    static const scudo::uptr GroupSizeLog = 18U;
+    static const scudo::s32 MinReleaseToOsIntervalMs = INT32_MIN;
+    static const scudo::s32 MaxReleaseToOsIntervalMs = INT32_MAX;
+    typedef scudo::uptr CompactPtrT;
+    static const scudo::uptr CompactPtrScale = 0;
+    static const bool EnableRandomOffset = true;
+    static const scudo::uptr MapSizeIncrement = 1UL << 18;
+    static const bool ZeroOnDealloc = true;
+  };
+};
+
 template <template <typename> class BaseConfig, typename SizeClassMapT>