From 2347527fb2b244d3fb98697a430816bb74b0918b Mon Sep 17 00:00:00 2001
From: Justin King
Date: Wed, 9 Jul 2025 07:06:58 -0700
Subject: [PATCH] scudo: refactor scudo::Allocator::deallocate

Signed-off-by: Justin King
---
 compiler-rt/lib/scudo/standalone/combined.h   | 129 +++++++++++-------
 .../scudo/standalone/tests/combined_test.cpp  |  26 ++--
 .../lib/scudo/standalone/wrappers_cpp.cpp     |  28 ++--
 3 files changed, 105 insertions(+), 78 deletions(-)

diff --git a/compiler-rt/lib/scudo/standalone/combined.h b/compiler-rt/lib/scudo/standalone/combined.h
index 87acdec2a3bac..ec56307b66154 100644
--- a/compiler-rt/lib/scudo/standalone/combined.h
+++ b/compiler-rt/lib/scudo/standalone/combined.h
@@ -432,60 +432,30 @@ class Allocator {
                            SizeOrUnusedBytes, FillContents);
   }
 
-  NOINLINE void deallocate(void *Ptr, Chunk::Origin Origin, uptr DeleteSize = 0,
-                           UNUSED uptr Alignment = MinAlignment) {
-    if (UNLIKELY(!Ptr))
-      return;
-
-    // For a deallocation, we only ensure minimal initialization, meaning thread
-    // local data will be left uninitialized for now (when using ELF TLS). The
-    // fallback cache will be used instead. This is a workaround for a situation
-    // where the only heap operation performed in a thread would be a free past
-    // the TLS destructors, ending up in initialized thread specific data never
-    // being destroyed properly. Any other heap operation will do a full init.
-    initThreadMaybe(/*MinimalInit=*/true);
-
-#ifdef GWP_ASAN_HOOKS
-    if (UNLIKELY(GuardedAlloc.pointerIsMine(Ptr))) {
-      GuardedAlloc.deallocate(Ptr);
-      Stats.lock();
-      Stats.add(StatFree, GuardedAllocSlotSize);
-      Stats.sub(StatAllocated, GuardedAllocSlotSize);
-      Stats.unlock();
-      return;
-    }
-#endif // GWP_ASAN_HOOKS
-
-    if (UNLIKELY(!isAligned(reinterpret_cast<uptr>(Ptr), MinAlignment)))
-      reportMisalignedPointer(AllocatorAction::Deallocating, Ptr);
-
-    void *TaggedPtr = Ptr;
-    Ptr = getHeaderTaggedPointer(Ptr);
-
-    Chunk::UnpackedHeader Header;
-    Chunk::loadHeader(Cookie, Ptr, &Header);
-
-    if (UNLIKELY(Header.State != Chunk::State::Allocated))
-      reportInvalidChunkState(AllocatorAction::Deallocating, Ptr);
+  ALWAYS_INLINE void deallocate(void *Ptr, Chunk::Origin Origin) {
+    deallocate(Ptr, Origin, /*DeleteSize=*/0, /*HasDeleteSize=*/false,
+               /*DeleteAlignment=*/0, /*HasDeleteAlignment=*/false);
+  }
 
-    const Options Options = Primary.Options.load();
-    if (Options.get(OptionBit::DeallocTypeMismatch)) {
-      if (UNLIKELY(Header.OriginOrWasZeroed != Origin)) {
-        // With the exception of memalign'd chunks, that can be still be free'd.
-        if (Header.OriginOrWasZeroed != Chunk::Origin::Memalign ||
-            Origin != Chunk::Origin::Malloc)
-          reportDeallocTypeMismatch(AllocatorAction::Deallocating, Ptr,
-                                    Header.OriginOrWasZeroed, Origin);
-      }
-    }
+  ALWAYS_INLINE void deallocateSized(void *Ptr, Chunk::Origin Origin,
+                                     uptr DeleteSize) {
+    deallocate(Ptr, Origin, /*DeleteSize=*/DeleteSize, /*HasDeleteSize=*/true,
+               /*DeleteAlignment=*/0, /*HasDeleteAlignment=*/false);
+  }
 
-    const uptr Size = getSize(Ptr, &Header);
-    if (DeleteSize && Options.get(OptionBit::DeleteSizeMismatch)) {
-      if (UNLIKELY(DeleteSize != Size))
-        reportDeleteSizeMismatch(Ptr, DeleteSize, Size);
-    }
+  ALWAYS_INLINE void deallocateSizedAligned(void *Ptr, Chunk::Origin Origin,
+                                            uptr DeleteSize,
+                                            uptr DeleteAlignment) {
+    deallocate(Ptr, Origin, /*DeleteSize=*/DeleteSize, /*HasDeleteSize=*/true,
+               /*DeleteAlignment=*/DeleteAlignment,
+               /*HasDeleteAlignment=*/true);
+  }
 
-    quarantineOrDeallocateChunk(Options, TaggedPtr, &Header, Size);
+  ALWAYS_INLINE void deallocateAligned(void *Ptr, Chunk::Origin Origin,
+                                       uptr DeleteAlignment) {
+    deallocate(Ptr, Origin, /*DeleteSize=*/0, /*HasDeleteSize=*/false,
+               /*DeleteAlignment=*/DeleteAlignment,
+               /*HasDeleteAlignment=*/true);
   }
 
   void *reallocate(void *OldPtr, uptr NewSize, uptr Alignment = MinAlignment) {
@@ -1245,6 +1215,63 @@ class Allocator {
     return TaggedPtr;
   }
 
+  NOINLINE void deallocate(void *Ptr, Chunk::Origin Origin, uptr DeleteSize,
+                           UNUSED bool HasDeleteSize, UNUSED uptr DeleteAlignment,
+                           UNUSED bool HasDeleteAlignment) {
+    if (UNLIKELY(!Ptr))
+      return;
+
+    // For a deallocation, we only ensure minimal initialization, meaning thread
+    // local data will be left uninitialized for now (when using ELF TLS). The
+    // fallback cache will be used instead. This is a workaround for a situation
+    // where the only heap operation performed in a thread would be a free past
+    // the TLS destructors, ending up in initialized thread specific data never
+    // being destroyed properly. Any other heap operation will do a full init.
+    initThreadMaybe(/*MinimalInit=*/true);
+
+#ifdef GWP_ASAN_HOOKS
+    if (UNLIKELY(GuardedAlloc.pointerIsMine(Ptr))) {
+      GuardedAlloc.deallocate(Ptr);
+      Stats.lock();
+      Stats.add(StatFree, GuardedAllocSlotSize);
+      Stats.sub(StatAllocated, GuardedAllocSlotSize);
+      Stats.unlock();
+      return;
+    }
+#endif // GWP_ASAN_HOOKS
+
+    if (UNLIKELY(!isAligned(reinterpret_cast<uptr>(Ptr), MinAlignment)))
+      reportMisalignedPointer(AllocatorAction::Deallocating, Ptr);
+
+    void *TaggedPtr = Ptr;
+    Ptr = getHeaderTaggedPointer(Ptr);
+
+    Chunk::UnpackedHeader Header;
+    Chunk::loadHeader(Cookie, Ptr, &Header);
+
+    if (UNLIKELY(Header.State != Chunk::State::Allocated))
+      reportInvalidChunkState(AllocatorAction::Deallocating, Ptr);
+
+    const Options Options = Primary.Options.load();
+    if (Options.get(OptionBit::DeallocTypeMismatch)) {
+      if (UNLIKELY(Header.OriginOrWasZeroed != Origin)) {
+        // With the exception of memalign'd chunks, which can still be free'd.
+        if (Header.OriginOrWasZeroed != Chunk::Origin::Memalign ||
+            Origin != Chunk::Origin::Malloc)
+          reportDeallocTypeMismatch(AllocatorAction::Deallocating, Ptr,
+                                    Header.OriginOrWasZeroed, Origin);
+      }
+    }
+
+    const uptr Size = getSize(Ptr, &Header);
+    if (DeleteSize && Options.get(OptionBit::DeleteSizeMismatch)) {
+      if (UNLIKELY(DeleteSize != Size))
+        reportDeleteSizeMismatch(Ptr, DeleteSize, Size);
+    }
+
+    quarantineOrDeallocateChunk(Options, TaggedPtr, &Header, Size);
+  }
+
   void quarantineOrDeallocateChunk(const Options &Options, void *TaggedPtr,
                                    Chunk::UnpackedHeader *Header,
                                    uptr Size) NO_THREAD_SAFETY_ANALYSIS {
diff --git a/compiler-rt/lib/scudo/standalone/tests/combined_test.cpp b/compiler-rt/lib/scudo/standalone/tests/combined_test.cpp
index 7e8d5b4396d2e..2ee33b14541ff 100644
--- a/compiler-rt/lib/scudo/standalone/tests/combined_test.cpp
+++ b/compiler-rt/lib/scudo/standalone/tests/combined_test.cpp
@@ -327,7 +327,7 @@ void ScudoCombinedTest::BasicTest(scudo::uptr SizeLog) {
       EXPECT_LE(Size, Allocator->getUsableSize(P));
       memset(P, 0xaa, Size);
       checkMemoryTaggingMaybe(Allocator, P, Size, Align);
-      Allocator->deallocate(P, Origin, Size);
+      Allocator->deallocateSized(P, Origin, Size);
     }
   }
 
@@ -374,7 +374,7 @@ SCUDO_TYPED_TEST(ScudoCombinedTest, ZeroContents) {
       for (scudo::uptr I = 0; I < Size; I++)
         ASSERT_EQ((reinterpret_cast<char *>(P))[I], '\0');
       memset(P, 0xaa, Size);
-      Allocator->deallocate(P, Origin, Size);
+      Allocator->deallocateSized(P, Origin, Size);
     }
   }
 }
@@ -392,7 +392,7 @@ SCUDO_TYPED_TEST(ScudoCombinedTest, ZeroFill) {
       for (scudo::uptr I = 0; I < Size; I++)
         ASSERT_EQ((reinterpret_cast<char *>(P))[I], '\0');
       memset(P, 0xaa, Size);
-      Allocator->deallocate(P, Origin, Size);
+      Allocator->deallocateSized(P, Origin, Size);
     }
   }
 }
@@ -419,7 +419,7 @@ SCUDO_TYPED_TEST(ScudoCombinedTest, PatternOrZeroFill) {
         ASSERT_TRUE(V == scudo::PatternFillByte || V == 0);
       }
       memset(P, 0xaa, Size);
-      Allocator->deallocate(P, Origin, Size);
+      Allocator->deallocateSized(P, Origin, Size);
     }
   }
 }
@@ -709,7 +709,7 @@ SCUDO_TYPED_TEST(ScudoCombinedTest, ThreadedCombined) {
       }
       while (!V.empty()) {
         auto Pair = V.back();
-        Allocator->deallocate(Pair.first, Origin, Pair.second);
+        Allocator->deallocateSized(Pair.first, Origin, Pair.second);
         V.pop_back();
       }
     });
@@ -782,26 +782,26 @@ TEST(ScudoCombinedDeathTest, DeathCombined) {
   EXPECT_NE(P, nullptr);
 
   // Invalid sized deallocation.
-  EXPECT_DEATH(Allocator->deallocate(P, Origin, Size + 8U), "");
+  EXPECT_DEATH(Allocator->deallocateSized(P, Origin, Size + 8U), "");
 
   // Misaligned pointer. Potentially unused if EXPECT_DEATH isn't available.
   UNUSED void *MisalignedP =
       reinterpret_cast<void *>(reinterpret_cast<scudo::uptr>(P) | 1U);
-  EXPECT_DEATH(Allocator->deallocate(MisalignedP, Origin, Size), "");
+  EXPECT_DEATH(Allocator->deallocateSized(MisalignedP, Origin, Size), "");
   EXPECT_DEATH(Allocator->reallocate(MisalignedP, Size * 2U), "");
 
   // Header corruption.
   scudo::u64 *H =
       reinterpret_cast<scudo::u64 *>(scudo::Chunk::getAtomicHeader(P));
   *H ^= 0x42U;
-  EXPECT_DEATH(Allocator->deallocate(P, Origin, Size), "");
+  EXPECT_DEATH(Allocator->deallocateSized(P, Origin, Size), "");
   *H ^= 0x420042U;
-  EXPECT_DEATH(Allocator->deallocate(P, Origin, Size), "");
+  EXPECT_DEATH(Allocator->deallocateSized(P, Origin, Size), "");
   *H ^= 0x420000U;
 
   // Invalid chunk state.
-  Allocator->deallocate(P, Origin, Size);
-  EXPECT_DEATH(Allocator->deallocate(P, Origin, Size), "");
+  Allocator->deallocateSized(P, Origin, Size);
+  EXPECT_DEATH(Allocator->deallocateSized(P, Origin, Size), "");
   EXPECT_DEATH(Allocator->reallocate(P, Size * 2U), "");
   EXPECT_DEATH(Allocator->getUsableSize(P), "");
 }
@@ -908,13 +908,13 @@ SCUDO_TYPED_TEST(ScudoCombinedTest, DisableMemInit) {
       memset(Ptrs[I], 0xaa, Size);
     }
     for (unsigned I = 0; I != Ptrs.size(); ++I)
-      Allocator->deallocate(Ptrs[I], Origin, Size);
+      Allocator->deallocateSized(Ptrs[I], Origin, Size);
    for (unsigned I = 0; I != Ptrs.size(); ++I) {
       Ptrs[I] = Allocator->allocate(Size - 8, Origin);
       memset(Ptrs[I], 0xbb, Size - 8);
     }
     for (unsigned I = 0; I != Ptrs.size(); ++I)
-      Allocator->deallocate(Ptrs[I], Origin, Size - 8);
+      Allocator->deallocateSized(Ptrs[I], Origin, Size - 8);
     for (unsigned I = 0; I != Ptrs.size(); ++I) {
       Ptrs[I] = Allocator->allocate(Size, Origin, 1U << MinAlignLog, true);
       for (scudo::uptr J = 0; J < Size; ++J)
diff --git a/compiler-rt/lib/scudo/standalone/wrappers_cpp.cpp b/compiler-rt/lib/scudo/standalone/wrappers_cpp.cpp
index 098d4f71acc4a..f1942acd02331 100644
--- a/compiler-rt/lib/scudo/standalone/wrappers_cpp.cpp
+++ b/compiler-rt/lib/scudo/standalone/wrappers_cpp.cpp
@@ -104,47 +104,47 @@ INTERFACE WEAK void operator delete[](void *ptr,
 }
 INTERFACE WEAK void operator delete(void *ptr, size_t size) NOEXCEPT {
   reportDeallocation(ptr);
-  Allocator.deallocate(ptr, scudo::Chunk::Origin::New, size);
+  Allocator.deallocateSized(ptr, scudo::Chunk::Origin::New, size);
 }
 INTERFACE WEAK void operator delete[](void *ptr, size_t size) NOEXCEPT {
   reportDeallocation(ptr);
-  Allocator.deallocate(ptr, scudo::Chunk::Origin::NewArray, size);
+  Allocator.deallocateSized(ptr, scudo::Chunk::Origin::NewArray, size);
 }
 INTERFACE WEAK void operator delete(void *ptr,
                                     std::align_val_t align) NOEXCEPT {
   reportDeallocation(ptr);
-  Allocator.deallocate(ptr, scudo::Chunk::Origin::New, 0,
-                       static_cast<scudo::uptr>(align));
+  Allocator.deallocateAligned(ptr, scudo::Chunk::Origin::New,
+                              static_cast<scudo::uptr>(align));
 }
 INTERFACE WEAK void operator delete[](void *ptr,
                                       std::align_val_t align) NOEXCEPT {
   reportDeallocation(ptr);
-  Allocator.deallocate(ptr, scudo::Chunk::Origin::NewArray, 0,
-                       static_cast<scudo::uptr>(align));
+  Allocator.deallocateAligned(ptr, scudo::Chunk::Origin::NewArray,
+                              static_cast<scudo::uptr>(align));
 }
 INTERFACE WEAK void operator delete(void *ptr, std::align_val_t align,
                                     std::nothrow_t const &) NOEXCEPT {
   reportDeallocation(ptr);
-  Allocator.deallocate(ptr, scudo::Chunk::Origin::New, 0,
-                       static_cast<scudo::uptr>(align));
+  Allocator.deallocateAligned(ptr, scudo::Chunk::Origin::New,
+                              static_cast<scudo::uptr>(align));
 }
 INTERFACE WEAK void operator delete[](void *ptr, std::align_val_t align,
                                       std::nothrow_t const &) NOEXCEPT {
   reportDeallocation(ptr);
-  Allocator.deallocate(ptr, scudo::Chunk::Origin::NewArray, 0,
-                       static_cast<scudo::uptr>(align));
+  Allocator.deallocateAligned(ptr, scudo::Chunk::Origin::NewArray,
+                              static_cast<scudo::uptr>(align));
 }
 INTERFACE WEAK void operator delete(void *ptr, size_t size,
                                     std::align_val_t align) NOEXCEPT {
   reportDeallocation(ptr);
-  Allocator.deallocate(ptr, scudo::Chunk::Origin::New, size,
-                       static_cast<scudo::uptr>(align));
+  Allocator.deallocateSizedAligned(ptr, scudo::Chunk::Origin::New, size,
+                                   static_cast<scudo::uptr>(align));
 }
 INTERFACE WEAK void operator delete[](void *ptr, size_t size,
                                       std::align_val_t align) NOEXCEPT {
   reportDeallocation(ptr);
-  Allocator.deallocate(ptr, scudo::Chunk::Origin::NewArray, size,
-                       static_cast<scudo::uptr>(align));
+  Allocator.deallocateSizedAligned(ptr, scudo::Chunk::Origin::NewArray, size,
+                                   static_cast<scudo::uptr>(align));
 }
 
 #endif // !SCUDO_ANDROID || !_BIONIC
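
Usage sketch (not part of the patch): each C++ delete overload above forwards exactly the size/alignment information it has, so the overload-to-entry-point mapping is mechanical. Below is a minimal sketch of that mapping against a standalone allocator, assuming scudo::DefaultConfig from allocator_config.h; the instance name A, the function ExampleDeallocate, and the sizes/alignments (64, 32) are illustrative only.

  // Sketch under the assumptions above; the allocator initializes itself
  // lazily on first use via initThreadMaybe(), as in wrappers_cpp.cpp.
  #include "allocator_config.h"
  #include "combined.h"

  static scudo::Allocator<scudo::DefaultConfig> A;

  void ExampleDeallocate() {
    // operator delete(void *): neither size nor alignment is known.
    void *P = A.allocate(64, scudo::Chunk::Origin::New);
    A.deallocate(P, scudo::Chunk::Origin::New);

    // operator delete(void *, size_t): size is known, alignment is not.
    P = A.allocate(64, scudo::Chunk::Origin::New);
    A.deallocateSized(P, scudo::Chunk::Origin::New, 64);

    // operator delete(void *, align_val_t): alignment is known, size is not.
    P = A.allocate(64, scudo::Chunk::Origin::New, 32);
    A.deallocateAligned(P, scudo::Chunk::Origin::New, 32);

    // operator delete(void *, size_t, align_val_t): both are known.
    P = A.allocate(64, scudo::Chunk::Origin::New, 32);
    A.deallocateSizedAligned(P, scudo::Chunk::Origin::New, 64, 32);
  }

Because the four public entry points are ALWAYS_INLINE and pass constant DeleteSize/DeleteAlignment/Has* arguments, each call site collapses to a direct call into the single NOINLINE deallocate, keeping callers small while the slow path stays shared.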