[NFC][asan] Add QuarantineCallback::{PreQuarantine,RecyclePassThrough}

Reviewed By: thurston

Differential Revision: https://reviews.llvm.org/D153496
Vitaly Buka 2023-06-21 22:26:01 -07:00
parent 5017344820
commit 735bcc9279
2 changed files with 28 additions and 20 deletions
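For orientation: before this change, the Quarantine<Callback, Node> template only required Recycle, Allocate and Deallocate from its callback; this commit adds two hooks, PreQuarantine and RecyclePassThrough, and moves ASan's pre-quarantine work (free-fill scribbling and shadow poisoning) behind them. What follows is only a sketch of the resulting callback shape, with placeholder bodies and made-up names (ExampleCallback, Chunk, the uptr alias); it is not the ASan implementation.

// Sketch of the callback interface Quarantine<Callback, Node> now drives.
using uptr = unsigned long;   // stand-in for the sanitizer-internal uptr
struct Chunk;                 // stand-in for the allocator's node type (AsanChunk in ASan)

struct ExampleCallback {
  // Runs when a node is accepted into the quarantine, before Enqueue.
  // ASan uses this to scribble the freed user memory and poison its shadow.
  void PreQuarantine(Chunk *m) { (void)m; }
  // Runs when a quarantined node is finally evicted and really freed.
  void Recycle(Chunk *m) { (void)m; }
  // Runs instead of quarantining when the chunk bypasses the quarantine
  // (quarantine disabled or chunk too large); in this commit ASan simply
  // forwards to PreQuarantine + Recycle here.
  void RecyclePassThrough(Chunk *m) { PreQuarantine(m); Recycle(m); }
  // Memory for the quarantine's own bookkeeping (QuarantineBatch objects).
  void *Allocate(uptr size) { (void)size; return nullptr; }  // placeholder
  void Deallocate(void *p) { (void)p; }                      // placeholder
};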

compiler-rt/lib/asan/asan_allocator.cpp

@@ -16,6 +16,7 @@
#include "asan_allocator.h"
#include "asan_internal.h"
#include "asan_mapping.h"
#include "asan_poisoning.h"
#include "asan_report.h"
@@ -24,6 +25,7 @@
#include "lsan/lsan_common.h"
#include "sanitizer_common/sanitizer_allocator_checks.h"
#include "sanitizer_common/sanitizer_allocator_interface.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_errno.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
@@ -196,6 +198,24 @@ struct QuarantineCallback {
stack_(stack) {
}
void PreQuarantine(AsanChunk *m) {
Flags &fl = *flags();
if (fl.max_free_fill_size > 0) {
// We have to skip the chunk header, it contains free_context_id.
uptr scribble_start = (uptr)m + kChunkHeaderSize + kChunkHeader2Size;
if (m->UsedSize() >= kChunkHeader2Size) { // Skip Header2 in user area.
uptr size_to_fill = m->UsedSize() - kChunkHeader2Size;
size_to_fill = Min(size_to_fill, (uptr)fl.max_free_fill_size);
REAL(memset)((void *)scribble_start, fl.free_fill_byte, size_to_fill);
}
}
// Poison the region.
PoisonShadow(m->Beg(), RoundUpTo(m->UsedSize(), ASAN_SHADOW_GRANULARITY),
kAsanHeapFreeMagic);
}
void Recycle(AsanChunk *m) {
void *p = get_allocator().GetBlockBegin(m);
if (p != m) {
@@ -221,6 +241,12 @@ struct QuarantineCallback {
get_allocator().Deallocate(cache_, p);
}
void RecyclePassThrough(AsanChunk *m) {
// TODO: We don't need all these here.
PreQuarantine(m);
Recycle(m);
}
void *Allocate(uptr size) {
void *res = get_allocator().Allocate(cache_, size, 1);
// TODO(alekseys): Consider making quarantine OOM-friendly.
@@ -639,21 +665,6 @@ struct Allocator {
AsanThread *t = GetCurrentThread();
m->SetFreeContext(t ? t->tid() : 0, StackDepotPut(*stack));
Flags &fl = *flags();
if (fl.max_free_fill_size > 0) {
// We have to skip the chunk header, it contains free_context_id.
uptr scribble_start = (uptr)m + kChunkHeaderSize + kChunkHeader2Size;
if (m->UsedSize() >= kChunkHeader2Size) { // Skip Header2 in user area.
uptr size_to_fill = m->UsedSize() - kChunkHeader2Size;
size_to_fill = Min(size_to_fill, (uptr)fl.max_free_fill_size);
REAL(memset)((void *)scribble_start, fl.free_fill_byte, size_to_fill);
}
}
// Poison the region.
PoisonShadow(m->Beg(), RoundUpTo(m->UsedSize(), ASAN_SHADOW_GRANULARITY),
kAsanHeapFreeMagic);
// Push into quarantine.
if (t) {
AsanThreadLocalMallocStorage *ms = &t->malloc_storage();
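
The block removed above (the max_free_fill_size scribbling and the PoisonShadow call in the allocator's quarantine path) is the same logic that now lives in QuarantineCallback::PreQuarantine, so behavior is unchanged (NFC); only the place it runs from moves, from the allocator into the callback invoked by Quarantine::Put. As a standalone illustration of the offset arithmetic being moved (made-up header-size constants and plain std::memset instead of REAL(memset); not the ASan code):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Illustrative stand-ins for the ASan chunk header sizes (values are made up).
constexpr size_t kChunkHeaderSize  = 16;
constexpr size_t kChunkHeader2Size = 16;

// Fill freed user memory with fill_byte, capped at max_fill bytes, skipping
// the chunk header and the Header2 bytes that overlap the user area -- the
// same offsets the moved code computes before its REAL(memset) call.
void ScribbleFreedChunk(void *chunk, size_t used_size, size_t max_fill,
                        unsigned char fill_byte) {
  if (max_fill == 0 || used_size < kChunkHeader2Size)
    return;  // nothing to scribble, or user area fully covered by Header2
  uintptr_t scribble_start =
      reinterpret_cast<uintptr_t>(chunk) + kChunkHeaderSize + kChunkHeader2Size;
  size_t size_to_fill = std::min(used_size - kChunkHeader2Size, max_fill);
  std::memset(reinterpret_cast<void *>(scribble_start), fill_byte, size_to_fill);
}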

compiler-rt/lib/sanitizer_common/sanitizer_quarantine.h

@@ -68,10 +68,6 @@ struct QuarantineBatch {
COMPILER_CHECK(sizeof(QuarantineBatch) <= (1 << 13)); // 8Kb.
// The callback interface is:
// void Callback::Recycle(Node *ptr);
// void *cb.Allocate(uptr size);
// void cb.Deallocate(void *ptr);
template<typename Callback, typename Node>
class Quarantine {
public:
@@ -100,10 +96,11 @@ class Quarantine {
void Put(Cache *c, Callback cb, Node *ptr, uptr size) {
uptr max_cache_size = GetMaxCacheSize();
if (max_cache_size && size <= GetMaxSize()) {
cb.PreQuarantine(ptr);
c->Enqueue(cb, ptr, size);
} else {
// GetMaxCacheSize() == 0 only when GetMaxSize() == 0 (see Init).
cb.Recycle(ptr);
cb.RecyclePassThrough(ptr);
}
// Check cache size anyway to accommodate for runtime cache_size change.
if (c->Size() > max_cache_size)
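
The branch in Put is why RecyclePassThrough exists: when the quarantine is active and the chunk fits under GetMaxSize(), the chunk is prepared with PreQuarantine and enqueued, and Recycle only runs later when the cache is drained; when the quarantine is effectively disabled (max size 0) or the chunk is too large, it never enters the cache and RecyclePassThrough has to do the whole job immediately. A toy, self-contained model of that control flow with hypothetical CountingCallback/Node types and the enqueueing elided (not the sanitizer code):

#include <cstdio>

struct Node { unsigned size; };  // hypothetical quarantined node

struct CountingCallback {        // hypothetical callback, just counts calls
  int pre_quarantine = 0, recycle = 0, pass_through = 0;
  void PreQuarantine(Node *) { ++pre_quarantine; }  // prepare for caching
  void Recycle(Node *) { ++recycle; }               // really free
  void RecyclePassThrough(Node *n) {                // quarantine bypassed
    ++pass_through;
    PreQuarantine(n);  // mirrors the commit: pass-through still scribbles/poisons
    Recycle(n);
  }
};

// Reduced model of Quarantine::Put: only the branch between "quarantine the
// chunk" and "free it immediately" is kept; Enqueue and cache draining are elided.
void PutSketch(CountingCallback &cb, Node *n, unsigned max_size) {
  if (max_size && n->size <= max_size) {
    cb.PreQuarantine(n);         // chunk enters the quarantine cache
    // ... c->Enqueue(cb, n, n->size) happens here in the real template ...
  } else {
    cb.RecyclePassThrough(n);    // quarantine disabled or chunk too large
  }
}

int main() {
  CountingCallback cb;
  Node small_node{16}, big_node{4096};
  PutSketch(cb, &small_node, 1024);  // fits: PreQuarantine only (for now)
  PutSketch(cb, &big_node, 1024);    // too large: RecyclePassThrough
  std::printf("pre=%d recycle=%d pass=%d\n", cb.pre_quarantine, cb.recycle,
              cb.pass_through);
  return 0;
}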