From a234d445c419d3ee96c6123e39b98f3db961b32e Mon Sep 17 00:00:00 2001
From: Myles Borins
Date: Wed, 29 Jun 2016 01:16:07 -0700
Subject: [PATCH] deps: backport a715957 from V8 upstream

This commit does not include the changes to `src/heap/scavenger.cc`.
Those changes would revert the changes that should have come in
086bd5aede, so there is no issue with that change being missing from
the previous commit.

Original commit message:

  Iterate handles with special left-trim visitor

  BUG=chromium:620553
  LOG=N
  R=hpayer@chromium.org

  Review-Url: https://codereview.chromium.org/2102243002
  Cr-Commit-Position: refs/heads/master@{#37366}

PR-URL: https://github.com/nodejs/node/pull/10668
Reviewed-By: James M Snell
Reviewed-By: Fedor Indutny
Reviewed-By: Ali Ijaz Sheikh
---
 deps/v8/src/heap/heap-inl.h      | 25 ------------------
 deps/v8/src/heap/heap.cc         | 45 ++++++++++++++++++++++++++++++++
 deps/v8/src/heap/heap.h          |  6 -----
 deps/v8/src/heap/mark-compact.cc |  2 --
 4 files changed, 45 insertions(+), 33 deletions(-)

diff --git a/deps/v8/src/heap/heap-inl.h b/deps/v8/src/heap/heap-inl.h
index 39110f6d58e17a..20540b9e88e992 100644
--- a/deps/v8/src/heap/heap-inl.h
+++ b/deps/v8/src/heap/heap-inl.h
@@ -398,31 +398,6 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) {
             static_cast<size_t>(byte_size / kPointerSize));
 }
 
-bool Heap::PurgeLeftTrimmedObject(Object** object) {
-  HeapObject* current = reinterpret_cast<HeapObject*>(*object);
-  const MapWord map_word = current->map_word();
-  if (current->IsFiller() && !map_word.IsForwardingAddress()) {
-#ifdef DEBUG
-    // We need to find a FixedArrayBase map after walking the fillers.
-    while (current->IsFiller()) {
-      Address next = reinterpret_cast<Address>(current);
-      if (current->map() == one_pointer_filler_map()) {
-        next += kPointerSize;
-      } else if (current->map() == two_pointer_filler_map()) {
-        next += 2 * kPointerSize;
-      } else {
-        next += current->Size();
-      }
-      current = reinterpret_cast<HeapObject*>(next);
-    }
-    DCHECK(current->IsFixedArrayBase());
-#endif  // DEBUG
-    *object = nullptr;
-    return true;
-  }
-  return false;
-}
-
 void Heap::MoveBlock(Address dst, Address src, int byte_size) {
   DCHECK(IsAligned(byte_size, kPointerSize));
 
diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc
index 6bc200a0e59289..7730327b412d73 100644
--- a/deps/v8/src/heap/heap.cc
+++ b/deps/v8/src/heap/heap.cc
@@ -5316,6 +5316,49 @@ void Heap::IterateSmiRoots(ObjectVisitor* v) {
   v->Synchronize(VisitorSynchronization::kSmiRootList);
 }
 
+// We cannot avoid stale handles to left-trimmed objects, but can only make
+// sure all handles still needed are updated. Filter out a stale pointer
+// and clear the slot to allow post processing of handles (needed because
+// the sweeper might actually free the underlying page).
+class FixStaleLeftTrimmedHandlesVisitor : public ObjectVisitor {
+ public:
+  explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) {
+    USE(heap_);
+  }
+
+  void VisitPointer(Object** p) override { FixHandle(p); }
+
+  void VisitPointers(Object** start, Object** end) override {
+    for (Object** p = start; p < end; p++) FixHandle(p);
+  }
+
+ private:
+  inline void FixHandle(Object** p) {
+    HeapObject* current = reinterpret_cast<HeapObject*>(*p);
+    if (!current->IsHeapObject()) return;
+    const MapWord map_word = current->map_word();
+    if (!map_word.IsForwardingAddress() && current->IsFiller()) {
+#ifdef DEBUG
+      // We need to find a FixedArrayBase map after walking the fillers.
+      while (current->IsFiller()) {
+        Address next = reinterpret_cast<Address>(current);
+        if (current->map() == heap_->one_pointer_filler_map()) {
+          next += kPointerSize;
+        } else if (current->map() == heap_->two_pointer_filler_map()) {
+          next += 2 * kPointerSize;
+        } else {
+          next += current->Size();
+        }
+        current = reinterpret_cast<HeapObject*>(next);
+      }
+      DCHECK(current->IsFixedArrayBase());
+#endif  // DEBUG
+      *p = nullptr;
+    }
+  }
+
+  Heap* heap_;
+};
 
 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
   v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
@@ -5339,6 +5382,8 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
   v->Synchronize(VisitorSynchronization::kCompilationCache);
 
   // Iterate over local handles in handle scopes.
+  FixStaleLeftTrimmedHandlesVisitor left_trim_visitor(this);
+  isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor);
   isolate_->handle_scope_implementer()->Iterate(v);
   isolate_->IterateDeferredHandles(v);
   v->Synchronize(VisitorSynchronization::kHandleScope);
diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h
index 529050c8bf51ce..0afac311c4816e 100644
--- a/deps/v8/src/heap/heap.h
+++ b/deps/v8/src/heap/heap.h
@@ -590,12 +590,6 @@ class Heap {
   // jslimit_/real_jslimit_ variable in the StackGuard.
   void SetStackLimits();
 
-  // We cannot avoid stale handles to left-trimmed objects, but can only make
-  // sure all handles still needed are updated. Filter out a stale pointer
-  // and clear the slot to allow post processing of handles (needed because
-  // the sweeper might actually free the underlying page).
-  inline bool PurgeLeftTrimmedObject(Object** object);
-
   // Notifies the heap that is ok to start marking or other activities that
   // should not happen during deserialization.
   void NotifyDeserializationComplete();
diff --git a/deps/v8/src/heap/mark-compact.cc b/deps/v8/src/heap/mark-compact.cc
index 3a71578f713632..c827237598ee43 100644
--- a/deps/v8/src/heap/mark-compact.cc
+++ b/deps/v8/src/heap/mark-compact.cc
@@ -1650,8 +1650,6 @@ class RootMarkingVisitor : public ObjectVisitor {
 
     HeapObject* object = ShortCircuitConsString(p);
 
-    if (collector_->heap()->PurgeLeftTrimmedObject(p)) return;
-
     MarkBit mark_bit = Marking::MarkBitFrom(object);
     if (Marking::IsBlackOrGrey(mark_bit)) return;
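
Background note on the pattern (not part of the patch): when V8 left-trims
an array, the trimmed-off words are overwritten with filler objects, so any
root slot that still points at the old start address now points at a filler
rather than a live object. Instead of checking for this inside the
mark-compact root-marking visitor (the removed PurgeLeftTrimmedObject), the
patch runs a dedicated visitor over the handle scopes first, clearing stale
slots so every later visitor sees either a valid pointer or nullptr. The
standalone C++ sketch below illustrates that filtering pattern; the Object,
ObjectVisitor, and FixStaleHandlesVisitor types here are simplified
stand-ins invented for illustration, not V8's real classes.

// Minimal sketch of the "clear stale handle slots" visitor pattern.
#include <cstdio>
#include <vector>

struct Object {
  bool is_filler;  // In V8 this would be derived from the object's map.
};

// Mirrors the two-callback visitor interface used in the patch: one hook
// for a single slot, one for a contiguous range of slots.
class ObjectVisitor {
 public:
  virtual ~ObjectVisitor() = default;
  virtual void VisitPointer(Object** p) = 0;
  virtual void VisitPointers(Object** start, Object** end) = 0;
};

// Clears any slot that points at a filler, so later visitors (such as a
// root-marking visitor) never observe a stale left-trimmed pointer.
class FixStaleHandlesVisitor : public ObjectVisitor {
 public:
  void VisitPointer(Object** p) override { FixHandle(p); }
  void VisitPointers(Object** start, Object** end) override {
    for (Object** p = start; p < end; p++) FixHandle(p);
  }

 private:
  void FixHandle(Object** p) {
    if (*p != nullptr && (*p)->is_filler) *p = nullptr;
  }
};

int main() {
  Object live{false};
  Object filler{true};  // Stands in for filler left behind by left-trimming.
  std::vector<Object*> handles = {&live, &filler, &live};

  // Run the fix-up pass first, just as IterateStrongRoots runs
  // left_trim_visitor over the handle scopes before the regular visitor v.
  FixStaleHandlesVisitor fix;
  fix.VisitPointers(handles.data(), handles.data() + handles.size());

  // The stale slot is now nullptr; live slots are untouched.
  for (Object* h : handles) std::printf("%p\n", static_cast<void*>(h));
}

One design point visible in the diff itself: the old approach filtered stale
pointers only inside RootMarkingVisitor (mark-compact root marking), while
the new FixStaleLeftTrimmedHandlesVisitor pass in Heap::IterateStrongRoots
runs before the regular visitor, so every client of strong-root iteration
sees cleaned slots, and the DEBUG-only filler walk still verifies that the
fillers end at a FixedArrayBase.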