From 47d18d4936063c5931d7876d20574e073b854ab4 Mon Sep 17 00:00:00 2001 From: Myles Borins Date: Mon, 20 Jun 2016 07:29:54 -0700 Subject: [PATCH 1/4] deps: backport d800a65 from V8 upstream This backport does not include the original changes to SLOW_DCHECK as it does not exist in the V8 in node v4.x Original commit message: Filter out stale left-trimmed handles BUG=chromium:620553 LOG=N R=jochen@chromium.org Review-Url: https://codereview.chromium.org/2078403002 Cr-Commit-Position: refs/heads/master@{#37108} PR-URL: https://github.com/nodejs/node/pull/10668 Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Ali Ijaz Sheikh --- deps/v8/src/heap/mark-compact.cc | 28 ++++++++++++++++++- .../v8/test/mjsunit/regress/regress-620553.js | 17 +++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 deps/v8/test/mjsunit/regress/regress-620553.js diff --git a/deps/v8/src/heap/mark-compact.cc b/deps/v8/src/heap/mark-compact.cc index dcc2fb943046d6..e39ff83e9f381e 100644 --- a/deps/v8/src/heap/mark-compact.cc +++ b/deps/v8/src/heap/mark-compact.cc @@ -1648,8 +1648,34 @@ class RootMarkingVisitor : public ObjectVisitor { void MarkObjectByPointer(Object** p) { if (!(*p)->IsHeapObject()) return; - // Replace flat cons strings in place. HeapObject* object = ShortCircuitConsString(p); + + // We cannot avoid stale handles to left-trimmed objects, but can only make + // sure all handles still needed are updated. Filter out any stale pointers + // and clear the slot to allow post processing of handles (needed because + // the sweeper might actually free the underlying page). + if (object->IsFiller()) { +#ifdef DEBUG + // We need to find a FixedArrayBase map after walking the fillers. + Heap* heap = collector_->heap(); + HeapObject* current = object; + while (current->IsFiller()) { + Address next = reinterpret_cast<Address>(current); + if (current->map() == heap->one_pointer_filler_map()) { + next += kPointerSize; + } else if (current->map() == heap->two_pointer_filler_map()) { + next += 2 * kPointerSize; + } else { + next += current->Size(); + } + current = reinterpret_cast<HeapObject*>(next); + } + DCHECK(current->IsFixedArrayBase()); +#endif // DEBUG + *p = nullptr; + return; + } + MarkBit mark_bit = Marking::MarkBitFrom(object); if (Marking::IsBlackOrGrey(mark_bit)) return; diff --git a/deps/v8/test/mjsunit/regress/regress-620553.js b/deps/v8/test/mjsunit/regress/regress-620553.js new file mode 100644 index 00000000000000..461b9bb189e559 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-620553.js @@ -0,0 +1,17 @@ +// Copyright 2016 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --expose-gc + +var o0 = []; +var o1 = []; +var cnt = 0; +o1.__defineGetter__(0, function() { + if (cnt++ > 2) return; + o0.shift(); + gc(); + o0.push(0); + o0.concat(o1); +}); +o1[0]; From 83144af828e71d3a6d260207afb98326ab0183fb Mon Sep 17 00:00:00 2001 From: Myles Borins Date: Wed, 22 Jun 2016 05:21:16 -0700 Subject: [PATCH 2/4] deps: backport 7a88ff3 from V8 upstream This backport does not include the changes to `src/heap/scavenger.cc` as it does not exist in the V8 included in the v4.x stream.
Original commit message: Filter out stale left-trimmed handles for scavenges The missing part from https://codereview.chromium.org/2078403002/ R=jochen@chromium.org BUG=chromium:621869 LOG=N Review-Url: https://codereview.chromium.org/2077353004 Cr-Commit-Position: refs/heads/master@{#37184} PR-URL: https://github.com/nodejs/node/pull/10668 Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Ali Ijaz Sheikh --- deps/v8/src/heap/heap-inl.h | 25 +++++++++++++++++- deps/v8/src/heap/heap.h | 6 +++++ deps/v8/src/heap/mark-compact.cc | 26 +------------------ deps/v8/src/objects-inl.h | 2 +- deps/v8/src/objects.h | 2 +- .../v8/test/mjsunit/regress/regress-621869.js | 18 +++++++++++++ 6 files changed, 51 insertions(+), 28 deletions(-) create mode 100644 deps/v8/test/mjsunit/regress/regress-621869.js diff --git a/deps/v8/src/heap/heap-inl.h b/deps/v8/src/heap/heap-inl.h index fdb1d7345b4d79..39110f6d58e17a 100644 --- a/deps/v8/src/heap/heap-inl.h +++ b/deps/v8/src/heap/heap-inl.h @@ -393,12 +393,35 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) { return false; } - void Heap::CopyBlock(Address dst, Address src, int byte_size) { CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src), static_cast<size_t>(byte_size / kPointerSize)); } +bool Heap::PurgeLeftTrimmedObject(Object** object) { + HeapObject* current = reinterpret_cast<HeapObject*>(*object); + const MapWord map_word = current->map_word(); + if (current->IsFiller() && !map_word.IsForwardingAddress()) { +#ifdef DEBUG + // We need to find a FixedArrayBase map after walking the fillers. + while (current->IsFiller()) { + Address next = reinterpret_cast<Address>(current); + if (current->map() == one_pointer_filler_map()) { + next += kPointerSize; + } else if (current->map() == two_pointer_filler_map()) { + next += 2 * kPointerSize; + } else { + next += current->Size(); + } + current = reinterpret_cast<HeapObject*>(next); + } + DCHECK(current->IsFixedArrayBase()); +#endif // DEBUG + *object = nullptr; + return true; + } + return false; +} void Heap::MoveBlock(Address dst, Address src, int byte_size) { DCHECK(IsAligned(byte_size, kPointerSize)); diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h index 0afac311c4816e..529050c8bf51ce 100644 --- a/deps/v8/src/heap/heap.h +++ b/deps/v8/src/heap/heap.h @@ -590,6 +590,12 @@ class Heap { // jslimit_/real_jslimit_ variable in the StackGuard. void SetStackLimits(); + // We cannot avoid stale handles to left-trimmed objects, but can only make + // sure all handles still needed are updated. Filter out a stale pointer + // and clear the slot to allow post processing of handles (needed because + // the sweeper might actually free the underlying page). + inline bool PurgeLeftTrimmedObject(Object** object); + // Notifies the heap that is ok to start marking or other activities that // should not happen during deserialization. void NotifyDeserializationComplete(); diff --git a/deps/v8/src/heap/mark-compact.cc b/deps/v8/src/heap/mark-compact.cc index e39ff83e9f381e..3a71578f713632 100644 --- a/deps/v8/src/heap/mark-compact.cc +++ b/deps/v8/src/heap/mark-compact.cc @@ -1650,31 +1650,7 @@ class RootMarkingVisitor : public ObjectVisitor { HeapObject* object = ShortCircuitConsString(p); - // We cannot avoid stale handles to left-trimmed objects, but can only make - // sure all handles still needed are updated. Filter out any stale pointers - // and clear the slot to allow post processing of handles (needed because - // the sweeper might actually free the underlying page). - if (object->IsFiller()) { -#ifdef DEBUG - // We need to find a FixedArrayBase map after walking the fillers.
- Heap* heap = collector_->heap(); - HeapObject* current = object; - while (current->IsFiller()) { - Address next = reinterpret_cast<Address>(current); - if (current->map() == heap->one_pointer_filler_map()) { - next += kPointerSize; - } else if (current->map() == heap->two_pointer_filler_map()) { - next += 2 * kPointerSize; - } else { - next += current->Size(); - } - current = reinterpret_cast<HeapObject*>(next); - } - DCHECK(current->IsFixedArrayBase()); -#endif // DEBUG - *p = nullptr; - return; - } + if (collector_->heap()->PurgeLeftTrimmedObject(p)) return; MarkBit mark_bit = Marking::MarkBitFrom(object); if (Marking::IsBlackOrGrey(mark_bit)) return; diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h index 3caf52bff4b086..3e91c2bac97051 100644 --- a/deps/v8/src/objects-inl.h +++ b/deps/v8/src/objects-inl.h @@ -1351,7 +1351,7 @@ Map* MapWord::ToMap() { } -bool MapWord::IsForwardingAddress() { +bool MapWord::IsForwardingAddress() const { return HAS_SMI_TAG(reinterpret_cast<Object*>(value_)); } diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h index 1c5743eb838918..5481b1834de762 100644 --- a/deps/v8/src/objects.h +++ b/deps/v8/src/objects.h @@ -1382,7 +1382,7 @@ class MapWord BASE_EMBEDDED { // True if this map word is a forwarding address for a scavenge // collection. Only valid during a scavenge collection (specifically, // when all map words are heap object pointers, i.e. not during a full GC). - inline bool IsForwardingAddress(); + inline bool IsForwardingAddress() const; // Create a map word from a forwarding address. static inline MapWord FromForwardingAddress(HeapObject* object); diff --git a/deps/v8/test/mjsunit/regress/regress-621869.js b/deps/v8/test/mjsunit/regress/regress-621869.js new file mode 100644 index 00000000000000..ee1b58b0266032 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-621869.js @@ -0,0 +1,18 @@ +// Copyright 2016 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file.
+ +// Flags: --expose-gc + +var o0 = []; +var o1 = []; +var cnt = 0; +var only_scavenge = true; +o1.__defineGetter__(0, function() { + if (cnt++ > 2) return; + o0.shift(); + gc(only_scavenge); + o0.push((64)); + o0.concat(o1); +}); +o1[0]; \ No newline at end of file From e0db108d418816b8eed8eb36a85e4b6830ed4662 Mon Sep 17 00:00:00 2001 From: Myles Borins Date: Wed, 29 Jun 2016 01:16:07 -0700 Subject: [PATCH 3/4] deps: backport a715957 from V8 upstream This commit does not include the changes to `src/heap/scavenger.cc`. These changes would revert the changes that should have come in 086bd5aede, meaning that there is no issue with that change missing in the previous commit. Original commit message: Iterate handles with special left-trim visitor BUG=chromium:620553 LOG=N R=hpayer@chromium.org Review-Url: https://codereview.chromium.org/2102243002 Cr-Commit-Position: refs/heads/master@{#37366} PR-URL: https://github.com/nodejs/node/pull/10668 Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Ali Ijaz Sheikh --- deps/v8/src/heap/heap-inl.h | 25 ------------------ deps/v8/src/heap/heap.cc | 45 ++++++++++++++++++++++++++++++++ deps/v8/src/heap/heap.h | 6 ----- deps/v8/src/heap/mark-compact.cc | 2 -- 4 files changed, 45 insertions(+), 33 deletions(-) diff --git a/deps/v8/src/heap/heap-inl.h b/deps/v8/src/heap/heap-inl.h index 39110f6d58e17a..20540b9e88e992 100644 --- a/deps/v8/src/heap/heap-inl.h +++ b/deps/v8/src/heap/heap-inl.h @@ -398,31 +398,6 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) { static_cast<size_t>(byte_size / kPointerSize)); } -bool Heap::PurgeLeftTrimmedObject(Object** object) { - HeapObject* current = reinterpret_cast<HeapObject*>(*object); - const MapWord map_word = current->map_word(); - if (current->IsFiller() && !map_word.IsForwardingAddress()) { -#ifdef DEBUG - // We need to find a FixedArrayBase map after walking the fillers. - while (current->IsFiller()) { - Address next = reinterpret_cast<Address>(current); - if (current->map() == one_pointer_filler_map()) { - next += kPointerSize; - } else if (current->map() == two_pointer_filler_map()) { - next += 2 * kPointerSize; - } else { - next += current->Size(); - } - current = reinterpret_cast<HeapObject*>(next); - } - DCHECK(current->IsFixedArrayBase()); -#endif // DEBUG - *object = nullptr; - return true; - } - return false; -} void Heap::MoveBlock(Address dst, Address src, int byte_size) { DCHECK(IsAligned(byte_size, kPointerSize)); diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc index 6bc200a0e59289..7730327b412d73 100644 --- a/deps/v8/src/heap/heap.cc +++ b/deps/v8/src/heap/heap.cc @@ -5316,6 +5316,49 @@ void Heap::IterateSmiRoots(ObjectVisitor* v) { v->Synchronize(VisitorSynchronization::kSmiRootList); } +// We cannot avoid stale handles to left-trimmed objects, but can only make +// sure all handles still needed are updated. Filter out a stale pointer +// and clear the slot to allow post processing of handles (needed because +// the sweeper might actually free the underlying page). +class FixStaleLeftTrimmedHandlesVisitor : public ObjectVisitor { + public: + explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) { + USE(heap_); + } + + void VisitPointer(Object** p) override { FixHandle(p); } + + void VisitPointers(Object** start, Object** end) override { + for (Object** p = start; p < end; p++) FixHandle(p); + } + + private: + inline void FixHandle(Object** p) { + HeapObject* current = reinterpret_cast<HeapObject*>(*p); + if (!current->IsHeapObject()) return; + const MapWord map_word = current->map_word(); + if (!map_word.IsForwardingAddress() && current->IsFiller()) { +#ifdef DEBUG + // We need to find a FixedArrayBase map after walking the fillers. + while (current->IsFiller()) { + Address next = reinterpret_cast<Address>(current); + if (current->map() == heap_->one_pointer_filler_map()) { + next += kPointerSize; + } else if (current->map() == heap_->two_pointer_filler_map()) { + next += 2 * kPointerSize; + } else { + next += current->Size(); + } + current = reinterpret_cast<HeapObject*>(next); + } + DCHECK(current->IsFixedArrayBase()); +#endif // DEBUG + *p = nullptr; + } + } + + Heap* heap_; +}; void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); @@ -5339,6 +5382,8 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { v->Synchronize(VisitorSynchronization::kCompilationCache); // Iterate over local handles in handle scopes. + FixStaleLeftTrimmedHandlesVisitor left_trim_visitor(this); + isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor); isolate_->handle_scope_implementer()->Iterate(v); isolate_->IterateDeferredHandles(v); v->Synchronize(VisitorSynchronization::kHandleScope); diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h index 529050c8bf51ce..0afac311c4816e 100644 --- a/deps/v8/src/heap/heap.h +++ b/deps/v8/src/heap/heap.h @@ -590,12 +590,6 @@ class Heap { // jslimit_/real_jslimit_ variable in the StackGuard. void SetStackLimits(); - // We cannot avoid stale handles to left-trimmed objects, but can only make - // sure all handles still needed are updated. Filter out a stale pointer - // and clear the slot to allow post processing of handles (needed because - // the sweeper might actually free the underlying page). - inline bool PurgeLeftTrimmedObject(Object** object); - // Notifies the heap that is ok to start marking or other activities that // should not happen during deserialization.
void NotifyDeserializationComplete(); diff --git a/deps/v8/src/heap/mark-compact.cc b/deps/v8/src/heap/mark-compact.cc index 3a71578f713632..c827237598ee43 100644 --- a/deps/v8/src/heap/mark-compact.cc +++ b/deps/v8/src/heap/mark-compact.cc @@ -1650,8 +1650,6 @@ class RootMarkingVisitor : public ObjectVisitor { HeapObject* object = ShortCircuitConsString(p); - if (collector_->heap()->PurgeLeftTrimmedObject(p)) return; - MarkBit mark_bit = Marking::MarkBitFrom(object); if (Marking::IsBlackOrGrey(mark_bit)) return; From 20bee0ff09a2c81c2ec13e7d49fa4cec4568409a Mon Sep 17 00:00:00 2001 From: Myles Borins Date: Thu, 12 Jan 2017 17:33:09 -0500 Subject: [PATCH 4/4] deps: update patch level in V8 PR-URL: https://github.com/nodejs/node/pull/10668 Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Ali Ijaz Sheikh --- deps/v8/include/v8-version.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/v8/include/v8-version.h b/deps/v8/include/v8-version.h index f88cdeddb2a237..89cf41c1f70d52 100644 --- a/deps/v8/include/v8-version.h +++ b/deps/v8/include/v8-version.h @@ -11,7 +11,7 @@ #define V8_MAJOR_VERSION 4 #define V8_MINOR_VERSION 5 #define V8_BUILD_NUMBER 103 -#define V8_PATCH_LEVEL 44 +#define V8_PATCH_LEVEL 45 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.)