Skip to content

Commit 6a457bd

Browse files
indutny
authored and rvagg committed
deps: backport 8d6a228 from the v8's upstream
Original commit message: [heap] fix crash during the scavenge of ArrayBuffer Scavenger should not attempt to visit ArrayBuffer's storage, it is a user-supplied pointer that may have any alignment. Visiting it, may result in a crash. BUG= R=jochen Review URL: https://codereview.chromium.org/1406133003 Cr-Commit-Position: refs/heads/master@{#31611} PR-URL: #3549 Reviewed-By: Trevor Norris <trev.norris@gmail.com>
1 parent 8b54f40 commit 6a457bd

3 files changed

Lines changed: 92 additions & 34 deletions

File tree

deps/v8/src/heap/heap.cc

Lines changed: 63 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -1991,40 +1991,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
19911991
// for pointers to from semispace instead of looking for pointers
19921992
// to new space.
19931993
DCHECK(!target->IsMap());
1994-
Address obj_address = target->address();
1995-
1996-
// We are not collecting slots on new space objects during mutation
1997-
// thus we have to scan for pointers to evacuation candidates when we
1998-
// promote objects. But we should not record any slots in non-black
1999-
// objects. Grey object's slots would be rescanned.
2000-
// White object might not survive until the end of collection
2001-
// it would be a violation of the invariant to record it's slots.
2002-
bool record_slots = false;
2003-
if (incremental_marking()->IsCompacting()) {
2004-
MarkBit mark_bit = Marking::MarkBitFrom(target);
2005-
record_slots = Marking::IsBlack(mark_bit);
2006-
}
2007-
#if V8_DOUBLE_FIELDS_UNBOXING
2008-
LayoutDescriptorHelper helper(target->map());
2009-
bool has_only_tagged_fields = helper.all_fields_tagged();
2010-
2011-
if (!has_only_tagged_fields) {
2012-
for (int offset = 0; offset < size;) {
2013-
int end_of_region_offset;
2014-
if (helper.IsTagged(offset, size, &end_of_region_offset)) {
2015-
IterateAndMarkPointersToFromSpace(
2016-
record_slots, obj_address + offset,
2017-
obj_address + end_of_region_offset, &ScavengeObject);
2018-
}
2019-
offset = end_of_region_offset;
2020-
}
2021-
} else {
2022-
#endif
2023-
IterateAndMarkPointersToFromSpace(
2024-
record_slots, obj_address, obj_address + size, &ScavengeObject);
2025-
#if V8_DOUBLE_FIELDS_UNBOXING
2026-
}
2027-
#endif
1994+
1995+
IteratePointersToFromSpace(target, size, &ScavengeObject);
20281996
}
20291997
}
20301998

@@ -5058,6 +5026,67 @@ void Heap::IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
50585026
}
50595027

50605028

5029+
void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
5030+
ObjectSlotCallback callback) {
5031+
Address obj_address = target->address();
5032+
5033+
// We are not collecting slots on new space objects during mutation
5034+
// thus we have to scan for pointers to evacuation candidates when we
5035+
// promote objects. But we should not record any slots in non-black
5036+
// objects. Grey object's slots would be rescanned.
5037+
// White object might not survive until the end of collection
5038+
// it would be a violation of the invariant to record it's slots.
5039+
bool record_slots = false;
5040+
if (incremental_marking()->IsCompacting()) {
5041+
MarkBit mark_bit = Marking::MarkBitFrom(target);
5042+
record_slots = Marking::IsBlack(mark_bit);
5043+
}
5044+
5045+
// Do not scavenge JSArrayBuffer's contents
5046+
switch (target->ContentType()) {
5047+
case HeapObjectContents::kTaggedValues: {
5048+
IterateAndMarkPointersToFromSpace(record_slots, obj_address,
5049+
obj_address + size, callback);
5050+
break;
5051+
}
5052+
case HeapObjectContents::kMixedValues: {
5053+
if (target->IsFixedTypedArrayBase()) {
5054+
IterateAndMarkPointersToFromSpace(
5055+
record_slots, obj_address + FixedTypedArrayBase::kBasePointerOffset,
5056+
obj_address + FixedTypedArrayBase::kHeaderSize, callback);
5057+
} else if (target->IsJSArrayBuffer()) {
5058+
IterateAndMarkPointersToFromSpace(
5059+
record_slots, obj_address,
5060+
obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
5061+
callback);
5062+
IterateAndMarkPointersToFromSpace(
5063+
record_slots, obj_address + JSArrayBuffer::kSize,
5064+
obj_address + size, callback);
5065+
#if V8_DOUBLE_FIELDS_UNBOXING
5066+
} else if (FLAG_unbox_double_fields) {
5067+
LayoutDescriptorHelper helper(target->map());
5068+
DCHECK(!helper.all_fields_tagged());
5069+
5070+
for (int offset = 0; offset < size;) {
5071+
int end_of_region_offset;
5072+
if (helper.IsTagged(offset, size, &end_of_region_offset)) {
5073+
IterateAndMarkPointersToFromSpace(
5074+
record_slots, obj_address + offset,
5075+
obj_address + end_of_region_offset, callback);
5076+
}
5077+
offset = end_of_region_offset;
5078+
}
5079+
#endif
5080+
}
5081+
break;
5082+
}
5083+
case HeapObjectContents::kRawValues: {
5084+
break;
5085+
}
5086+
}
5087+
}
5088+
5089+
50615090
void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
50625091
IterateStrongRoots(v, mode);
50635092
IterateWeakRoots(v, mode);

deps/v8/src/heap/heap.h

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -918,6 +918,9 @@ class Heap {
918918

919919
// Iterate pointers to from semispace of new space found in memory interval
920920
// from start to end.
921+
void IteratePointersToFromSpace(HeapObject* target, int size,
922+
ObjectSlotCallback callback);
923+
921924
void IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
922925
Address end,
923926
ObjectSlotCallback callback);

deps/v8/test/cctest/test-api.cc

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14194,6 +14194,32 @@ static void StackTraceFunctionNameListener(v8::Handle<v8::Message> message,
1419414194
}
1419514195

1419614196

14197+
THREADED_TEST(SkipArrayBufferDuringScavenge) {
14198+
LocalContext env;
14199+
v8::Isolate* isolate = env->GetIsolate();
14200+
v8::HandleScope handle_scope(isolate);
14201+
14202+
// Make sure the pointer looks like a heap object
14203+
Local<v8::Object> tmp = v8::Object::New(isolate);
14204+
uint8_t* store_ptr =
14205+
reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
14206+
14207+
// Make `store_ptr` point to from space
14208+
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
14209+
14210+
// Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
14211+
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
14212+
14213+
// Should not crash,
14214+
// i.e. backing store pointer should not be treated as a heap object pointer
14215+
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
14216+
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
14217+
14218+
// Use `ab` to silence compiler warning
14219+
CHECK_EQ(ab->GetContents().Data(), store_ptr);
14220+
}
14221+
14222+
1419714223
TEST(GetStackTraceContainsFunctionsWithFunctionName) {
1419814224
LocalContext env;
1419914225
v8::HandleScope scope(env->GetIsolate());

0 commit comments

Comments (0)