Commit 36ac3d6
deps: backport 8d6a228 from the v8's upstream

Original commit message:

    [heap] fix crash during the scavenge of ArrayBuffer

    Scavenger should not attempt to visit ArrayBuffer's storage, it
    is a user-supplied pointer that may have any alignment. Visiting
    it may result in a crash.

    BUG=
    R=jochen

    Review URL: https://codereview.chromium.org/1406133003

    Cr-Commit-Position: refs/heads/master@{#31611}

PR-URL: #4259
Reviewed-By: Ali Ijaz Sheikh <[email protected]>
Reviewed-By: James M Snell <[email protected]>
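The crash mechanics are worth spelling out. V8 distinguishes immediates from heap objects by the low bits of a word (heap object pointers carry kHeapObjectTag). An embedder-supplied backing store is a raw pointer with arbitrary alignment, so if the scavenger visits the slot holding it, an odd-valued address is indistinguishable from a tagged object pointer and gets dereferenced. The sketch below is illustrative only, not V8 source: the tag constants follow V8's convention, everything else is hypothetical.

```cpp
// Illustration only (not V8 code): why a raw, user-supplied pointer stored
// inside a heap object must not be visited as a tagged slot.
#include <cstdint>
#include <cstdio>

// V8 convention: heap object pointers have low bits == 01 (kHeapObjectTag).
constexpr uintptr_t kHeapObjectTag = 1;
constexpr uintptr_t kHeapObjectTagMask = 3;

bool LooksLikeTaggedHeapPointer(uintptr_t word) {
  return (word & kHeapObjectTagMask) == kHeapObjectTag;
}

int main() {
  // Hypothetical embedder-supplied backing store address: any alignment goes.
  uintptr_t backing_store = 0xbee5;  // odd value, so it looks "tagged"
  if (LooksLikeTaggedHeapPointer(backing_store)) {
    // A visitor would strip the tag and dereference the result to load the
    // object's map -- reading arbitrary memory, which can fault.
    std::printf("raw pointer would be misread as a heap object\n");
  }
  return 0;
}
```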
1 parent 69b94ec

3 files changed: +97 -36 lines

deps/v8/src/heap/heap.cc (+68 -36)
```diff
@@ -1876,42 +1876,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
       // for pointers to from semispace instead of looking for pointers
       // to new space.
       DCHECK(!target->IsMap());
-      Address obj_address = target->address();
-
-      // We are not collecting slots on new space objects during mutation
-      // thus we have to scan for pointers to evacuation candidates when we
-      // promote objects. But we should not record any slots in non-black
-      // objects. Grey object's slots would be rescanned.
-      // White object might not survive until the end of collection
-      // it would be a violation of the invariant to record it's slots.
-      bool record_slots = false;
-      if (incremental_marking()->IsCompacting()) {
-        MarkBit mark_bit = Marking::MarkBitFrom(target);
-        record_slots = Marking::IsBlack(mark_bit);
-      }
-#if V8_DOUBLE_FIELDS_UNBOXING
-      LayoutDescriptorHelper helper(target->map());
-      bool has_only_tagged_fields = helper.all_fields_tagged();
-
-      if (!has_only_tagged_fields) {
-        for (int offset = 0; offset < size;) {
-          int end_of_region_offset;
-          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-            IterateAndMarkPointersToFromSpace(
-                target, obj_address + offset,
-                obj_address + end_of_region_offset, record_slots,
-                &Scavenger::ScavengeObject);
-          }
-          offset = end_of_region_offset;
-        }
-      } else {
-#endif
-        IterateAndMarkPointersToFromSpace(target, obj_address,
-                                          obj_address + size, record_slots,
-                                          &Scavenger::ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
-      }
-#endif
+
+      IteratePointersToFromSpace(target, size, &Scavenger::ScavengeObject);
     }
   }
 
```
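The scan-and-mark logic removed here is not deleted outright: the hunk below reintroduces it as a dedicated helper, IteratePointersToFromSpace, which additionally dispatches on the object's ContentType so that raw regions such as JSArrayBuffer's backing store are never visited.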

```diff
@@ -4438,6 +4404,72 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
 }
 
 
+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. Grey object's slots would be rescanned.
+  // White object might not survive until the end of collection
+  // it would be a violation of the invariant to record it's slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
+                                        record_slots, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
+            callback);
+      } else if (target->IsBytecodeArray()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + BytecodeArray::kConstantPoolOffset,
+            obj_address + BytecodeArray::kHeaderSize, record_slots, callback);
+      } else if (target->IsJSArrayBuffer()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            record_slots, callback);
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + JSArrayBuffer::kSize, obj_address + size,
+            record_slots, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      break;
+    }
+  }
+}
+
+
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);
```
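For the JSArrayBuffer case above, the helper scans two sub-ranges that together cover every tagged field while stepping over the raw backing_store word: [obj_address, obj_address + kByteLengthOffset + kPointerSize) and [obj_address + kSize, obj_address + size). Below is a self-contained sketch of that range-splitting idea; the offsets are hypothetical stand-ins, not V8's real layout.

```cpp
// Minimal sketch of the range-splitting used for JSArrayBuffer: scan the
// tagged fields up to and including byte_length, skip the raw backing_store
// word, resume after the fixed-size header.
#include <cstddef>
#include <cstdio>
#include <utility>
#include <vector>

constexpr std::size_t kPointerSize = sizeof(void*);
// Hypothetical JSArrayBuffer-like layout, NOT V8's real offsets:
// [tagged fields ... byte_length][backing_store (raw)][in-object properties]
constexpr std::size_t kByteLengthOffset = 3 * kPointerSize;
constexpr std::size_t kSize = 5 * kPointerSize;  // header incl. backing_store

using Range = std::pair<std::size_t, std::size_t>;

// Sub-ranges of the object that may be scanned for tagged pointers.
std::vector<Range> TaggedRanges(std::size_t object_size) {
  return {
      {0, kByteLengthOffset + kPointerSize},  // up to and incl. byte_length
      {kSize, object_size},                   // everything past backing_store
  };
}

int main() {
  for (const Range& r : TaggedRanges(8 * kPointerSize)) {
    std::printf("scan [%zu, %zu)\n", r.first, r.second);
  }
  return 0;
}
```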

deps/v8/src/heap/heap.h (+3)
```diff
@@ -1237,6 +1237,9 @@ class Heap {
 
   // Iterate pointers to from semispace of new space found in memory interval
   // from start to end within |object|.
+  void IteratePointersToFromSpace(HeapObject* target, int size,
+                                  ObjectSlotCallback callback);
+
   void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
                                          Address end, bool record_slots,
                                          ObjectSlotCallback callback);
```

deps/v8/test/cctest/test-api.cc (+26)
```diff
@@ -14191,6 +14191,32 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
 }
 
 
+THREADED_TEST(SkipArrayBufferDuringScavenge) {
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+
+  // Make sure the pointer looks like a heap object
+  Local<v8::Object> tmp = v8::Object::New(isolate);
+  uint8_t* store_ptr =
+      reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
+
+  // Make `store_ptr` point to from space
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+
+  // Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
+  Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
+
+  // Should not crash,
+  // i.e. backing store pointer should not be treated as a heap object pointer
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
+
+  // Use `ab` to silence compiler warning
+  CHECK_EQ(ab->GetContents().Data(), store_ptr);
+}
+
+
 THREADED_TEST(SharedUint8Array) {
   i::FLAG_harmony_sharedarraybuffer = true;
   TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::FixedUint8Array,
```
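The test drives the fix from the embedder API: v8::ArrayBuffer::New(isolate, data, byteLength) hands V8 a caller-owned backing store whose raw pointer lands in the buffer's backing_store field. A minimal embedder-side sketch of that usage, assuming the V8 4.x-era externalized-buffer API the test relies on:

```cpp
#include <v8.h>
#include <cstdint>

// Caller-owned storage; its address may have any alignment or bit pattern.
static uint8_t storage[8];

void CreateExternalArrayBuffer(v8::Isolate* isolate) {
  v8::HandleScope scope(isolate);
  v8::Local<v8::ArrayBuffer> ab =
      v8::ArrayBuffer::New(isolate, storage, sizeof(storage));
  // V8 records `storage` in the buffer's backing_store field; after this fix
  // the scavenger skips that field instead of visiting it as a tagged slot.
  (void)ab;
}
```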
