@@ -1876,42 +1876,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
         // for pointers to from semispace instead of looking for pointers
         // to new space.
         DCHECK(!target->IsMap());
-        Address obj_address = target->address();
-
-        // We are not collecting slots on new space objects during mutation,
-        // thus we have to scan for pointers to evacuation candidates when we
-        // promote objects. But we should not record any slots in non-black
-        // objects. Grey objects' slots would be rescanned. White objects
-        // might not survive until the end of collection; it would be a
-        // violation of the invariant to record their slots.
-        bool record_slots = false;
-        if (incremental_marking()->IsCompacting()) {
-          MarkBit mark_bit = Marking::MarkBitFrom(target);
-          record_slots = Marking::IsBlack(mark_bit);
-        }
-#if V8_DOUBLE_FIELDS_UNBOXING
-        LayoutDescriptorHelper helper(target->map());
-        bool has_only_tagged_fields = helper.all_fields_tagged();
-
-        if (!has_only_tagged_fields) {
-          for (int offset = 0; offset < size;) {
-            int end_of_region_offset;
-            if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-              IterateAndMarkPointersToFromSpace(
-                  target, obj_address + offset,
-                  obj_address + end_of_region_offset, record_slots,
-                  &Scavenger::ScavengeObject);
-            }
-            offset = end_of_region_offset;
-          }
-        } else {
-#endif
-          IterateAndMarkPointersToFromSpace(target, obj_address,
-                                            obj_address + size, record_slots,
-                                            &Scavenger::ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
-        }
-#endif
+
+        IteratePointersToFromSpace(target, size, &Scavenger::ScavengeObject);
       }
     }

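Note: the comment block removed above (and re-added in the new helper below) encodes the tri-color slot-recording rule. A minimal standalone sketch of that rule, using hypothetical names (MarkColor, ShouldRecordSlots are illustrative, not V8 API):

// Sketch: slots may only be recorded for black objects during a compacting
// incremental marking cycle; grey objects will be rescanned anyway, and
// white objects may not survive the collection at all.
#include <cassert>

enum class MarkColor { kWhite, kGrey, kBlack };

bool ShouldRecordSlots(bool is_compacting, MarkColor color) {
  return is_compacting && color == MarkColor::kBlack;
}

int main() {
  assert(ShouldRecordSlots(true, MarkColor::kBlack));
  assert(!ShouldRecordSlots(true, MarkColor::kGrey));    // rescanned later
  assert(!ShouldRecordSlots(true, MarkColor::kWhite));   // may be collected
  assert(!ShouldRecordSlots(false, MarkColor::kBlack));  // not compacting
  return 0;
}
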
@@ -4438,6 +4404,72 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
 }


+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation,
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. Grey objects' slots would be rescanned. White objects
+  // might not survive until the end of collection; it would be a
+  // violation of the invariant to record their slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents.
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
+                                        record_slots, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
+            callback);
+      } else if (target->IsBytecodeArray()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + BytecodeArray::kConstantPoolOffset,
+            obj_address + BytecodeArray::kHeaderSize, record_slots, callback);
+      } else if (target->IsJSArrayBuffer()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            record_slots, callback);
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + JSArrayBuffer::kSize, obj_address + size,
+            record_slots, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      break;
+    }
+  }
+}
+
+
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);
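
Note: the switch in the new helper chooses which byte ranges of an object are scanned for tagged pointers. A self-contained sketch of that region selection, with made-up offsets standing in for V8's layout constants (kBasePointerOffset, kByteLengthOffset, etc. are placeholders here, not the real values):

// Sketch of the region selection in IteratePointersToFromSpace. For a
// JSArrayBuffer, the scan is split so that the fields between the byte
// length and kSize (such as the raw backing-store pointer, which is not a
// heap reference) are skipped.
#include <cstdio>
#include <utility>
#include <vector>

using Region = std::pair<int, int>;  // [start, end) byte offsets

enum class Kind { kTagged, kFixedTypedArray, kJSArrayBuffer, kRaw };

std::vector<Region> TaggedRegions(Kind kind, int size) {
  const int kPointerSize = 8;            // assumption: 64-bit build
  const int kBasePointerOffset = 8;      // hypothetical
  const int kTypedArrayHeaderSize = 16;  // hypothetical
  const int kByteLengthOffset = 24;      // hypothetical
  const int kJSArrayBufferSize = 40;     // hypothetical
  switch (kind) {
    case Kind::kTagged:
      return {{0, size}};  // every word may hold a heap pointer
    case Kind::kFixedTypedArray:
      // Only the base-pointer slot in the header; the payload is raw data.
      return {{kBasePointerOffset, kTypedArrayHeaderSize}};
    case Kind::kJSArrayBuffer:
      // Scan around the backing-store field rather than through it.
      return {{0, kByteLengthOffset + kPointerSize},
              {kJSArrayBufferSize, size}};
    case Kind::kRaw:
      return {};  // nothing to scan
  }
  return {};
}

int main() {
  for (auto [start, end] : TaggedRegions(Kind::kJSArrayBuffer, 64))
    std::printf("scan [%d, %d)\n", start, end);
}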
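
Note: the FLAG_unbox_double_fields branch walks the object in alternating tagged/untagged runs. A standalone sketch of that loop, with a toy per-word bitmap in place of LayoutDescriptorHelper (LayoutSketch is illustrative, not V8 code):

// Sketch: walk an object region by region, scanning only the tagged runs,
// mirroring the helper.IsTagged() / end_of_region_offset loop above.
#include <cstddef>
#include <cstdio>
#include <vector>

struct LayoutSketch {
  std::vector<bool> tagged;  // word i holds a tagged value iff tagged[i]
  // Like helper.IsTagged(offset, size, &end_of_region_offset): reports
  // whether the run starting at |word| is tagged and where that run ends.
  bool IsTagged(std::size_t word, std::size_t* end_of_region) const {
    bool first = tagged[word];
    std::size_t end = word;
    while (end < tagged.size() && tagged[end] == first) ++end;
    *end_of_region = end;
    return first;
  }
};

int main() {
  // Example layout: two tagged words, two raw (unboxed double) words, then
  // one tagged word.
  LayoutSketch layout{{true, true, false, false, true}};
  for (std::size_t word = 0; word < layout.tagged.size();) {
    std::size_t end;
    if (layout.IsTagged(word, &end)) {
      std::printf("scan tagged words [%zu, %zu)\n", word, end);
    }
    word = end;  // skip to the next region either way, as in the V8 loop
  }
  return 0;
}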