@@ -2079,40 +2079,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
       // for pointers to from semispace instead of looking for pointers
       // to new space.
       DCHECK(!target->IsMap());
-      Address obj_address = target->address();
-
-      // We are not collecting slots on new space objects during mutation
-      // thus we have to scan for pointers to evacuation candidates when we
-      // promote objects. But we should not record any slots in non-black
-      // objects. Grey object's slots would be rescanned.
-      // White object might not survive until the end of collection
-      // it would be a violation of the invariant to record it's slots.
-      bool record_slots = false;
-      if (incremental_marking()->IsCompacting()) {
-        MarkBit mark_bit = Marking::MarkBitFrom(target);
-        record_slots = Marking::IsBlack(mark_bit);
-      }
-#if V8_DOUBLE_FIELDS_UNBOXING
-      LayoutDescriptorHelper helper(target->map());
-      bool has_only_tagged_fields = helper.all_fields_tagged();
-
-      if (!has_only_tagged_fields) {
-        for (int offset = 0; offset < size;) {
-          int end_of_region_offset;
-          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-            IterateAndMarkPointersToFromSpace(
-                record_slots, obj_address + offset,
-                obj_address + end_of_region_offset, &ScavengeObject);
-          }
-          offset = end_of_region_offset;
-        }
-      } else {
-#endif
-        IterateAndMarkPointersToFromSpace(
-            record_slots, obj_address, obj_address + size, &ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
-      }
-#endif
+
+      IteratePointersToFromSpace(target, size, &ScavengeObject);
     }
   }

@@ -5263,6 +5231,67 @@ void Heap::IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
 }


+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. Grey object's slots would be rescanned.
+  // White object might not survive until the end of collection
+  // it would be a violation of the invariant to record it's slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      IterateAndMarkPointersToFromSpace(record_slots, obj_address,
+                                        obj_address + size, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        IterateAndMarkPointersToFromSpace(
+            record_slots, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, callback);
+      } else if (target->IsJSArrayBuffer()) {
+        IterateAndMarkPointersToFromSpace(
+            record_slots, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            callback);
+        IterateAndMarkPointersToFromSpace(
+            record_slots, obj_address + JSArrayBuffer::kSize,
+            obj_address + size, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                record_slots, obj_address + offset,
+                obj_address + end_of_region_offset, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      break;
+    }
+  }
+}
+
+
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);
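
For readers skimming the new kMixedValues branch: the layout walk batches contiguous runs of same-kind words so the pointer visitor only ever sees tagged slots, while raw (unboxed double) regions are skipped wholesale. Below is a minimal standalone sketch of that region walk, not V8 code: IsTaggedWord, RegionIsTagged, and VisitPointerRange are hypothetical stand-ins for LayoutDescriptorHelper::IsTagged and IterateAndMarkPointersToFromSpace, but the loop has the same shape as the one added above.

#include <cstdio>
#include <vector>

constexpr int kPointerSize = 8;

// Stand-in for the layout descriptor: true = tagged pointer word,
// false = raw word (e.g. an unboxed double field).
static bool IsTaggedWord(const std::vector<bool>& layout, int offset) {
  return layout[offset / kPointerSize];
}

// Mirrors helper.IsTagged(offset, size, &end_of_region_offset): reports
// whether the region starting at `offset` is tagged and where the run of
// same-kind words ends.
static bool RegionIsTagged(const std::vector<bool>& layout, int offset,
                           int size, int* end_of_region_offset) {
  bool tagged = IsTaggedWord(layout, offset);
  int end = offset + kPointerSize;
  while (end < size && IsTaggedWord(layout, end) == tagged) {
    end += kPointerSize;
  }
  *end_of_region_offset = end;
  return tagged;
}

// Stand-in for IterateAndMarkPointersToFromSpace: visit one tagged range.
static void VisitPointerRange(int start, int end) {
  std::printf("visit tagged slots [%d, %d)\n", start, end);
}

int main() {
  // Six-word object: two tagged fields, two raw (double) words, two tagged.
  std::vector<bool> layout = {true, true, false, false, true, true};
  int size = static_cast<int>(layout.size()) * kPointerSize;

  for (int offset = 0; offset < size;) {  // same loop shape as the patch
    int end_of_region_offset;
    if (RegionIsTagged(layout, offset, size, &end_of_region_offset)) {
      VisitPointerRange(offset, end_of_region_offset);
    }
    offset = end_of_region_offset;  // raw regions are skipped entirely
  }
  return 0;  // prints: visit tagged slots [0, 16) and [32, 48)
}

The JSArrayBuffer branch above applies the same two-range idea without a layout descriptor: it scans the tagged header up to JSArrayBuffer::kByteLengthOffset + kPointerSize, skips the embedded backing-store contents, and resumes scanning at JSArrayBuffer::kSize.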