@@ -2016,42 +2016,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
         // for pointers to from semispace instead of looking for pointers
         // to new space.
         DCHECK(!target->IsMap());
-        Address obj_address = target->address();
-
-        // We are not collecting slots on new space objects during mutation,
-        // thus we have to scan for pointers to evacuation candidates when we
-        // promote objects. But we should not record any slots in non-black
-        // objects. Grey objects' slots would be rescanned. A white object
-        // might not survive until the end of collection; it would be a
-        // violation of the invariant to record its slots.
-        bool record_slots = false;
-        if (incremental_marking()->IsCompacting()) {
-          MarkBit mark_bit = Marking::MarkBitFrom(target);
-          record_slots = Marking::IsBlack(mark_bit);
-        }
-#if V8_DOUBLE_FIELDS_UNBOXING
-        LayoutDescriptorHelper helper(target->map());
-        bool has_only_tagged_fields = helper.all_fields_tagged();
-
-        if (!has_only_tagged_fields) {
-          for (int offset = 0; offset < size;) {
-            int end_of_region_offset;
-            if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-              IterateAndMarkPointersToFromSpace(
-                  target, obj_address + offset,
-                  obj_address + end_of_region_offset, record_slots,
-                  &ScavengeObject);
-            }
-            offset = end_of_region_offset;
-          }
-        } else {
-#endif
-          IterateAndMarkPointersToFromSpace(target, obj_address,
-                                            obj_address + size, record_slots,
-                                            &ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
-        }
-#endif
+
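+        // Scan the promoted object for pointers into from-space; the
+        // layout-specific logic lives in Heap::IteratePointersToFromSpace.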
+        IteratePointersToFromSpace(target, size, &ScavengeObject);
       }
     }
 
@@ -5184,6 +5150,68 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
 }
 
 
+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation,
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. Grey objects' slots would be rescanned. A white object
+  // might not survive until the end of collection; it would be a
+  // violation of the invariant to record its slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents.
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
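+      // Every field of the object is a tagged slot, so the whole body can
+      // be scanned in one pass.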
+      IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
+                                        record_slots, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
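+        // Only the region between kBasePointerOffset and kHeaderSize (the
+        // base_pointer slot) holds a tagged value; the typed-array elements
+        // that follow are raw data.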
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
+            callback);
+      } else if (target->IsJSArrayBuffer()) {
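+        // Scan the tagged fields up to and including byte_length, skip the
+        // untagged words before kSize (notably the raw backing-store
+        // pointer), then resume with any fields past the fixed header.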
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            record_slots, callback);
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + JSArrayBuffer::kSize, obj_address + size,
+            record_slots, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
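+        // With unboxed double fields, the layout descriptor records which
+        // regions of the object hold tagged values; scan only those regions.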
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
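+      // Raw content (for example string characters) holds no heap pointers,
+      // so there is nothing to scan.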
+      break;
+    }
+  }
+}
+
+
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);