diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 3d95373..5bcc909 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -1854,62 +1854,17 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
 }
 
 
-void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                        void* data, size_t length) {
-  live_buffers[data] = length;
-}
-
-
-void Heap::UnregisterArrayBufferHelper(
-    std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  DCHECK(live_buffers.count(data) > 0);
-  live_buffers.erase(data);
-  not_yet_discovered_buffers.erase(data);
-}
-
-
-void Heap::RegisterLiveArrayBufferHelper(
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  not_yet_discovered_buffers.erase(data);
-}
-
-
-size_t Heap::FreeDeadArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  size_t freed_memory = 0;
-  for (auto buffer = not_yet_discovered_buffers.begin();
-       buffer != not_yet_discovered_buffers.end(); ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    freed_memory += buffer->second;
-    live_buffers.erase(buffer->first);
-  }
-  not_yet_discovered_buffers = live_buffers;
-  return freed_memory;
-}
-
-
-void Heap::TearDownArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
-       ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_buffers.clear();
-  not_yet_discovered_buffers.clear();
-}
-
-
 void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
                                   size_t length) {
   if (!data) return;
-  RegisterNewArrayBufferHelper(live_array_buffers_, data, length);
   if (in_new_space) {
-    RegisterNewArrayBufferHelper(live_array_buffers_for_scavenge_, data,
-                                 length);
+    live_array_buffers_for_scavenge_[data] = length;
+  } else {
+    live_array_buffers_[data] = length;
   }
+
+  // We may go over the limit of externally allocated memory here. We call the
+  // api function to trigger a GC in this case.
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }
@@ -1917,57 +1872,75 @@ void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
 
 void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
   if (!data) return;
-  UnregisterArrayBufferHelper(live_array_buffers_,
-                              not_yet_discovered_array_buffers_, data);
-  if (in_new_space) {
-    UnregisterArrayBufferHelper(live_array_buffers_for_scavenge_,
-                                not_yet_discovered_array_buffers_for_scavenge_,
-                                data);
-  }
+
+  std::map<void*, size_t>* live_buffers =
+      in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
+  std::map<void*, size_t>* not_yet_discovered_buffers =
+      in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
+                   : &not_yet_discovered_array_buffers_;
+
+  DCHECK(live_buffers->count(data) > 0);
+  live_buffers->erase(data);
+  not_yet_discovered_buffers->erase(data);
 }
 
 
 void Heap::RegisterLiveArrayBuffer(bool from_scavenge, void* data) {
   // ArrayBuffer might be in the middle of being constructed.
   if (data == undefined_value()) return;
-  RegisterLiveArrayBufferHelper(
-      from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_
-                    : not_yet_discovered_array_buffers_,
-      data);
+  if (from_scavenge) {
+    not_yet_discovered_array_buffers_for_scavenge_.erase(data);
+  } else if (!not_yet_discovered_array_buffers_.erase(data)) {
+    not_yet_discovered_array_buffers_for_scavenge_.erase(data);
+  }
 }
 
 
 void Heap::FreeDeadArrayBuffers(bool from_scavenge) {
-  if (from_scavenge) {
-    for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
-      not_yet_discovered_array_buffers_.erase(buffer.first);
-      live_array_buffers_.erase(buffer.first);
-    }
-  } else {
+  size_t freed_memory = 0;
+  for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
+    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
+    live_array_buffers_for_scavenge_.erase(buffer.first);
+  }
+
+  if (!from_scavenge) {
     for (auto& buffer : not_yet_discovered_array_buffers_) {
-      // Scavenge can't happend during evacuation, so we only need to update
-      // live_array_buffers_for_scavenge_.
-      // not_yet_discovered_array_buffers_for_scanvenge_ will be reset before
-      // the next scavenge run in PrepareArrayBufferDiscoveryInNewSpace.
-      live_array_buffers_for_scavenge_.erase(buffer.first);
+      isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+      freed_memory += buffer.second;
+      live_array_buffers_.erase(buffer.first);
     }
   }
-  size_t freed_memory = FreeDeadArrayBuffersHelper(
-      isolate_,
-      from_scavenge ? live_array_buffers_for_scavenge_ : live_array_buffers_,
-      from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_
-                    : not_yet_discovered_array_buffers_);
-  if (freed_memory) {
-    reinterpret_cast<v8::Isolate*>(isolate_)
-        ->AdjustAmountOfExternalAllocatedMemory(
-            -static_cast<int64_t>(freed_memory));
-  }
+
+  not_yet_discovered_array_buffers_for_scavenge_ =
+      live_array_buffers_for_scavenge_;
+  if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
+
+  // Do not call through the api as this code is triggered while doing a GC.
+  amount_of_external_allocated_memory_ -= freed_memory;
 }
 
 
 void Heap::TearDownArrayBuffers() {
-  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
-                             not_yet_discovered_array_buffers_);
+  size_t freed_memory = 0;
+  for (auto& buffer : live_array_buffers_) {
+    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
+  }
+  for (auto& buffer : live_array_buffers_for_scavenge_) {
+    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
+  }
+  live_array_buffers_.clear();
+  live_array_buffers_for_scavenge_.clear();
+  not_yet_discovered_array_buffers_.clear();
+  not_yet_discovered_array_buffers_for_scavenge_.clear();
+
+  if (freed_memory > 0) {
+    reinterpret_cast<v8::Isolate*>(isolate_)
+        ->AdjustAmountOfExternalAllocatedMemory(
+            -static_cast<int64_t>(freed_memory));
+  }
 }
 
 
@@ -1985,7 +1958,7 @@ void Heap::PromoteArrayBuffer(Object* obj) {
   // ArrayBuffer might be in the middle of being constructed.
   if (data == undefined_value()) return;
   DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
-  DCHECK(live_array_buffers_.count(data) > 0);
+  live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
   live_array_buffers_for_scavenge_.erase(data);
   not_yet_discovered_array_buffers_for_scavenge_.erase(data);
 }
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 4c9e3ad..0f0cfc1 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -2147,21 +2147,6 @@ class Heap {
   // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
   void TearDownArrayBuffers();
 
-  // These correspond to the non-Helper versions.
-  void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                    void* data, size_t length);
-  void UnregisterArrayBufferHelper(
-      std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
-  void RegisterLiveArrayBufferHelper(
-      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
-  size_t FreeDeadArrayBuffersHelper(
-      Isolate* isolate, std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers);
-  void TearDownArrayBuffersHelper(
-      Isolate* isolate, std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers);
-
   // Record statistics before and after garbage collection.
   void ReportStatisticsBeforeGC();
   void ReportStatisticsAfterGC();
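
The patch deletes the map-parameterized *Helper functions and operates on the four bookkeeping maps directly. It also stops double-registering new-space buffers in live_array_buffers_: a new-space buffer now lives only in the scavenge map, and Heap::PromoteArrayBuffer moves its entry into the main map on promotion. The underlying scheme is unchanged: every registered backing store sits in a "live" map, a "not yet discovered" copy is taken for each GC cycle, the collector erases each buffer it visits, and whatever remains afterwards is unreachable and is freed.

Below is a minimal standalone sketch of that discovery scheme, for readers unfamiliar with the heap code. It is illustrative only: BufferTracker, Allocator, and the method names are hypothetical stand-ins rather than V8 classes, the allocator is plain malloc/free, and the separate new-space/old-space map pairs are collapsed into a single pair.

#include <cstddef>
#include <cstdio>
#include <cstdlib>
#include <map>

// Hypothetical stand-in for v8::ArrayBuffer::Allocator: owns raw backing
// stores and hands them back on Free().
struct Allocator {
  void* Allocate(size_t length) { return std::malloc(length); }
  void Free(void* data, size_t /*length*/) { std::free(data); }
};

// Two-map scheme from the patch: live_ holds every registered backing
// store; not_yet_discovered_ starts each GC cycle as a copy of live_ and
// shrinks as the collector visits buffers. Whatever survives marking was
// unreachable and can be freed.
class BufferTracker {
 public:
  explicit BufferTracker(Allocator* allocator) : allocator_(allocator) {}

  // Mirrors Heap::RegisterNewArrayBuffer: new buffers enter only the live
  // map, so they cannot be freed before the collector has seen them once.
  void RegisterNew(void* data, size_t length) { live_[data] = length; }

  // Mirrors Heap::RegisterLiveArrayBuffer: the GC calls this for every
  // buffer it discovers, removing it from the free candidates.
  void RegisterLive(void* data) { not_yet_discovered_.erase(data); }

  // Mirrors Heap::FreeDeadArrayBuffers: release the undiscovered buffers,
  // then reset the discovery map for the next cycle.
  size_t FreeDead() {
    size_t freed_memory = 0;
    for (auto& buffer : not_yet_discovered_) {
      allocator_->Free(buffer.first, buffer.second);
      freed_memory += buffer.second;
      live_.erase(buffer.first);
    }
    not_yet_discovered_ = live_;
    return freed_memory;
  }

 private:
  Allocator* allocator_;
  std::map<void*, size_t> live_;
  std::map<void*, size_t> not_yet_discovered_;
};

int main() {
  Allocator allocator;
  BufferTracker tracker(&allocator);

  void* a = allocator.Allocate(16);
  void* b = allocator.Allocate(32);
  tracker.RegisterNew(a, 16);
  tracker.RegisterNew(b, 32);

  tracker.FreeDead();       // First cycle: frees nothing, seeds the copy.
  tracker.RegisterLive(a);  // The collector only discovered `a`.
  std::printf("freed %zu bytes\n", tracker.FreeDead());  // Frees `b`: 32.
  return 0;
}

Note the reset-after-sweep ordering: a buffer registered between two collections is absent from the discovery map, so it is implicitly treated as discovered for that cycle. That is why RegisterNew touches only the live map, matching the patched Heap::RegisterNewArrayBuffer, and why FreeDeadArrayBuffers ends by copying the live maps back into the not-yet-discovered maps.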