
Commit 4c7acc2

targos authored and MylesBorins committed
deps: V8: cherry-pick 6b0a953
Original commit message:

    [api] Add possibility for BackingStore to keep Allocator alive

    Add an `array_buffer_allocator_shared` field to the
    `Isolate::CreateParams` struct that allows embedders to share
    ownership of the ArrayBuffer::Allocator with V8, which in particular
    means that, when this method is used, the BackingStore deleter will
    not perform a use-after-free access to the Allocator under certain
    circumstances.

    For Background:
    tl;dr: This is necessary for Node.js to perform the transition to
    V8 7.9, because of the way that ArrayBuffer::Allocators and their
    lifetimes currently work there.

    In Node.js, each Worker thread has its own ArrayBuffer::Allocator.
    Changing that would currently be impractical, as each allocator
    depends on per-Isolate state. However, now that backing stores are
    managed globally and keep a pointer to the original
    ArrayBuffer::Allocator, this means that when transferring an
    ArrayBuffer (e.g. from one Worker to another through postMessage()),
    the original Allocator has to be kept alive until the ArrayBuffer no
    longer exists in the receiving Isolate (or until that Isolate is
    disposed). See [1] for an example Node.js test that fails with
    V8 7.9.

    This problem also existed for SharedArrayBuffers, where Node.js was
    broken by V8 earlier for the same reasons (see [2] for the bug
    report on that and [3] for the resolution in Node.js). For
    SharedArrayBuffers, we already had extensive tracking logic, so
    adding a shared_ptr to keep the ArrayBuffer::Allocator alive was not
    a significant amount of work. However, the mechanism for
    transferring non-shared ArrayBuffers is quite different, and it
    seems both easier for us and better for V8 from an API standpoint to
    keep the Allocator alive from where it is being referenced. By
    sharing memory with the custom deleter function/data pair, this
    comes at no memory overhead.

    [1]: #30044
    [2]: nodejs/node-v8#115
    [3]: #29637

    Bug: v8:9380
    Change-Id: Ibc2c4fb6341b53653cbd637bd8cb3d4ac43809c7
    Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1874347
    Commit-Queue: Ulan Degenbaev <[email protected]>
    Reviewed-by: Ulan Degenbaev <[email protected]>
    Reviewed-by: Igor Sheludko <[email protected]>
    Cr-Commit-Position: refs/heads/master@{#64542}

Refs: v8/v8@6b0a953
Backport-PR-URL: #30513
PR-URL: #30020
Reviewed-By: Colin Ihrig <[email protected]>
Reviewed-By: Jiawen Geng <[email protected]>
Reviewed-By: Anna Henningsen <[email protected]>
Reviewed-By: Matteo Collina <[email protected]>
1 parent ebef1b2 commit 4c7acc2
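
For orientation, here is a minimal sketch of how an embedder opts into the new field (assuming a V8 build that includes this patch; platform setup, context creation, and error handling are omitted):

// Sketch only: demonstrates Isolate::CreateParams::array_buffer_allocator_shared.
#include <memory>
#include "v8.h"

void CreateIsolateWithSharedAllocator() {
  std::shared_ptr<v8::ArrayBuffer::Allocator> allocator(
      v8::ArrayBuffer::Allocator::NewDefaultAllocator());

  v8::Isolate::CreateParams params;
  // Share ownership with V8 instead of (or in addition to) setting the raw
  // array_buffer_allocator pointer; if both are set, they must match.
  params.array_buffer_allocator_shared = allocator;
  v8::Isolate* isolate = v8::Isolate::New(params);

  // ... create contexts, run scripts, transfer ArrayBuffers between isolates ...

  isolate->Dispose();
  // The allocator is destroyed only after the Isolate and every BackingStore
  // that still references it have dropped their shared_ptr copies.
}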

File tree: 7 files changed (+173 -25 lines)


common.gypi (+1 -1)

@@ -39,7 +39,7 @@
 
     # Reset this number to 0 on major V8 upgrades.
     # Increment by one for each non-official patch applied to deps/v8.
-    'v8_embedder_string': '-node.12',
+    'v8_embedder_string': '-node.13',
 
     ##### V8 defaults for Node.js #####

deps/v8/include/v8.h (+11)

@@ -4828,6 +4828,10 @@ enum class ArrayBufferCreationMode { kInternalized, kExternalized };
  * V8. Clients should always use standard C++ memory ownership types (i.e.
  * std::unique_ptr and std::shared_ptr) to manage lifetimes of backing stores
  * properly, since V8 internal objects may alias backing stores.
+ *
+ * This object does not keep the underlying |ArrayBuffer::Allocator| alive by
+ * default. Use Isolate::CreateParams::array_buffer_allocator_shared when
+ * creating the Isolate to make it hold a reference to the allocator itself.
  */
 class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
  public:
@@ -7837,6 +7841,7 @@ class V8_EXPORT Isolate {
           create_histogram_callback(nullptr),
           add_histogram_sample_callback(nullptr),
           array_buffer_allocator(nullptr),
+          array_buffer_allocator_shared(),
           external_references(nullptr),
           allow_atomics_wait(true),
           only_terminate_in_safe_scope(false) {}
@@ -7876,8 +7881,14 @@ class V8_EXPORT Isolate {
     /**
      * The ArrayBuffer::Allocator to use for allocating and freeing the backing
      * store of ArrayBuffers.
+     *
+     * If the shared_ptr version is used, the Isolate instance and every
+     * |BackingStore| allocated using this allocator hold a std::shared_ptr
+     * to the allocator, in order to facilitate lifetime
+     * management for the allocator instance.
      */
     ArrayBuffer::Allocator* array_buffer_allocator;
+    std::shared_ptr<ArrayBuffer::Allocator> array_buffer_allocator_shared;
 
     /**
      * Specifies an optional nullptr-terminated array of raw addresses in the

deps/v8/src/api/api.cc (+9 -2)

@@ -8149,8 +8149,15 @@ Isolate* Isolate::Allocate() {
 void Isolate::Initialize(Isolate* isolate,
                          const v8::Isolate::CreateParams& params) {
   i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
-  CHECK_NOT_NULL(params.array_buffer_allocator);
-  i_isolate->set_array_buffer_allocator(params.array_buffer_allocator);
+  if (auto allocator = params.array_buffer_allocator_shared) {
+    CHECK(params.array_buffer_allocator == nullptr ||
+          params.array_buffer_allocator == allocator.get());
+    i_isolate->set_array_buffer_allocator(allocator.get());
+    i_isolate->set_array_buffer_allocator_shared(std::move(allocator));
+  } else {
+    CHECK_NOT_NULL(params.array_buffer_allocator);
+    i_isolate->set_array_buffer_allocator(params.array_buffer_allocator);
+  }
   if (params.snapshot_blob != nullptr) {
     i_isolate->set_snapshot_blob(params.snapshot_blob);
   } else {

deps/v8/src/execution/isolate.h (+10)

@@ -1347,6 +1347,15 @@ class Isolate final : private HiddenFactory {
     return array_buffer_allocator_;
   }
 
+  void set_array_buffer_allocator_shared(
+      std::shared_ptr<v8::ArrayBuffer::Allocator> allocator) {
+    array_buffer_allocator_shared_ = std::move(allocator);
+  }
+  std::shared_ptr<v8::ArrayBuffer::Allocator> array_buffer_allocator_shared()
+      const {
+    return array_buffer_allocator_shared_;
+  }
+
   FutexWaitListNode* futex_wait_list_node() { return &futex_wait_list_node_; }
 
   CancelableTaskManager* cancelable_task_manager() {
@@ -1758,6 +1767,7 @@ class Isolate final : private HiddenFactory {
   uint32_t embedded_blob_size_ = 0;
 
   v8::ArrayBuffer::Allocator* array_buffer_allocator_ = nullptr;
+  std::shared_ptr<v8::ArrayBuffer::Allocator> array_buffer_allocator_shared_;
 
   FutexWaitListNode futex_wait_list_node_;

deps/v8/src/objects/backing-store.cc (+27 -10)

@@ -114,6 +114,11 @@ void BackingStore::Clear() {
   buffer_start_ = nullptr;
   byte_length_ = 0;
   has_guard_regions_ = false;
+  if (holds_shared_ptr_to_allocator_) {
+    type_specific_data_.v8_api_array_buffer_allocator_shared
+        .std::shared_ptr<v8::ArrayBuffer::Allocator>::~shared_ptr();
+    holds_shared_ptr_to_allocator_ = false;
+  }
   type_specific_data_.v8_api_array_buffer_allocator = nullptr;
 }
 
@@ -154,14 +159,14 @@ BackingStore::~BackingStore() {
     DCHECK(free_on_destruct_);
     TRACE_BS("BS:custome deleter bs=%p mem=%p (length=%zu, capacity=%zu)\n",
              this, buffer_start_, byte_length(), byte_capacity_);
-    type_specific_data_.deleter(buffer_start_, byte_length_, deleter_data_);
+    type_specific_data_.deleter.callback(buffer_start_, byte_length_,
+                                         type_specific_data_.deleter.data);
     Clear();
     return;
   }
   if (free_on_destruct_) {
     // JSArrayBuffer backing store. Deallocate through the embedder's allocator.
-    auto allocator = reinterpret_cast<v8::ArrayBuffer::Allocator*>(
-        get_v8_api_array_buffer_allocator());
+    auto allocator = get_v8_api_array_buffer_allocator();
     TRACE_BS("BS:free bs=%p mem=%p (length=%zu, capacity=%zu)\n", this,
              buffer_start_, byte_length(), byte_capacity_);
     allocator->Free(buffer_start_, byte_length_);
@@ -224,10 +229,22 @@ std::unique_ptr<BackingStore> BackingStore::Allocate(
 
   TRACE_BS("BS:alloc bs=%p mem=%p (length=%zu)\n", result,
            result->buffer_start(), byte_length);
-  result->type_specific_data_.v8_api_array_buffer_allocator = allocator;
+  result->SetAllocatorFromIsolate(isolate);
   return std::unique_ptr<BackingStore>(result);
 }
 
+void BackingStore::SetAllocatorFromIsolate(Isolate* isolate) {
+  if (auto allocator_shared = isolate->array_buffer_allocator_shared()) {
+    holds_shared_ptr_to_allocator_ = true;
+    new (&type_specific_data_.v8_api_array_buffer_allocator_shared)
+        std::shared_ptr<v8::ArrayBuffer::Allocator>(
+            std::move(allocator_shared));
+  } else {
+    type_specific_data_.v8_api_array_buffer_allocator =
+        isolate->array_buffer_allocator();
+  }
+}
+
 // Allocate a backing store for a Wasm memory. Always use the page allocator
 // and add guard regions.
 std::unique_ptr<BackingStore> BackingStore::TryAllocateWasmMemory(
@@ -470,8 +487,7 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
                                  free_on_destruct,   // free_on_destruct
                                  false,              // has_guard_regions
                                  false);             // custom_deleter
-  result->type_specific_data_.v8_api_array_buffer_allocator =
-      isolate->array_buffer_allocator();
+  result->SetAllocatorFromIsolate(isolate);
   TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", result,
            result->buffer_start(), result->byte_length());
   return std::unique_ptr<BackingStore>(result);
@@ -489,8 +505,7 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
                                  true,               // free_on_destruct
                                  false,              // has_guard_regions
                                  true);              // custom_deleter
-  result->type_specific_data_.deleter = deleter;
-  result->deleter_data_ = deleter_data;
+  result->type_specific_data_.deleter = {deleter, deleter_data};
   TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", result,
            result->buffer_start(), result->byte_length());
   return std::unique_ptr<BackingStore>(result);
@@ -510,10 +525,12 @@ std::unique_ptr<BackingStore> BackingStore::EmptyBackingStore(
   return std::unique_ptr<BackingStore>(result);
 }
 
-void* BackingStore::get_v8_api_array_buffer_allocator() {
+v8::ArrayBuffer::Allocator* BackingStore::get_v8_api_array_buffer_allocator() {
   CHECK(!is_wasm_memory_);
   auto array_buffer_allocator =
-      type_specific_data_.v8_api_array_buffer_allocator;
+      holds_shared_ptr_to_allocator_
+          ? type_specific_data_.v8_api_array_buffer_allocator_shared.get()
+          : type_specific_data_.v8_api_array_buffer_allocator;
   CHECK_NOT_NULL(array_buffer_allocator);
   return array_buffer_allocator;
 }
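
Because the shared_ptr member lives inside a union (see backing-store.h below), Clear() and SetAllocatorFromIsolate() above have to manage its lifetime by hand, with placement new to start it and an explicit destructor call to end it. The following is a minimal, self-contained sketch of that idiom; the Holder type and its members are illustrative only and not part of the V8 sources:

#include <memory>
#include <new>

struct Holder {
  bool holds_shared = false;
  union Data {
    Data() : raw(nullptr) {}   // start with the trivial alternative
    ~Data() {}                 // members with non-trivial destructors are destroyed manually
    void* raw;
    std::shared_ptr<int> shared;
  } data;

  void SetShared(std::shared_ptr<int> p) {
    // Placement new begins the lifetime of the shared_ptr alternative.
    new (&data.shared) std::shared_ptr<int>(std::move(p));
    holds_shared = true;
  }

  ~Holder() {
    if (holds_shared) {
      // Explicit destructor call releases the reference held in the union.
      data.shared.~shared_ptr();
    }
  }
};

int main() {
  auto p = std::make_shared<int>(42);
  {
    Holder h;
    h.SetShared(p);  // use_count() is now 2
  }                  // Holder's destructor drops the union's reference
  // p is the sole owner again here (use_count() == 1).
}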

deps/v8/src/objects/backing-store.h (+23 -12)

@@ -128,45 +128,56 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
         byte_capacity_(byte_capacity),
         is_shared_(shared == SharedFlag::kShared),
         is_wasm_memory_(is_wasm_memory),
+        holds_shared_ptr_to_allocator_(false),
         free_on_destruct_(free_on_destruct),
         has_guard_regions_(has_guard_regions),
         globally_registered_(false),
-        custom_deleter_(custom_deleter) {
-    type_specific_data_.v8_api_array_buffer_allocator = nullptr;
-    deleter_data_ = nullptr;
-  }
+        custom_deleter_(custom_deleter) {}
+  void SetAllocatorFromIsolate(Isolate* isolate);
 
   void* buffer_start_ = nullptr;
   std::atomic<size_t> byte_length_{0};
   size_t byte_capacity_ = 0;
-  union {
+
+  struct DeleterInfo {
+    v8::BackingStoreDeleterCallback callback;
+    void* data;
+  };
+
+  union TypeSpecificData {
+    TypeSpecificData() : v8_api_array_buffer_allocator(nullptr) {}
+    ~TypeSpecificData() {}
+
     // If this backing store was allocated through the ArrayBufferAllocator API,
     // this is a direct pointer to the API object for freeing the backing
     // store.
-    // Note: we use {void*} here because we cannot forward-declare an inner
-    // class from the API.
-    void* v8_api_array_buffer_allocator;
+    v8::ArrayBuffer::Allocator* v8_api_array_buffer_allocator;
+
+    // Holds a shared_ptr to the ArrayBuffer::Allocator instance, if requested
+    // so by the embedder through setting
+    // Isolate::CreateParams::array_buffer_allocator_shared.
+    std::shared_ptr<v8::ArrayBuffer::Allocator>
+        v8_api_array_buffer_allocator_shared;
 
     // For shared Wasm memories, this is a list of all the attached memory
     // objects, which is needed to grow shared backing stores.
     SharedWasmMemoryData* shared_wasm_memory_data;
 
     // Custom deleter for the backing stores that wrap memory blocks that are
    // allocated with a custom allocator.
-    v8::BackingStoreDeleterCallback deleter;
+    DeleterInfo deleter;
   } type_specific_data_;
 
-  void* deleter_data_;
-
   bool is_shared_ : 1;
   bool is_wasm_memory_ : 1;
+  bool holds_shared_ptr_to_allocator_ : 1;
   bool free_on_destruct_ : 1;
   bool has_guard_regions_ : 1;
   bool globally_registered_ : 1;
   bool custom_deleter_ : 1;
 
   // Accessors for type-specific data.
-  void* get_v8_api_array_buffer_allocator();
+  v8::ArrayBuffer::Allocator* get_v8_api_array_buffer_allocator();
   SharedWasmMemoryData* get_shared_wasm_memory_data();
 
   void Clear();  // Internally clears fields after deallocation.
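
The commit message notes that the new shared_ptr alternative "comes at no memory overhead" because it shares the union's storage with the deleter callback/data pair. A quick, hypothetical check of that size relationship (the DeleterInfo mirror below is a simplified stand-in, not the V8 type):

#include <cstddef>
#include <cstdio>
#include <memory>

// Simplified stand-in for the callback/data pair kept in the union.
struct DeleterInfo {
  void (*callback)(void* data, std::size_t length, void* deleter_data);
  void* data;
};

int main() {
  // On common 64-bit ABIs both are two pointers wide (16 bytes), so adding
  // the shared_ptr alternative does not grow the union or the BackingStore.
  std::printf("shared_ptr<int>: %zu bytes, DeleterInfo: %zu bytes\n",
              sizeof(std::shared_ptr<int>), sizeof(DeleterInfo));
}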

deps/v8/test/cctest/test-api-array-buffer.cc (+92)

@@ -608,3 +608,95 @@ TEST(SharedArrayBuffer_NewBackingStore_CustomDeleter) {
   }
   CHECK(backing_store_custom_called);
 }
+
+class DummyAllocator final : public v8::ArrayBuffer::Allocator {
+ public:
+  DummyAllocator() : allocator_(NewDefaultAllocator()) {}
+
+  ~DummyAllocator() override { CHECK_EQ(allocation_count(), 0); }
+
+  void* Allocate(size_t length) override {
+    allocation_count_++;
+    return allocator_->Allocate(length);
+  }
+  void* AllocateUninitialized(size_t length) override {
+    allocation_count_++;
+    return allocator_->AllocateUninitialized(length);
+  }
+  void Free(void* data, size_t length) override {
+    allocation_count_--;
+    allocator_->Free(data, length);
+  }
+
+  uint64_t allocation_count() const { return allocation_count_; }
+
+ private:
+  std::unique_ptr<v8::ArrayBuffer::Allocator> allocator_;
+  uint64_t allocation_count_ = 0;
+};
+
+TEST(BackingStore_HoldAllocatorAlive_UntilIsolateShutdown) {
+  std::shared_ptr<DummyAllocator> allocator =
+      std::make_shared<DummyAllocator>();
+  std::weak_ptr<DummyAllocator> allocator_weak(allocator);
+
+  v8::Isolate::CreateParams create_params;
+  create_params.array_buffer_allocator_shared = allocator;
+  v8::Isolate* isolate = v8::Isolate::New(create_params);
+  isolate->Enter();
+
+  allocator.reset();
+  create_params.array_buffer_allocator_shared.reset();
+  CHECK(!allocator_weak.expired());
+  CHECK_EQ(allocator_weak.lock()->allocation_count(), 0);
+
+  {
+    // Create an ArrayBuffer and do not garbage collect it. This should make
+    // the allocator be released automatically once the Isolate is disposed.
+    v8::HandleScope handle_scope(isolate);
+    v8::Context::Scope context_scope(Context::New(isolate));
+    v8::ArrayBuffer::New(isolate, 8);
+
+    // This should be inside the HandleScope, so that we can be sure that
+    // the allocation is not garbage collected yet.
+    CHECK(!allocator_weak.expired());
+    CHECK_EQ(allocator_weak.lock()->allocation_count(), 1);
+  }
+
+  isolate->Exit();
+  isolate->Dispose();
+  CHECK(allocator_weak.expired());
+}
+
+TEST(BackingStore_HoldAllocatorAlive_AfterIsolateShutdown) {
+  std::shared_ptr<DummyAllocator> allocator =
+      std::make_shared<DummyAllocator>();
+  std::weak_ptr<DummyAllocator> allocator_weak(allocator);
+
+  v8::Isolate::CreateParams create_params;
+  create_params.array_buffer_allocator_shared = allocator;
+  v8::Isolate* isolate = v8::Isolate::New(create_params);
+  isolate->Enter();
+
+  allocator.reset();
+  create_params.array_buffer_allocator_shared.reset();
+  CHECK(!allocator_weak.expired());
+  CHECK_EQ(allocator_weak.lock()->allocation_count(), 0);
+
+  std::shared_ptr<v8::BackingStore> backing_store;
+  {
+    // Create an ArrayBuffer and do not garbage collect it. This should make
+    // the allocator be released automatically once the Isolate is disposed.
+    v8::HandleScope handle_scope(isolate);
+    v8::Context::Scope context_scope(Context::New(isolate));
+    v8::Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 8);
+    backing_store = ab->GetBackingStore();
+  }
+
+  isolate->Exit();
+  isolate->Dispose();
+  CHECK(!allocator_weak.expired());
+  CHECK_EQ(allocator_weak.lock()->allocation_count(), 1);
+  backing_store.reset();
+  CHECK(allocator_weak.expired());
+}
