From 68ee8e90891f69ebfebb42bf53bf5a4258357828 Mon Sep 17 00:00:00 2001 From: legendecas Date: Tue, 2 Aug 2022 00:01:02 +0800 Subject: [PATCH 001/177] src: split property helpers from node::Environment PR-URL: https://github.com/nodejs/node/pull/44056 Backport-PR-URL: https://github.com/nodejs/node/pull/44251 Refs: https://github.com/nodejs/node/issues/42528 Reviewed-By: Ben Noordhuis Reviewed-By: Matteo Collina Reviewed-By: Joyee Cheung Reviewed-By: Feng Yu --- src/README.md | 21 +-- src/async_wrap.cc | 28 ++-- src/cares_wrap.cc | 58 +++---- src/crypto/crypto_cipher.cc | 77 +++++---- src/crypto/crypto_context.cc | 114 ++++++------- src/crypto/crypto_dh.cc | 31 ++-- src/crypto/crypto_ec.cc | 25 +-- src/crypto/crypto_hash.cc | 14 +- src/crypto/crypto_hmac.cc | 12 +- src/crypto/crypto_keys.cc | 38 +++-- src/crypto/crypto_sig.cc | 23 +-- src/crypto/crypto_spkac.cc | 10 +- src/crypto/crypto_timing.cc | 3 +- src/crypto/crypto_tls.cc | 91 +++++----- src/crypto/crypto_util.cc | 13 +- src/crypto/crypto_util.h | 9 +- src/crypto/crypto_x509.cc | 52 +++--- src/env.cc | 147 +--------------- src/env.h | 50 ------ src/fs_event_wrap.cc | 8 +- src/handle_wrap.cc | 12 +- src/heap_utils.cc | 9 +- src/histogram.cc | 90 +++++----- src/inspector_js_api.cc | 72 ++++---- src/inspector_profiler.cc | 10 +- src/js_stream.cc | 14 +- src/js_udp_wrap.cc | 12 +- src/module_wrap.cc | 46 ++--- src/node.cc | 6 +- src/node_blob.cc | 23 +-- src/node_buffer.cc | 100 +++++------ src/node_contextify.cc | 36 ++-- src/node_credentials.cc | 24 +-- src/node_dir.cc | 11 +- src/node_dtrace.cc | 4 +- src/node_errors.cc | 30 ++-- src/node_file.cc | 92 +++++----- src/node_http2.cc | 76 +++++---- src/node_http_parser.cc | 48 +++--- src/node_i18n.cc | 18 +- src/node_messaging.cc | 50 +++--- src/node_native_module.cc | 20 ++- src/node_options.cc | 5 +- src/node_os.cc | 32 ++-- src/node_perf.cc | 28 ++-- src/node_process_methods.cc | 51 +++--- src/node_process_object.cc | 2 +- src/node_report_module.cc | 44 ++--- src/node_serdes.cc | 67 ++++---- src/node_snapshotable.cc | 15 +- src/node_sockaddr.cc | 43 ++--- src/node_stat_watcher.cc | 9 +- src/node_task_queue.cc | 11 +- src/node_trace_events.cc | 19 ++- src/node_types.cc | 10 +- src/node_url.cc | 11 +- src/node_util.cc | 44 ++--- src/node_v8.cc | 40 +++-- src/node_wasi.cc | 109 ++++++------ src/node_wasm_web_api.cc | 15 +- src/node_watchdog.cc | 11 +- src/node_worker.cc | 45 ++--- src/node_zlib.cc | 18 +- src/pipe_wrap.cc | 20 ++- src/process_wrap.cc | 10 +- src/signal_wrap.cc | 10 +- src/spawn_sync.cc | 3 +- src/stream_base.cc | 65 ++++---- src/stream_pipe.cc | 14 +- src/stream_wrap.cc | 41 ++--- src/string_decoder.cc | 4 +- src/tcp_wrap.cc | 40 +++-- src/timers.cc | 18 +- src/tty_wrap.cc | 15 +- src/udp_wrap.cc | 75 ++++----- src/util.cc | 157 ++++++++++++++++++ src/util.h | 60 +++++++ src/uv.cc | 8 +- ...otomethod-remove-unnecessary-prototypes.js | 2 +- 79 files changed, 1505 insertions(+), 1293 deletions(-) diff --git a/src/README.md b/src/README.md index 774614395afd40..b48402bacefae2 100644 --- a/src/README.md +++ b/src/README.md @@ -390,32 +390,33 @@ void Initialize(Local target, void* priv) { Environment* env = Environment::GetCurrent(context); - env->SetMethod(target, "getaddrinfo", GetAddrInfo); - env->SetMethod(target, "getnameinfo", GetNameInfo); + SetMethod(context, target, "getaddrinfo", GetAddrInfo); + SetMethod(context, target, "getnameinfo", GetNameInfo); // 'SetMethodNoSideEffect' means that debuggers can safely execute 
this // function for e.g. previews. - env->SetMethodNoSideEffect(target, "canonicalizeIP", CanonicalizeIP); + SetMethodNoSideEffect(context, target, "canonicalizeIP", CanonicalizeIP); // ... more code ... + Isolate* isolate = env->isolate(); // Building the `ChannelWrap` class for JS: Local channel_wrap = - env->NewFunctionTemplate(ChannelWrap::New); + NewFunctionTemplate(isolate, ChannelWrap::New); // Allow for 1 internal field, see `BaseObject` for details on this: channel_wrap->InstanceTemplate()->SetInternalFieldCount(1); channel_wrap->Inherit(AsyncWrap::GetConstructorTemplate(env)); // Set various methods on the class (i.e. on the prototype): - env->SetProtoMethod(channel_wrap, "queryAny", Query); - env->SetProtoMethod(channel_wrap, "queryA", Query); + SetProtoMethod(isolate, channel_wrap, "queryAny", Query); + SetProtoMethod(isolate, channel_wrap, "queryA", Query); // ... - env->SetProtoMethod(channel_wrap, "querySoa", Query); - env->SetProtoMethod(channel_wrap, "getHostByAddr", Query); + SetProtoMethod(isolate, channel_wrap, "querySoa", Query); + SetProtoMethod(isolate, channel_wrap, "getHostByAddr", Query); - env->SetProtoMethodNoSideEffect(channel_wrap, "getServers", GetServers); + SetProtoMethodNoSideEffect(isolate, channel_wrap, "getServers", GetServers); - env->SetConstructorFunction(target, "ChannelWrap", channel_wrap); + SetConstructorFunction(context, target, "ChannelWrap", channel_wrap); } // Run the `Initialize` function when loading this module through diff --git a/src/async_wrap.cc b/src/async_wrap.cc index 38f2eb421f487d..9e76ad63ca625f 100644 --- a/src/async_wrap.cc +++ b/src/async_wrap.cc @@ -337,12 +337,14 @@ void AsyncWrap::SetCallbackTrampoline(const FunctionCallbackInfo& args) { Local AsyncWrap::GetConstructorTemplate(Environment* env) { Local tmpl = env->async_wrap_ctor_template(); if (tmpl.IsEmpty()) { - tmpl = env->NewFunctionTemplate(nullptr); + Isolate* isolate = env->isolate(); + tmpl = NewFunctionTemplate(isolate, nullptr); tmpl->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "AsyncWrap")); tmpl->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(tmpl, "getAsyncId", AsyncWrap::GetAsyncId); - env->SetProtoMethod(tmpl, "asyncReset", AsyncWrap::AsyncReset); - env->SetProtoMethod(tmpl, "getProviderType", AsyncWrap::GetProviderType); + SetProtoMethod(isolate, tmpl, "getAsyncId", AsyncWrap::GetAsyncId); + SetProtoMethod(isolate, tmpl, "asyncReset", AsyncWrap::AsyncReset); + SetProtoMethod( + isolate, tmpl, "getProviderType", AsyncWrap::GetProviderType); env->set_async_wrap_ctor_template(tmpl); } return tmpl; @@ -356,15 +358,15 @@ void AsyncWrap::Initialize(Local target, Isolate* isolate = env->isolate(); HandleScope scope(isolate); - env->SetMethod(target, "setupHooks", SetupHooks); - env->SetMethod(target, "setCallbackTrampoline", SetCallbackTrampoline); - env->SetMethod(target, "pushAsyncContext", PushAsyncContext); - env->SetMethod(target, "popAsyncContext", PopAsyncContext); - env->SetMethod(target, "executionAsyncResource", ExecutionAsyncResource); - env->SetMethod(target, "clearAsyncIdStack", ClearAsyncIdStack); - env->SetMethod(target, "queueDestroyAsyncId", QueueDestroyAsyncId); - env->SetMethod(target, "setPromiseHooks", SetPromiseHooks); - env->SetMethod(target, "registerDestroyHook", RegisterDestroyHook); + SetMethod(context, target, "setupHooks", SetupHooks); + SetMethod(context, target, "setCallbackTrampoline", SetCallbackTrampoline); + SetMethod(context, target, "pushAsyncContext", PushAsyncContext); + SetMethod(context, 
target, "popAsyncContext", PopAsyncContext); + SetMethod(context, target, "executionAsyncResource", ExecutionAsyncResource); + SetMethod(context, target, "clearAsyncIdStack", ClearAsyncIdStack); + SetMethod(context, target, "queueDestroyAsyncId", QueueDestroyAsyncId); + SetMethod(context, target, "setPromiseHooks", SetPromiseHooks); + SetMethod(context, target, "registerDestroyHook", RegisterDestroyHook); PropertyAttribute ReadOnlyDontDelete = static_cast(ReadOnly | DontDelete); diff --git a/src/cares_wrap.cc b/src/cares_wrap.cc index a16a0dcbd1bcec..1757d56d09ab4d 100644 --- a/src/cares_wrap.cc +++ b/src/cares_wrap.cc @@ -1886,12 +1886,13 @@ void Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - env->SetMethod(target, "getaddrinfo", GetAddrInfo); - env->SetMethod(target, "getnameinfo", GetNameInfo); - env->SetMethodNoSideEffect(target, "canonicalizeIP", CanonicalizeIP); + SetMethod(context, target, "getaddrinfo", GetAddrInfo); + SetMethod(context, target, "getnameinfo", GetNameInfo); + SetMethodNoSideEffect(context, target, "canonicalizeIP", CanonicalizeIP); - env->SetMethod(target, "strerror", StrError); + SetMethod(context, target, "strerror", StrError); target->Set(env->context(), FIXED_ONE_BYTE_STRING(env->isolate(), "AF_INET"), Integer::New(env->isolate(), AF_INET)).Check(); @@ -1913,44 +1914,45 @@ void Initialize(Local target, Local aiw = BaseObject::MakeLazilyInitializedJSTemplate(env); aiw->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetConstructorFunction(target, "GetAddrInfoReqWrap", aiw); + SetConstructorFunction(context, target, "GetAddrInfoReqWrap", aiw); Local niw = BaseObject::MakeLazilyInitializedJSTemplate(env); niw->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetConstructorFunction(target, "GetNameInfoReqWrap", niw); + SetConstructorFunction(context, target, "GetNameInfoReqWrap", niw); Local qrw = BaseObject::MakeLazilyInitializedJSTemplate(env); qrw->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetConstructorFunction(target, "QueryReqWrap", qrw); + SetConstructorFunction(context, target, "QueryReqWrap", qrw); Local channel_wrap = - env->NewFunctionTemplate(ChannelWrap::New); + NewFunctionTemplate(isolate, ChannelWrap::New); channel_wrap->InstanceTemplate()->SetInternalFieldCount( ChannelWrap::kInternalFieldCount); channel_wrap->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(channel_wrap, "queryAny", Query); - env->SetProtoMethod(channel_wrap, "queryA", Query); - env->SetProtoMethod(channel_wrap, "queryAaaa", Query); - env->SetProtoMethod(channel_wrap, "queryCaa", Query); - env->SetProtoMethod(channel_wrap, "queryCname", Query); - env->SetProtoMethod(channel_wrap, "queryMx", Query); - env->SetProtoMethod(channel_wrap, "queryNs", Query); - env->SetProtoMethod(channel_wrap, "queryTxt", Query); - env->SetProtoMethod(channel_wrap, "querySrv", Query); - env->SetProtoMethod(channel_wrap, "queryPtr", Query); - env->SetProtoMethod(channel_wrap, "queryNaptr", Query); - env->SetProtoMethod(channel_wrap, "querySoa", Query); - env->SetProtoMethod(channel_wrap, "getHostByAddr", Query); - - env->SetProtoMethodNoSideEffect(channel_wrap, "getServers", GetServers); - env->SetProtoMethod(channel_wrap, "setServers", SetServers); - env->SetProtoMethod(channel_wrap, "setLocalAddress", SetLocalAddress); - env->SetProtoMethod(channel_wrap, "cancel", Cancel); - - env->SetConstructorFunction(target, "ChannelWrap", channel_wrap); + 
SetProtoMethod(isolate, channel_wrap, "queryAny", Query); + SetProtoMethod(isolate, channel_wrap, "queryA", Query); + SetProtoMethod(isolate, channel_wrap, "queryAaaa", Query); + SetProtoMethod(isolate, channel_wrap, "queryCaa", Query); + SetProtoMethod(isolate, channel_wrap, "queryCname", Query); + SetProtoMethod(isolate, channel_wrap, "queryMx", Query); + SetProtoMethod(isolate, channel_wrap, "queryNs", Query); + SetProtoMethod(isolate, channel_wrap, "queryTxt", Query); + SetProtoMethod(isolate, channel_wrap, "querySrv", Query); + SetProtoMethod(isolate, channel_wrap, "queryPtr", Query); + SetProtoMethod(isolate, channel_wrap, "queryNaptr", Query); + SetProtoMethod(isolate, channel_wrap, "querySoa", Query); + SetProtoMethod( + isolate, channel_wrap, "getHostByAddr", Query); + + SetProtoMethodNoSideEffect(isolate, channel_wrap, "getServers", GetServers); + SetProtoMethod(isolate, channel_wrap, "setServers", SetServers); + SetProtoMethod(isolate, channel_wrap, "setLocalAddress", SetLocalAddress); + SetProtoMethod(isolate, channel_wrap, "cancel", Cancel); + + SetConstructorFunction(context, target, "ChannelWrap", channel_wrap); } } // namespace cares_wrap diff --git a/src/crypto/crypto_cipher.cc b/src/crypto/crypto_cipher.cc index 6c663a2b21d0a2..0ce2708d12ceb8 100644 --- a/src/crypto/crypto_cipher.cc +++ b/src/crypto/crypto_cipher.cc @@ -13,10 +13,12 @@ namespace node { using v8::Array; using v8::ArrayBuffer; using v8::BackingStore; +using v8::Context; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; using v8::Int32; +using v8::Isolate; using v8::Local; using v8::Object; using v8::Uint32; @@ -270,43 +272,54 @@ void CipherBase::MemoryInfo(MemoryTracker* tracker) const { } void CipherBase::Initialize(Environment* env, Local target) { - Local t = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local context = env->context(); + + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate()->SetInternalFieldCount( CipherBase::kInternalFieldCount); t->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "init", Init); - env->SetProtoMethod(t, "initiv", InitIv); - env->SetProtoMethod(t, "update", Update); - env->SetProtoMethod(t, "final", Final); - env->SetProtoMethod(t, "setAutoPadding", SetAutoPadding); - env->SetProtoMethodNoSideEffect(t, "getAuthTag", GetAuthTag); - env->SetProtoMethod(t, "setAuthTag", SetAuthTag); - env->SetProtoMethod(t, "setAAD", SetAAD); - env->SetConstructorFunction(target, "CipherBase", t); - - env->SetMethodNoSideEffect(target, "getSSLCiphers", GetSSLCiphers); - env->SetMethodNoSideEffect(target, "getCiphers", GetCiphers); - - env->SetMethod(target, "publicEncrypt", - PublicKeyCipher::Cipher); - env->SetMethod(target, "privateDecrypt", - PublicKeyCipher::Cipher); - env->SetMethod(target, "privateEncrypt", - PublicKeyCipher::Cipher); - env->SetMethod(target, "publicDecrypt", - PublicKeyCipher::Cipher); - - env->SetMethodNoSideEffect(target, "getCipherInfo", GetCipherInfo); + SetProtoMethod(isolate, t, "init", Init); + SetProtoMethod(isolate, t, "initiv", InitIv); + SetProtoMethod(isolate, t, "update", Update); + SetProtoMethod(isolate, t, "final", Final); + SetProtoMethod(isolate, t, "setAutoPadding", SetAutoPadding); + SetProtoMethodNoSideEffect(isolate, t, "getAuthTag", GetAuthTag); + SetProtoMethod(isolate, t, "setAuthTag", SetAuthTag); + SetProtoMethod(isolate, t, "setAAD", SetAAD); + SetConstructorFunction(context, target, "CipherBase", t); + + SetMethodNoSideEffect(context, target, 
"getSSLCiphers", GetSSLCiphers); + SetMethodNoSideEffect(context, target, "getCiphers", GetCiphers); + + SetMethod(context, + target, + "publicEncrypt", + PublicKeyCipher::Cipher); + SetMethod(context, + target, + "privateDecrypt", + PublicKeyCipher::Cipher); + SetMethod(context, + target, + "privateEncrypt", + PublicKeyCipher::Cipher); + SetMethod(context, + target, + "publicDecrypt", + PublicKeyCipher::Cipher); + + SetMethodNoSideEffect(context, target, "getCipherInfo", GetCipherInfo); NODE_DEFINE_CONSTANT(target, kWebCryptoCipherEncrypt); NODE_DEFINE_CONSTANT(target, kWebCryptoCipherDecrypt); diff --git a/src/crypto/crypto_context.cc b/src/crypto/crypto_context.cc index e2291f72b6622f..2265cfb63d0851 100644 --- a/src/crypto/crypto_context.cc +++ b/src/crypto/crypto_context.cc @@ -31,6 +31,7 @@ using v8::FunctionTemplate; using v8::HandleScope; using v8::Int32; using v8::Integer; +using v8::Isolate; using v8::Local; using v8::Object; using v8::PropertyAttribute; @@ -256,51 +257,52 @@ Local SecureContext::GetConstructorTemplate( Environment* env) { Local tmpl = env->secure_context_constructor_template(); if (tmpl.IsEmpty()) { - tmpl = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + tmpl = NewFunctionTemplate(isolate, New); tmpl->InstanceTemplate()->SetInternalFieldCount( SecureContext::kInternalFieldCount); tmpl->Inherit(BaseObject::GetConstructorTemplate(env)); tmpl->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "SecureContext")); - env->SetProtoMethod(tmpl, "init", Init); - env->SetProtoMethod(tmpl, "setKey", SetKey); - env->SetProtoMethod(tmpl, "setCert", SetCert); - env->SetProtoMethod(tmpl, "addCACert", AddCACert); - env->SetProtoMethod(tmpl, "addCRL", AddCRL); - env->SetProtoMethod(tmpl, "addRootCerts", AddRootCerts); - env->SetProtoMethod(tmpl, "setCipherSuites", SetCipherSuites); - env->SetProtoMethod(tmpl, "setCiphers", SetCiphers); - env->SetProtoMethod(tmpl, "setSigalgs", SetSigalgs); - env->SetProtoMethod(tmpl, "setECDHCurve", SetECDHCurve); - env->SetProtoMethod(tmpl, "setDHParam", SetDHParam); - env->SetProtoMethod(tmpl, "setMaxProto", SetMaxProto); - env->SetProtoMethod(tmpl, "setMinProto", SetMinProto); - env->SetProtoMethod(tmpl, "getMaxProto", GetMaxProto); - env->SetProtoMethod(tmpl, "getMinProto", GetMinProto); - env->SetProtoMethod(tmpl, "setOptions", SetOptions); - env->SetProtoMethod(tmpl, "setSessionIdContext", SetSessionIdContext); - env->SetProtoMethod(tmpl, "setSessionTimeout", SetSessionTimeout); - env->SetProtoMethod(tmpl, "close", Close); - env->SetProtoMethod(tmpl, "loadPKCS12", LoadPKCS12); - env->SetProtoMethod(tmpl, "setTicketKeys", SetTicketKeys); - env->SetProtoMethod(tmpl, "setFreeListLength", SetFreeListLength); - env->SetProtoMethod(tmpl, "enableTicketKeyCallback", - EnableTicketKeyCallback); - - env->SetProtoMethodNoSideEffect(tmpl, "getTicketKeys", GetTicketKeys); - env->SetProtoMethodNoSideEffect(tmpl, "getCertificate", - GetCertificate); - env->SetProtoMethodNoSideEffect(tmpl, "getIssuer", - GetCertificate); - - #ifndef OPENSSL_NO_ENGINE - env->SetProtoMethod(tmpl, "setEngineKey", SetEngineKey); - env->SetProtoMethod(tmpl, "setClientCertEngine", SetClientCertEngine); - #endif // !OPENSSL_NO_ENGINE - - #define SET_INTEGER_CONSTANTS(name, value) \ - tmpl->Set(FIXED_ONE_BYTE_STRING(env->isolate(), name), \ - Integer::NewFromUnsigned(env->isolate(), value)); + SetProtoMethod(isolate, tmpl, "init", Init); + SetProtoMethod(isolate, tmpl, "setKey", SetKey); + SetProtoMethod(isolate, tmpl, "setCert", SetCert); + 
SetProtoMethod(isolate, tmpl, "addCACert", AddCACert); + SetProtoMethod(isolate, tmpl, "addCRL", AddCRL); + SetProtoMethod(isolate, tmpl, "addRootCerts", AddRootCerts); + SetProtoMethod(isolate, tmpl, "setCipherSuites", SetCipherSuites); + SetProtoMethod(isolate, tmpl, "setCiphers", SetCiphers); + SetProtoMethod(isolate, tmpl, "setSigalgs", SetSigalgs); + SetProtoMethod(isolate, tmpl, "setECDHCurve", SetECDHCurve); + SetProtoMethod(isolate, tmpl, "setDHParam", SetDHParam); + SetProtoMethod(isolate, tmpl, "setMaxProto", SetMaxProto); + SetProtoMethod(isolate, tmpl, "setMinProto", SetMinProto); + SetProtoMethod(isolate, tmpl, "getMaxProto", GetMaxProto); + SetProtoMethod(isolate, tmpl, "getMinProto", GetMinProto); + SetProtoMethod(isolate, tmpl, "setOptions", SetOptions); + SetProtoMethod(isolate, tmpl, "setSessionIdContext", SetSessionIdContext); + SetProtoMethod(isolate, tmpl, "setSessionTimeout", SetSessionTimeout); + SetProtoMethod(isolate, tmpl, "close", Close); + SetProtoMethod(isolate, tmpl, "loadPKCS12", LoadPKCS12); + SetProtoMethod(isolate, tmpl, "setTicketKeys", SetTicketKeys); + SetProtoMethod(isolate, tmpl, "setFreeListLength", SetFreeListLength); + SetProtoMethod( + isolate, tmpl, "enableTicketKeyCallback", EnableTicketKeyCallback); + + SetProtoMethodNoSideEffect(isolate, tmpl, "getTicketKeys", GetTicketKeys); + SetProtoMethodNoSideEffect( + isolate, tmpl, "getCertificate", GetCertificate); + SetProtoMethodNoSideEffect( + isolate, tmpl, "getIssuer", GetCertificate); + +#ifndef OPENSSL_NO_ENGINE + SetProtoMethod(isolate, tmpl, "setEngineKey", SetEngineKey); + SetProtoMethod(isolate, tmpl, "setClientCertEngine", SetClientCertEngine); +#endif // !OPENSSL_NO_ENGINE + +#define SET_INTEGER_CONSTANTS(name, value) \ + tmpl->Set(FIXED_ONE_BYTE_STRING(isolate, name), \ + Integer::NewFromUnsigned(isolate, value)); SET_INTEGER_CONSTANTS("kTicketKeyReturnIndex", kTicketKeyReturnIndex); SET_INTEGER_CONSTANTS("kTicketKeyHMACIndex", kTicketKeyHMACIndex); SET_INTEGER_CONSTANTS("kTicketKeyAESIndex", kTicketKeyAESIndex); @@ -308,14 +310,11 @@ Local SecureContext::GetConstructorTemplate( SET_INTEGER_CONSTANTS("kTicketKeyIVIndex", kTicketKeyIVIndex); #undef SET_INTEGER_CONSTANTS - Local ctx_getter_templ = - FunctionTemplate::New(env->isolate(), - CtxGetter, - Local(), - Signature::New(env->isolate(), tmpl)); + Local ctx_getter_templ = FunctionTemplate::New( + isolate, CtxGetter, Local(), Signature::New(isolate, tmpl)); tmpl->PrototypeTemplate()->SetAccessorProperty( - FIXED_ONE_BYTE_STRING(env->isolate(), "_external"), + FIXED_ONE_BYTE_STRING(isolate, "_external"), ctx_getter_templ, Local(), static_cast(ReadOnly | DontDelete)); @@ -326,17 +325,20 @@ Local SecureContext::GetConstructorTemplate( } void SecureContext::Initialize(Environment* env, Local target) { - env->SetConstructorFunction( - target, - "SecureContext", - GetConstructorTemplate(env), - Environment::SetConstructorFunctionFlag::NONE); - - env->SetMethodNoSideEffect(target, "getRootCertificates", - GetRootCertificates); + Local context = env->context(); + SetConstructorFunction(context, + target, + "SecureContext", + GetConstructorTemplate(env), + SetConstructorFunctionFlag::NONE); + + SetMethodNoSideEffect( + context, target, "getRootCertificates", GetRootCertificates); // Exposed for testing purposes only. 
- env->SetMethodNoSideEffect(target, "isExtraRootCertsFileLoaded", - IsExtraRootCertsFileLoaded); + SetMethodNoSideEffect(context, + target, + "isExtraRootCertsFileLoaded", + IsExtraRootCertsFileLoaded); } void SecureContext::RegisterExternalReferences( diff --git a/src/crypto/crypto_dh.cc b/src/crypto/crypto_dh.cc index da1b810a66dc95..dd69323b80076d 100644 --- a/src/crypto/crypto_dh.cc +++ b/src/crypto/crypto_dh.cc @@ -14,12 +14,14 @@ namespace node { using v8::ArrayBuffer; using v8::BackingStore; using v8::ConstructorBehavior; +using v8::Context; using v8::DontDelete; using v8::FunctionCallback; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; using v8::Int32; +using v8::Isolate; using v8::Just; using v8::Local; using v8::Maybe; @@ -57,8 +59,10 @@ DiffieHellman::DiffieHellman(Environment* env, Local wrap) } void DiffieHellman::Initialize(Environment* env, Local target) { - auto make = [&] (Local name, FunctionCallback callback) { - Local t = env->NewFunctionTemplate(callback); + Isolate* isolate = env->isolate(); + Local context = env->context(); + auto make = [&](Local name, FunctionCallback callback) { + Local t = NewFunctionTemplate(isolate, callback); const PropertyAttribute attributes = static_cast(ReadOnly | DontDelete); @@ -67,17 +71,17 @@ void DiffieHellman::Initialize(Environment* env, Local target) { DiffieHellman::kInternalFieldCount); t->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "generateKeys", GenerateKeys); - env->SetProtoMethod(t, "computeSecret", ComputeSecret); - env->SetProtoMethodNoSideEffect(t, "getPrime", GetPrime); - env->SetProtoMethodNoSideEffect(t, "getGenerator", GetGenerator); - env->SetProtoMethodNoSideEffect(t, "getPublicKey", GetPublicKey); - env->SetProtoMethodNoSideEffect(t, "getPrivateKey", GetPrivateKey); - env->SetProtoMethod(t, "setPublicKey", SetPublicKey); - env->SetProtoMethod(t, "setPrivateKey", SetPrivateKey); + SetProtoMethod(isolate, t, "generateKeys", GenerateKeys); + SetProtoMethod(isolate, t, "computeSecret", ComputeSecret); + SetProtoMethodNoSideEffect(isolate, t, "getPrime", GetPrime); + SetProtoMethodNoSideEffect(isolate, t, "getGenerator", GetGenerator); + SetProtoMethodNoSideEffect(isolate, t, "getPublicKey", GetPublicKey); + SetProtoMethodNoSideEffect(isolate, t, "getPrivateKey", GetPrivateKey); + SetProtoMethod(isolate, t, "setPublicKey", SetPublicKey); + SetProtoMethod(isolate, t, "setPrivateKey", SetPrivateKey); Local verify_error_getter_templ = - FunctionTemplate::New(env->isolate(), + FunctionTemplate::New(isolate, DiffieHellman::VerifyErrorGetter, Local(), Signature::New(env->isolate(), t), @@ -91,14 +95,15 @@ void DiffieHellman::Initialize(Environment* env, Local target) { Local(), attributes); - env->SetConstructorFunction(target, name, t); + SetConstructorFunction(context, target, name, t); }; make(FIXED_ONE_BYTE_STRING(env->isolate(), "DiffieHellman"), New); make(FIXED_ONE_BYTE_STRING(env->isolate(), "DiffieHellmanGroup"), DiffieHellmanGroup); - env->SetMethodNoSideEffect(target, "statelessDH", DiffieHellman::Stateless); + SetMethodNoSideEffect( + context, target, "statelessDH", DiffieHellman::Stateless); DHKeyPairGenJob::Initialize(env, target); DHKeyExportJob::Initialize(env, target); DHBitsJob::Initialize(env, target); diff --git a/src/crypto/crypto_ec.cc b/src/crypto/crypto_ec.cc index e6a90200a43483..0c14b09f638596 100644 --- a/src/crypto/crypto_ec.cc +++ b/src/crypto/crypto_ec.cc @@ -21,9 +21,11 @@ namespace node { using v8::Array; using v8::ArrayBuffer; 
using v8::BackingStore; +using v8::Context; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::Int32; +using v8::Isolate; using v8::Just; using v8::JustVoid; using v8::Local; @@ -60,22 +62,25 @@ int GetOKPCurveFromName(const char* name) { } void ECDH::Initialize(Environment* env, Local target) { - Local t = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local context = env->context(); + + Local t = NewFunctionTemplate(isolate, New); t->Inherit(BaseObject::GetConstructorTemplate(env)); t->InstanceTemplate()->SetInternalFieldCount(ECDH::kInternalFieldCount); - env->SetProtoMethod(t, "generateKeys", GenerateKeys); - env->SetProtoMethod(t, "computeSecret", ComputeSecret); - env->SetProtoMethodNoSideEffect(t, "getPublicKey", GetPublicKey); - env->SetProtoMethodNoSideEffect(t, "getPrivateKey", GetPrivateKey); - env->SetProtoMethod(t, "setPublicKey", SetPublicKey); - env->SetProtoMethod(t, "setPrivateKey", SetPrivateKey); + SetProtoMethod(isolate, t, "generateKeys", GenerateKeys); + SetProtoMethod(isolate, t, "computeSecret", ComputeSecret); + SetProtoMethodNoSideEffect(isolate, t, "getPublicKey", GetPublicKey); + SetProtoMethodNoSideEffect(isolate, t, "getPrivateKey", GetPrivateKey); + SetProtoMethod(isolate, t, "setPublicKey", SetPublicKey); + SetProtoMethod(isolate, t, "setPrivateKey", SetPrivateKey); - env->SetConstructorFunction(target, "ECDH", t); + SetConstructorFunction(context, target, "ECDH", t); - env->SetMethodNoSideEffect(target, "ECDHConvertKey", ECDH::ConvertKey); - env->SetMethodNoSideEffect(target, "getCurves", ECDH::GetCurves); + SetMethodNoSideEffect(context, target, "ECDHConvertKey", ECDH::ConvertKey); + SetMethodNoSideEffect(context, target, "getCurves", ECDH::GetCurves); ECDHBitsJob::Initialize(env, target); ECKeyPairGenJob::Initialize(env, target); diff --git a/src/crypto/crypto_hash.cc b/src/crypto/crypto_hash.cc index 24dc436d24855e..200603a85ef33e 100644 --- a/src/crypto/crypto_hash.cc +++ b/src/crypto/crypto_hash.cc @@ -11,8 +11,10 @@ namespace node { +using v8::Context; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; +using v8::Isolate; using v8::Just; using v8::Local; using v8::Maybe; @@ -51,18 +53,20 @@ void Hash::GetHashes(const FunctionCallbackInfo& args) { } void Hash::Initialize(Environment* env, Local target) { - Local t = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local context = env->context(); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate()->SetInternalFieldCount( Hash::kInternalFieldCount); t->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "update", HashUpdate); - env->SetProtoMethod(t, "digest", HashDigest); + SetProtoMethod(isolate, t, "update", HashUpdate); + SetProtoMethod(isolate, t, "digest", HashDigest); - env->SetConstructorFunction(target, "Hash", t); + SetConstructorFunction(context, target, "Hash", t); - env->SetMethodNoSideEffect(target, "getHashes", GetHashes); + SetMethodNoSideEffect(context, target, "getHashes", GetHashes); HashJob::Initialize(env, target); } diff --git a/src/crypto/crypto_hmac.cc b/src/crypto/crypto_hmac.cc index afda92265e8dca..2e1c97ce48036d 100644 --- a/src/crypto/crypto_hmac.cc +++ b/src/crypto/crypto_hmac.cc @@ -16,6 +16,7 @@ namespace node { using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; +using v8::Isolate; using v8::Just; using v8::Local; using v8::Maybe; @@ -37,17 +38,18 @@ void Hmac::MemoryInfo(MemoryTracker* tracker) const { } void 
Hmac::Initialize(Environment* env, Local target) { - Local t = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate()->SetInternalFieldCount( Hmac::kInternalFieldCount); t->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "init", HmacInit); - env->SetProtoMethod(t, "update", HmacUpdate); - env->SetProtoMethod(t, "digest", HmacDigest); + SetProtoMethod(isolate, t, "init", HmacInit); + SetProtoMethod(isolate, t, "update", HmacUpdate); + SetProtoMethod(isolate, t, "digest", HmacDigest); - env->SetConstructorFunction(target, "Hmac", t); + SetConstructorFunction(env->context(), target, "Hmac", t); HmacJob::Initialize(env, target); } diff --git a/src/crypto/crypto_keys.cc b/src/crypto/crypto_keys.cc index ba37f24c48dad5..27e3f4f8676cb0 100644 --- a/src/crypto/crypto_keys.cc +++ b/src/crypto/crypto_keys.cc @@ -24,6 +24,7 @@ using v8::Function; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::Int32; +using v8::Isolate; using v8::Just; using v8::Local; using v8::Maybe; @@ -906,23 +907,24 @@ v8::Local KeyObjectHandle::Initialize(Environment* env) { if (!templ.IsEmpty()) { return templ; } - Local t = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate()->SetInternalFieldCount( KeyObjectHandle::kInternalFieldCount); t->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "init", Init); - env->SetProtoMethodNoSideEffect(t, "getSymmetricKeySize", - GetSymmetricKeySize); - env->SetProtoMethodNoSideEffect(t, "getAsymmetricKeyType", - GetAsymmetricKeyType); - env->SetProtoMethod(t, "export", Export); - env->SetProtoMethod(t, "exportJwk", ExportJWK); - env->SetProtoMethod(t, "initECRaw", InitECRaw); - env->SetProtoMethod(t, "initEDRaw", InitEDRaw); - env->SetProtoMethod(t, "initJwk", InitJWK); - env->SetProtoMethod(t, "keyDetail", GetKeyDetail); - env->SetProtoMethod(t, "equals", Equals); + SetProtoMethod(isolate, t, "init", Init); + SetProtoMethodNoSideEffect( + isolate, t, "getSymmetricKeySize", GetSymmetricKeySize); + SetProtoMethodNoSideEffect( + isolate, t, "getAsymmetricKeyType", GetAsymmetricKeyType); + SetProtoMethod(isolate, t, "export", Export); + SetProtoMethod(isolate, t, "exportJwk", ExportJWK); + SetProtoMethod(isolate, t, "initECRaw", InitECRaw); + SetProtoMethod(isolate, t, "initEDRaw", InitEDRaw); + SetProtoMethod(isolate, t, "initJwk", InitJWK); + SetProtoMethod(isolate, t, "keyDetail", GetKeyDetail); + SetProtoMethod(isolate, t, "equals", Equals); auto function = t->GetFunction(env->context()).ToLocalChecked(); env->set_crypto_key_object_handle_constructor(function); @@ -1317,8 +1319,10 @@ void KeyObjectHandle::ExportJWK( } void NativeKeyObject::Initialize(Environment* env, Local target) { - env->SetMethod(target, "createNativeKeyObjectClass", - NativeKeyObject::CreateNativeKeyObjectClass); + SetMethod(env->context(), + target, + "createNativeKeyObjectClass", + NativeKeyObject::CreateNativeKeyObjectClass); } void NativeKeyObject::RegisterExternalReferences( @@ -1338,12 +1342,14 @@ void NativeKeyObject::New(const FunctionCallbackInfo& args) { void NativeKeyObject::CreateNativeKeyObjectClass( const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); + Isolate* isolate = env->isolate(); CHECK_EQ(args.Length(), 1); Local callback = args[0]; CHECK(callback->IsFunction()); - Local t = env->NewFunctionTemplate(NativeKeyObject::New); + 
Local t = + NewFunctionTemplate(isolate, NativeKeyObject::New); t->InstanceTemplate()->SetInternalFieldCount( KeyObjectHandle::kInternalFieldCount); t->Inherit(BaseObject::GetConstructorTemplate(env)); diff --git a/src/crypto/crypto_sig.cc b/src/crypto/crypto_sig.cc index a72649ab8ec16f..23f7ec92ff3439 100644 --- a/src/crypto/crypto_sig.cc +++ b/src/crypto/crypto_sig.cc @@ -17,6 +17,7 @@ using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; using v8::Int32; +using v8::Isolate; using v8::Just; using v8::Local; using v8::Maybe; @@ -325,17 +326,18 @@ Sign::Sign(Environment* env, Local wrap) : SignBase(env, wrap) { } void Sign::Initialize(Environment* env, Local target) { - Local t = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate()->SetInternalFieldCount( SignBase::kInternalFieldCount); t->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "init", SignInit); - env->SetProtoMethod(t, "update", SignUpdate); - env->SetProtoMethod(t, "sign", SignFinal); + SetProtoMethod(isolate, t, "init", SignInit); + SetProtoMethod(isolate, t, "update", SignUpdate); + SetProtoMethod(isolate, t, "sign", SignFinal); - env->SetConstructorFunction(target, "Sign", t); + SetConstructorFunction(env->context(), target, "Sign", t); SignJob::Initialize(env, target); @@ -454,17 +456,18 @@ Verify::Verify(Environment* env, Local wrap) } void Verify::Initialize(Environment* env, Local target) { - Local t = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate()->SetInternalFieldCount( SignBase::kInternalFieldCount); t->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "init", VerifyInit); - env->SetProtoMethod(t, "update", VerifyUpdate); - env->SetProtoMethod(t, "verify", VerifyFinal); + SetProtoMethod(isolate, t, "init", VerifyInit); + SetProtoMethod(isolate, t, "update", VerifyUpdate); + SetProtoMethod(isolate, t, "verify", VerifyFinal); - env->SetConstructorFunction(target, "Verify", t); + SetConstructorFunction(env->context(), target, "Verify", t); } void Verify::RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/crypto/crypto_spkac.cc b/src/crypto/crypto_spkac.cc index 7cda346907e421..c4f43b8ced5ed7 100644 --- a/src/crypto/crypto_spkac.cc +++ b/src/crypto/crypto_spkac.cc @@ -8,6 +8,7 @@ namespace node { +using v8::Context; using v8::FunctionCallbackInfo; using v8::Local; using v8::Object; @@ -128,9 +129,12 @@ void ExportChallenge(const FunctionCallbackInfo& args) { } void Initialize(Environment* env, Local target) { - env->SetMethodNoSideEffect(target, "certVerifySpkac", VerifySpkac); - env->SetMethodNoSideEffect(target, "certExportPublicKey", ExportPublicKey); - env->SetMethodNoSideEffect(target, "certExportChallenge", ExportChallenge); + Local context = env->context(); + SetMethodNoSideEffect(context, target, "certVerifySpkac", VerifySpkac); + SetMethodNoSideEffect( + context, target, "certExportPublicKey", ExportPublicKey); + SetMethodNoSideEffect( + context, target, "certExportChallenge", ExportChallenge); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/crypto/crypto_timing.cc b/src/crypto/crypto_timing.cc index 3cc1a12ec009d6..8904f6b140dbb5 100644 --- a/src/crypto/crypto_timing.cc +++ b/src/crypto/crypto_timing.cc @@ -47,7 +47,8 @@ void TimingSafeEqual(const FunctionCallbackInfo& args) { } void 
Initialize(Environment* env, Local target) { - env->SetMethodNoSideEffect(target, "timingSafeEqual", TimingSafeEqual); + SetMethodNoSideEffect( + env->context(), target, "timingSafeEqual", TimingSafeEqual); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(TimingSafeEqual); diff --git a/src/crypto/crypto_tls.cc b/src/crypto/crypto_tls.cc index 6e6ef5251fca0e..849b21f28dfda8 100644 --- a/src/crypto/crypto_tls.cc +++ b/src/crypto/crypto_tls.cc @@ -2030,8 +2030,9 @@ void TLSWrap::Initialize( Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - env->SetMethod(target, "wrap", TLSWrap::Wrap); + SetMethod(context, target, "wrap", TLSWrap::Wrap); NODE_DEFINE_CONSTANT(target, HAVE_SSL_TRACE); @@ -2054,54 +2055,56 @@ void TLSWrap::Initialize( t->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "certCbDone", CertCbDone); - env->SetProtoMethod(t, "destroySSL", DestroySSL); - env->SetProtoMethod(t, "enableCertCb", EnableCertCb); - env->SetProtoMethod(t, "endParser", EndParser); - env->SetProtoMethod(t, "enableKeylogCallback", EnableKeylogCallback); - env->SetProtoMethod(t, "enableSessionCallbacks", EnableSessionCallbacks); - env->SetProtoMethod(t, "enableTrace", EnableTrace); - env->SetProtoMethod(t, "getServername", GetServername); - env->SetProtoMethod(t, "loadSession", LoadSession); - env->SetProtoMethod(t, "newSessionDone", NewSessionDone); - env->SetProtoMethod(t, "receive", Receive); - env->SetProtoMethod(t, "renegotiate", Renegotiate); - env->SetProtoMethod(t, "requestOCSP", RequestOCSP); - env->SetProtoMethod(t, "setALPNProtocols", SetALPNProtocols); - env->SetProtoMethod(t, "setOCSPResponse", SetOCSPResponse); - env->SetProtoMethod(t, "setServername", SetServername); - env->SetProtoMethod(t, "setSession", SetSession); - env->SetProtoMethod(t, "setVerifyMode", SetVerifyMode); - env->SetProtoMethod(t, "start", Start); - - env->SetProtoMethodNoSideEffect(t, "exportKeyingMaterial", - ExportKeyingMaterial); - env->SetProtoMethodNoSideEffect(t, "isSessionReused", IsSessionReused); - env->SetProtoMethodNoSideEffect(t, "getALPNNegotiatedProtocol", - GetALPNNegotiatedProto); - env->SetProtoMethodNoSideEffect(t, "getCertificate", GetCertificate); - env->SetProtoMethodNoSideEffect(t, "getX509Certificate", GetX509Certificate); - env->SetProtoMethodNoSideEffect(t, "getCipher", GetCipher); - env->SetProtoMethodNoSideEffect(t, "getEphemeralKeyInfo", - GetEphemeralKeyInfo); - env->SetProtoMethodNoSideEffect(t, "getFinished", GetFinished); - env->SetProtoMethodNoSideEffect(t, "getPeerCertificate", GetPeerCertificate); - env->SetProtoMethodNoSideEffect(t, "getPeerX509Certificate", - GetPeerX509Certificate); - env->SetProtoMethodNoSideEffect(t, "getPeerFinished", GetPeerFinished); - env->SetProtoMethodNoSideEffect(t, "getProtocol", GetProtocol); - env->SetProtoMethodNoSideEffect(t, "getSession", GetSession); - env->SetProtoMethodNoSideEffect(t, "getSharedSigalgs", GetSharedSigalgs); - env->SetProtoMethodNoSideEffect(t, "getTLSTicket", GetTLSTicket); - env->SetProtoMethodNoSideEffect(t, "verifyError", VerifyError); + SetProtoMethod(isolate, t, "certCbDone", CertCbDone); + SetProtoMethod(isolate, t, "destroySSL", DestroySSL); + SetProtoMethod(isolate, t, "enableCertCb", EnableCertCb); + SetProtoMethod(isolate, t, "endParser", EndParser); + SetProtoMethod(isolate, t, "enableKeylogCallback", EnableKeylogCallback); + SetProtoMethod(isolate, t, "enableSessionCallbacks", 
EnableSessionCallbacks); + SetProtoMethod(isolate, t, "enableTrace", EnableTrace); + SetProtoMethod(isolate, t, "getServername", GetServername); + SetProtoMethod(isolate, t, "loadSession", LoadSession); + SetProtoMethod(isolate, t, "newSessionDone", NewSessionDone); + SetProtoMethod(isolate, t, "receive", Receive); + SetProtoMethod(isolate, t, "renegotiate", Renegotiate); + SetProtoMethod(isolate, t, "requestOCSP", RequestOCSP); + SetProtoMethod(isolate, t, "setALPNProtocols", SetALPNProtocols); + SetProtoMethod(isolate, t, "setOCSPResponse", SetOCSPResponse); + SetProtoMethod(isolate, t, "setServername", SetServername); + SetProtoMethod(isolate, t, "setSession", SetSession); + SetProtoMethod(isolate, t, "setVerifyMode", SetVerifyMode); + SetProtoMethod(isolate, t, "start", Start); + + SetProtoMethodNoSideEffect( + isolate, t, "exportKeyingMaterial", ExportKeyingMaterial); + SetProtoMethodNoSideEffect(isolate, t, "isSessionReused", IsSessionReused); + SetProtoMethodNoSideEffect( + isolate, t, "getALPNNegotiatedProtocol", GetALPNNegotiatedProto); + SetProtoMethodNoSideEffect(isolate, t, "getCertificate", GetCertificate); + SetProtoMethodNoSideEffect( + isolate, t, "getX509Certificate", GetX509Certificate); + SetProtoMethodNoSideEffect(isolate, t, "getCipher", GetCipher); + SetProtoMethodNoSideEffect( + isolate, t, "getEphemeralKeyInfo", GetEphemeralKeyInfo); + SetProtoMethodNoSideEffect(isolate, t, "getFinished", GetFinished); + SetProtoMethodNoSideEffect( + isolate, t, "getPeerCertificate", GetPeerCertificate); + SetProtoMethodNoSideEffect( + isolate, t, "getPeerX509Certificate", GetPeerX509Certificate); + SetProtoMethodNoSideEffect(isolate, t, "getPeerFinished", GetPeerFinished); + SetProtoMethodNoSideEffect(isolate, t, "getProtocol", GetProtocol); + SetProtoMethodNoSideEffect(isolate, t, "getSession", GetSession); + SetProtoMethodNoSideEffect(isolate, t, "getSharedSigalgs", GetSharedSigalgs); + SetProtoMethodNoSideEffect(isolate, t, "getTLSTicket", GetTLSTicket); + SetProtoMethodNoSideEffect(isolate, t, "verifyError", VerifyError); #ifdef SSL_set_max_send_fragment - env->SetProtoMethod(t, "setMaxSendFragment", SetMaxSendFragment); + SetProtoMethod(isolate, t, "setMaxSendFragment", SetMaxSendFragment); #endif // SSL_set_max_send_fragment #ifndef OPENSSL_NO_PSK - env->SetProtoMethod(t, "enablePskCallback", EnablePskCallback); - env->SetProtoMethod(t, "setPskIdentityHint", SetPskIdentityHint); + SetProtoMethod(isolate, t, "enablePskCallback", EnablePskCallback); + SetProtoMethod(isolate, t, "setPskIdentityHint", SetPskIdentityHint); #endif // !OPENSSL_NO_PSK StreamBase::AddMethods(env, t); diff --git a/src/crypto/crypto_util.cc b/src/crypto/crypto_util.cc index 5d8f0bbe8e3cb4..a804fe8dd071aa 100644 --- a/src/crypto/crypto_util.cc +++ b/src/crypto/crypto_util.cc @@ -712,19 +712,20 @@ void SecureHeapUsed(const FunctionCallbackInfo& args) { namespace Util { void Initialize(Environment* env, Local target) { + Local context = env->context(); #ifndef OPENSSL_NO_ENGINE - env->SetMethod(target, "setEngine", SetEngine); + SetMethod(context, target, "setEngine", SetEngine); #endif // !OPENSSL_NO_ENGINE - env->SetMethodNoSideEffect(target, "getFipsCrypto", GetFipsCrypto); - env->SetMethod(target, "setFipsCrypto", SetFipsCrypto); - env->SetMethodNoSideEffect(target, "testFipsCrypto", TestFipsCrypto); + SetMethodNoSideEffect(context, target, "getFipsCrypto", GetFipsCrypto); + SetMethod(context, target, "setFipsCrypto", SetFipsCrypto); + SetMethodNoSideEffect(context, target, "testFipsCrypto", 
TestFipsCrypto); NODE_DEFINE_CONSTANT(target, kCryptoJobAsync); NODE_DEFINE_CONSTANT(target, kCryptoJobSync); - env->SetMethod(target, "secureBuffer", SecureBuffer); - env->SetMethod(target, "secureHeapUsed", SecureHeapUsed); + SetMethod(context, target, "secureBuffer", SecureBuffer); + SetMethod(context, target, "secureHeapUsed", SecureHeapUsed); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { #ifndef OPENSSL_NO_ENGINE diff --git a/src/crypto/crypto_util.h b/src/crypto/crypto_util.h index 9e33b3777b00d7..69ada7995bd226 100644 --- a/src/crypto/crypto_util.h +++ b/src/crypto/crypto_util.h @@ -443,12 +443,15 @@ class CryptoJob : public AsyncWrap, public ThreadPoolWork { v8::FunctionCallback new_fn, Environment* env, v8::Local target) { - v8::Local job = env->NewFunctionTemplate(new_fn); + v8::Isolate* isolate = env->isolate(); + v8::HandleScope scope(isolate); + v8::Local context = env->context(); + v8::Local job = NewFunctionTemplate(isolate, new_fn); job->Inherit(AsyncWrap::GetConstructorTemplate(env)); job->InstanceTemplate()->SetInternalFieldCount( AsyncWrap::kInternalFieldCount); - env->SetProtoMethod(job, "run", Run); - env->SetConstructorFunction(target, CryptoJobTraits::JobName, job); + SetProtoMethod(isolate, job, "run", Run); + SetConstructorFunction(context, target, CryptoJobTraits::JobName, job); } static void RegisterExternalReferences(v8::FunctionCallback new_fn, diff --git a/src/crypto/crypto_x509.cc b/src/crypto/crypto_x509.cc index a301a1392152ec..3c30749c394655 100644 --- a/src/crypto/crypto_x509.cc +++ b/src/crypto/crypto_x509.cc @@ -21,6 +21,7 @@ using v8::EscapableHandleScope; using v8::Function; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; +using v8::Isolate; using v8::Local; using v8::MaybeLocal; using v8::Object; @@ -54,35 +55,36 @@ Local X509Certificate::GetConstructorTemplate( Environment* env) { Local tmpl = env->x509_constructor_template(); if (tmpl.IsEmpty()) { - tmpl = FunctionTemplate::New(env->isolate()); + Isolate* isolate = env->isolate(); + tmpl = NewFunctionTemplate(isolate, nullptr); tmpl->InstanceTemplate()->SetInternalFieldCount( BaseObject::kInternalFieldCount); tmpl->Inherit(BaseObject::GetConstructorTemplate(env)); tmpl->SetClassName( FIXED_ONE_BYTE_STRING(env->isolate(), "X509Certificate")); - env->SetProtoMethod(tmpl, "subject", Subject); - env->SetProtoMethod(tmpl, "subjectAltName", SubjectAltName); - env->SetProtoMethod(tmpl, "infoAccess", InfoAccess); - env->SetProtoMethod(tmpl, "issuer", Issuer); - env->SetProtoMethod(tmpl, "validTo", ValidTo); - env->SetProtoMethod(tmpl, "validFrom", ValidFrom); - env->SetProtoMethod(tmpl, "fingerprint", Fingerprint); - env->SetProtoMethod(tmpl, "fingerprint256", Fingerprint256); - env->SetProtoMethod(tmpl, "fingerprint512", Fingerprint512); - env->SetProtoMethod(tmpl, "keyUsage", KeyUsage); - env->SetProtoMethod(tmpl, "serialNumber", SerialNumber); - env->SetProtoMethod(tmpl, "pem", Pem); - env->SetProtoMethod(tmpl, "raw", Raw); - env->SetProtoMethod(tmpl, "publicKey", PublicKey); - env->SetProtoMethod(tmpl, "checkCA", CheckCA); - env->SetProtoMethod(tmpl, "checkHost", CheckHost); - env->SetProtoMethod(tmpl, "checkEmail", CheckEmail); - env->SetProtoMethod(tmpl, "checkIP", CheckIP); - env->SetProtoMethod(tmpl, "checkIssued", CheckIssued); - env->SetProtoMethod(tmpl, "checkPrivateKey", CheckPrivateKey); - env->SetProtoMethod(tmpl, "verify", Verify); - env->SetProtoMethod(tmpl, "toLegacy", ToLegacy); - env->SetProtoMethod(tmpl, "getIssuerCert", GetIssuerCert); + 
SetProtoMethod(isolate, tmpl, "subject", Subject); + SetProtoMethod(isolate, tmpl, "subjectAltName", SubjectAltName); + SetProtoMethod(isolate, tmpl, "infoAccess", InfoAccess); + SetProtoMethod(isolate, tmpl, "issuer", Issuer); + SetProtoMethod(isolate, tmpl, "validTo", ValidTo); + SetProtoMethod(isolate, tmpl, "validFrom", ValidFrom); + SetProtoMethod(isolate, tmpl, "fingerprint", Fingerprint); + SetProtoMethod(isolate, tmpl, "fingerprint256", Fingerprint256); + SetProtoMethod(isolate, tmpl, "fingerprint512", Fingerprint512); + SetProtoMethod(isolate, tmpl, "keyUsage", KeyUsage); + SetProtoMethod(isolate, tmpl, "serialNumber", SerialNumber); + SetProtoMethod(isolate, tmpl, "pem", Pem); + SetProtoMethod(isolate, tmpl, "raw", Raw); + SetProtoMethod(isolate, tmpl, "publicKey", PublicKey); + SetProtoMethod(isolate, tmpl, "checkCA", CheckCA); + SetProtoMethod(isolate, tmpl, "checkHost", CheckHost); + SetProtoMethod(isolate, tmpl, "checkEmail", CheckEmail); + SetProtoMethod(isolate, tmpl, "checkIP", CheckIP); + SetProtoMethod(isolate, tmpl, "checkIssued", CheckIssued); + SetProtoMethod(isolate, tmpl, "checkPrivateKey", CheckPrivateKey); + SetProtoMethod(isolate, tmpl, "verify", Verify); + SetProtoMethod(isolate, tmpl, "toLegacy", ToLegacy); + SetProtoMethod(isolate, tmpl, "getIssuerCert", GetIssuerCert); env->set_x509_constructor_template(tmpl); } return tmpl; @@ -543,7 +545,7 @@ std::unique_ptr X509Certificate::CloneForMessaging() void X509Certificate::Initialize(Environment* env, Local target) { - env->SetMethod(target, "parseX509", X509Certificate::Parse); + SetMethod(env->context(), target, "parseX509", X509Certificate::Parse); NODE_DEFINE_CONSTANT(target, X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT); NODE_DEFINE_CONSTANT(target, X509_CHECK_FLAG_NEVER_CHECK_SUBJECT); diff --git a/src/env.cc b/src/env.cc index 0a6140aa2157e3..e677c97f9a66ee 100644 --- a/src/env.cc +++ b/src/env.cc @@ -580,147 +580,6 @@ std::unique_ptr Environment::release_managed_buffer( return bs; } -Local Environment::NewFunctionTemplate( - v8::FunctionCallback callback, - Local signature, - v8::ConstructorBehavior behavior, - v8::SideEffectType side_effect_type, - const v8::CFunction* c_function) { - return v8::FunctionTemplate::New(isolate(), - callback, - Local(), - signature, - 0, - behavior, - side_effect_type, - c_function); -} - -void Environment::SetMethod(Local that, - const char* name, - v8::FunctionCallback callback) { - Local context = isolate()->GetCurrentContext(); - Local function = - NewFunctionTemplate(callback, - Local(), - v8::ConstructorBehavior::kThrow, - v8::SideEffectType::kHasSideEffect) - ->GetFunction(context) - .ToLocalChecked(); - // kInternalized strings are created in the old space. - const v8::NewStringType type = v8::NewStringType::kInternalized; - Local name_string = - v8::String::NewFromUtf8(isolate(), name, type).ToLocalChecked(); - that->Set(context, name_string, function).Check(); - function->SetName(name_string); // NODE_SET_METHOD() compatibility. 
-} - -void Environment::SetFastMethod(Local that, - const char* name, - v8::FunctionCallback slow_callback, - const v8::CFunction* c_function) { - Local context = isolate()->GetCurrentContext(); - Local function = - NewFunctionTemplate(slow_callback, - Local(), - v8::ConstructorBehavior::kThrow, - v8::SideEffectType::kHasNoSideEffect, - c_function) - ->GetFunction(context) - .ToLocalChecked(); - const v8::NewStringType type = v8::NewStringType::kInternalized; - Local name_string = - v8::String::NewFromUtf8(isolate(), name, type).ToLocalChecked(); - that->Set(context, name_string, function).Check(); -} - -void Environment::SetMethodNoSideEffect(Local that, - const char* name, - v8::FunctionCallback callback) { - Local context = isolate()->GetCurrentContext(); - Local function = - NewFunctionTemplate(callback, - Local(), - v8::ConstructorBehavior::kThrow, - v8::SideEffectType::kHasNoSideEffect) - ->GetFunction(context) - .ToLocalChecked(); - // kInternalized strings are created in the old space. - const v8::NewStringType type = v8::NewStringType::kInternalized; - Local name_string = - v8::String::NewFromUtf8(isolate(), name, type).ToLocalChecked(); - that->Set(context, name_string, function).Check(); - function->SetName(name_string); // NODE_SET_METHOD() compatibility. -} - -void Environment::SetProtoMethod(Local that, - const char* name, - v8::FunctionCallback callback) { - Local signature = v8::Signature::New(isolate(), that); - Local t = - NewFunctionTemplate(callback, - signature, - v8::ConstructorBehavior::kThrow, - v8::SideEffectType::kHasSideEffect); - // kInternalized strings are created in the old space. - const v8::NewStringType type = v8::NewStringType::kInternalized; - Local name_string = - v8::String::NewFromUtf8(isolate(), name, type).ToLocalChecked(); - that->PrototypeTemplate()->Set(name_string, t); - t->SetClassName(name_string); // NODE_SET_PROTOTYPE_METHOD() compatibility. -} - -void Environment::SetProtoMethodNoSideEffect(Local that, - const char* name, - v8::FunctionCallback callback) { - Local signature = v8::Signature::New(isolate(), that); - Local t = - NewFunctionTemplate(callback, - signature, - v8::ConstructorBehavior::kThrow, - v8::SideEffectType::kHasNoSideEffect); - // kInternalized strings are created in the old space. - const v8::NewStringType type = v8::NewStringType::kInternalized; - Local name_string = - v8::String::NewFromUtf8(isolate(), name, type).ToLocalChecked(); - that->PrototypeTemplate()->Set(name_string, t); - t->SetClassName(name_string); // NODE_SET_PROTOTYPE_METHOD() compatibility. -} - -void Environment::SetInstanceMethod(Local that, - const char* name, - v8::FunctionCallback callback) { - Local signature = v8::Signature::New(isolate(), that); - Local t = - NewFunctionTemplate(callback, - signature, - v8::ConstructorBehavior::kThrow, - v8::SideEffectType::kHasSideEffect); - // kInternalized strings are created in the old space. 
- const v8::NewStringType type = v8::NewStringType::kInternalized; - Local name_string = - v8::String::NewFromUtf8(isolate(), name, type).ToLocalChecked(); - that->InstanceTemplate()->Set(name_string, t); - t->SetClassName(name_string); -} - -void Environment::SetConstructorFunction(Local that, - const char* name, - Local tmpl, - SetConstructorFunctionFlag flag) { - SetConstructorFunction(that, OneByteString(isolate(), name), tmpl, flag); -} - -void Environment::SetConstructorFunction(Local that, - Local name, - Local tmpl, - SetConstructorFunctionFlag flag) { - if (LIKELY(flag == SetConstructorFunctionFlag::SET_CLASS_NAME)) - tmpl->SetClassName(name); - that->Set(context(), name, tmpl->GetFunction(context()).ToLocalChecked()) - .Check(); -} - void Environment::CreateProperties() { HandleScope handle_scope(isolate_); Local ctx = context(); @@ -2213,8 +2072,8 @@ void BaseObject::LazilyInitializedJSTemplateConstructor( Local BaseObject::MakeLazilyInitializedJSTemplate( Environment* env) { - Local t = - env->NewFunctionTemplate(LazilyInitializedJSTemplateConstructor); + Local t = NewFunctionTemplate( + env->isolate(), LazilyInitializedJSTemplateConstructor); t->Inherit(BaseObject::GetConstructorTemplate(env)); t->InstanceTemplate()->SetInternalFieldCount(BaseObject::kInternalFieldCount); return t; @@ -2273,7 +2132,7 @@ bool BaseObject::IsRootNode() const { Local BaseObject::GetConstructorTemplate(Environment* env) { Local tmpl = env->base_object_ctor_template(); if (tmpl.IsEmpty()) { - tmpl = env->NewFunctionTemplate(nullptr); + tmpl = NewFunctionTemplate(env->isolate(), nullptr); tmpl->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "BaseObject")); env->set_base_object_ctor_template(tmpl); } diff --git a/src/env.h b/src/env.h index d38bc5338bc3e8..8ccad29448f01c 100644 --- a/src/env.h +++ b/src/env.h @@ -1284,56 +1284,6 @@ class Environment : public MemoryRetainer { const char* path = nullptr, const char* dest = nullptr); - v8::Local NewFunctionTemplate( - v8::FunctionCallback callback, - v8::Local signature = v8::Local(), - v8::ConstructorBehavior behavior = v8::ConstructorBehavior::kAllow, - v8::SideEffectType side_effect = v8::SideEffectType::kHasSideEffect, - const v8::CFunction* c_function = nullptr); - - // Convenience methods for NewFunctionTemplate(). - void SetMethod(v8::Local that, - const char* name, - v8::FunctionCallback callback); - - void SetFastMethod(v8::Local that, - const char* name, - v8::FunctionCallback slow_callback, - const v8::CFunction* c_function); - - void SetProtoMethod(v8::Local that, - const char* name, - v8::FunctionCallback callback); - - void SetInstanceMethod(v8::Local that, - const char* name, - v8::FunctionCallback callback); - - // Safe variants denote the function has no side effects. 
- void SetMethodNoSideEffect(v8::Local that, - const char* name, - v8::FunctionCallback callback); - void SetProtoMethodNoSideEffect(v8::Local that, - const char* name, - v8::FunctionCallback callback); - - enum class SetConstructorFunctionFlag { - NONE, - SET_CLASS_NAME, - }; - - void SetConstructorFunction(v8::Local that, - const char* name, - v8::Local tmpl, - SetConstructorFunctionFlag flag = - SetConstructorFunctionFlag::SET_CLASS_NAME); - - void SetConstructorFunction(v8::Local that, - v8::Local name, - v8::Local tmpl, - SetConstructorFunctionFlag flag = - SetConstructorFunctionFlag::SET_CLASS_NAME); - void AtExit(void (*cb)(void* arg), void* arg); void RunAtExitCallbacks(); diff --git a/src/fs_event_wrap.cc b/src/fs_event_wrap.cc index f13d22535c1015..7b4a3278028a96 100644 --- a/src/fs_event_wrap.cc +++ b/src/fs_event_wrap.cc @@ -35,6 +35,7 @@ using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; using v8::Integer; +using v8::Isolate; using v8::Local; using v8::MaybeLocal; using v8::Object; @@ -95,13 +96,14 @@ void FSEventWrap::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - Local t = env->NewFunctionTemplate(New); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate()->SetInternalFieldCount( FSEventWrap::kInternalFieldCount); t->Inherit(HandleWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "start", Start); + SetProtoMethod(isolate, t, "start", Start); Local get_initialized_templ = FunctionTemplate::New(env->isolate(), @@ -115,7 +117,7 @@ void FSEventWrap::Initialize(Local target, Local(), static_cast(ReadOnly | DontDelete | DontEnum)); - env->SetConstructorFunction(target, "FSEvent", t); + SetConstructorFunction(context, target, "FSEvent", t); } void FSEventWrap::RegisterExternalReferences( diff --git a/src/handle_wrap.cc b/src/handle_wrap.cc index caad0e0554622a..4ea6a67d57f8e2 100644 --- a/src/handle_wrap.cc +++ b/src/handle_wrap.cc @@ -31,6 +31,7 @@ using v8::Context; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; +using v8::Isolate; using v8::Local; using v8::Object; using v8::Value; @@ -157,13 +158,14 @@ void HandleWrap::OnClose(uv_handle_t* handle) { Local HandleWrap::GetConstructorTemplate(Environment* env) { Local tmpl = env->handle_wrap_ctor_template(); if (tmpl.IsEmpty()) { - tmpl = env->NewFunctionTemplate(nullptr); + Isolate* isolate = env->isolate(); + tmpl = NewFunctionTemplate(isolate, nullptr); tmpl->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "HandleWrap")); tmpl->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(tmpl, "close", HandleWrap::Close); - env->SetProtoMethodNoSideEffect(tmpl, "hasRef", HandleWrap::HasRef); - env->SetProtoMethod(tmpl, "ref", HandleWrap::Ref); - env->SetProtoMethod(tmpl, "unref", HandleWrap::Unref); + SetProtoMethod(isolate, tmpl, "close", HandleWrap::Close); + SetProtoMethodNoSideEffect(isolate, tmpl, "hasRef", HandleWrap::HasRef); + SetProtoMethod(isolate, tmpl, "ref", HandleWrap::Ref); + SetProtoMethod(isolate, tmpl, "unref", HandleWrap::Unref); env->set_handle_wrap_ctor_template(tmpl); } return tmpl; diff --git a/src/heap_utils.cc b/src/heap_utils.cc index 467e7ee5f82692..8556b58f151092 100644 --- a/src/heap_utils.cc +++ b/src/heap_utils.cc @@ -448,11 +448,10 @@ void Initialize(Local target, Local unused, Local context, void* priv) { - Environment* env = Environment::GetCurrent(context); - - env->SetMethod(target, 
"buildEmbedderGraph", BuildEmbedderGraph); - env->SetMethod(target, "triggerHeapSnapshot", TriggerHeapSnapshot); - env->SetMethod(target, "createHeapSnapshotStream", CreateHeapSnapshotStream); + SetMethod(context, target, "buildEmbedderGraph", BuildEmbedderGraph); + SetMethod(context, target, "triggerHeapSnapshot", TriggerHeapSnapshot); + SetMethod( + context, target, "createHeapSnapshotStream", CreateHeapSnapshotStream); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/histogram.cc b/src/histogram.cc index 63fa52a57df157..3a3228ddc9eb6b 100644 --- a/src/histogram.cc +++ b/src/histogram.cc @@ -11,6 +11,7 @@ using v8::BigInt; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::Integer; +using v8::Isolate; using v8::Local; using v8::Map; using v8::Number; @@ -280,7 +281,8 @@ Local HistogramBase::GetConstructorTemplate( Environment* env) { Local tmpl = env->histogram_ctor_template(); if (tmpl.IsEmpty()) { - tmpl = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + tmpl = NewFunctionTemplate(isolate, New); Local classname = FIXED_ONE_BYTE_STRING(env->isolate(), "Histogram"); tmpl->SetClassName(classname); @@ -288,26 +290,27 @@ Local HistogramBase::GetConstructorTemplate( tmpl->InstanceTemplate()->SetInternalFieldCount( HistogramBase::kInternalFieldCount); - env->SetProtoMethodNoSideEffect(tmpl, "count", GetCount); - env->SetProtoMethodNoSideEffect(tmpl, "countBigInt", GetCountBigInt); - env->SetProtoMethodNoSideEffect(tmpl, "exceeds", GetExceeds); - env->SetProtoMethodNoSideEffect(tmpl, "exceedsBigInt", GetExceedsBigInt); - env->SetProtoMethodNoSideEffect(tmpl, "min", GetMin); - env->SetProtoMethodNoSideEffect(tmpl, "minBigInt", GetMinBigInt); - env->SetProtoMethodNoSideEffect(tmpl, "max", GetMax); - env->SetProtoMethodNoSideEffect(tmpl, "maxBigInt", GetMaxBigInt); - env->SetProtoMethodNoSideEffect(tmpl, "mean", GetMean); - env->SetProtoMethodNoSideEffect(tmpl, "stddev", GetStddev); - env->SetProtoMethodNoSideEffect(tmpl, "percentile", GetPercentile); - env->SetProtoMethodNoSideEffect(tmpl, "percentileBigInt", - GetPercentileBigInt); - env->SetProtoMethodNoSideEffect(tmpl, "percentiles", GetPercentiles); - env->SetProtoMethodNoSideEffect(tmpl, "percentilesBigInt", - GetPercentilesBigInt); - env->SetProtoMethod(tmpl, "reset", DoReset); - env->SetProtoMethod(tmpl, "record", Record); - env->SetProtoMethod(tmpl, "recordDelta", RecordDelta); - env->SetProtoMethod(tmpl, "add", Add); + SetProtoMethodNoSideEffect(isolate, tmpl, "count", GetCount); + SetProtoMethodNoSideEffect(isolate, tmpl, "countBigInt", GetCountBigInt); + SetProtoMethodNoSideEffect(isolate, tmpl, "exceeds", GetExceeds); + SetProtoMethodNoSideEffect( + isolate, tmpl, "exceedsBigInt", GetExceedsBigInt); + SetProtoMethodNoSideEffect(isolate, tmpl, "min", GetMin); + SetProtoMethodNoSideEffect(isolate, tmpl, "minBigInt", GetMinBigInt); + SetProtoMethodNoSideEffect(isolate, tmpl, "max", GetMax); + SetProtoMethodNoSideEffect(isolate, tmpl, "maxBigInt", GetMaxBigInt); + SetProtoMethodNoSideEffect(isolate, tmpl, "mean", GetMean); + SetProtoMethodNoSideEffect(isolate, tmpl, "stddev", GetStddev); + SetProtoMethodNoSideEffect(isolate, tmpl, "percentile", GetPercentile); + SetProtoMethodNoSideEffect( + isolate, tmpl, "percentileBigInt", GetPercentileBigInt); + SetProtoMethodNoSideEffect(isolate, tmpl, "percentiles", GetPercentiles); + SetProtoMethodNoSideEffect( + isolate, tmpl, "percentilesBigInt", GetPercentilesBigInt); + SetProtoMethod(isolate, tmpl, "reset", 
DoReset); + SetProtoMethod(isolate, tmpl, "record", Record); + SetProtoMethod(isolate, tmpl, "recordDelta", RecordDelta); + SetProtoMethod(isolate, tmpl, "add", Add); env->set_histogram_ctor_template(tmpl); } return tmpl; @@ -337,7 +340,8 @@ void HistogramBase::RegisterExternalReferences( } void HistogramBase::Initialize(Environment* env, Local target) { - env->SetConstructorFunction(target, "Histogram", GetConstructorTemplate(env)); + SetConstructorFunction( + env->context(), target, "Histogram", GetConstructorTemplate(env)); } BaseObjectPtr HistogramBase::HistogramTransferData::Deserialize( @@ -360,29 +364,31 @@ Local IntervalHistogram::GetConstructorTemplate( Environment* env) { Local tmpl = env->intervalhistogram_constructor_template(); if (tmpl.IsEmpty()) { - tmpl = FunctionTemplate::New(env->isolate()); + Isolate* isolate = env->isolate(); + tmpl = NewFunctionTemplate(isolate, nullptr); tmpl->Inherit(HandleWrap::GetConstructorTemplate(env)); tmpl->InstanceTemplate()->SetInternalFieldCount( HistogramBase::kInternalFieldCount); - env->SetProtoMethodNoSideEffect(tmpl, "count", GetCount); - env->SetProtoMethodNoSideEffect(tmpl, "countBigInt", GetCountBigInt); - env->SetProtoMethodNoSideEffect(tmpl, "exceeds", GetExceeds); - env->SetProtoMethodNoSideEffect(tmpl, "exceedsBigInt", GetExceedsBigInt); - env->SetProtoMethodNoSideEffect(tmpl, "min", GetMin); - env->SetProtoMethodNoSideEffect(tmpl, "minBigInt", GetMinBigInt); - env->SetProtoMethodNoSideEffect(tmpl, "max", GetMax); - env->SetProtoMethodNoSideEffect(tmpl, "maxBigInt", GetMaxBigInt); - env->SetProtoMethodNoSideEffect(tmpl, "mean", GetMean); - env->SetProtoMethodNoSideEffect(tmpl, "stddev", GetStddev); - env->SetProtoMethodNoSideEffect(tmpl, "percentile", GetPercentile); - env->SetProtoMethodNoSideEffect(tmpl, "percentileBigInt", - GetPercentileBigInt); - env->SetProtoMethodNoSideEffect(tmpl, "percentiles", GetPercentiles); - env->SetProtoMethodNoSideEffect(tmpl, "percentilesBigInt", - GetPercentilesBigInt); - env->SetProtoMethod(tmpl, "reset", DoReset); - env->SetProtoMethod(tmpl, "start", Start); - env->SetProtoMethod(tmpl, "stop", Stop); + SetProtoMethodNoSideEffect(isolate, tmpl, "count", GetCount); + SetProtoMethodNoSideEffect(isolate, tmpl, "countBigInt", GetCountBigInt); + SetProtoMethodNoSideEffect(isolate, tmpl, "exceeds", GetExceeds); + SetProtoMethodNoSideEffect( + isolate, tmpl, "exceedsBigInt", GetExceedsBigInt); + SetProtoMethodNoSideEffect(isolate, tmpl, "min", GetMin); + SetProtoMethodNoSideEffect(isolate, tmpl, "minBigInt", GetMinBigInt); + SetProtoMethodNoSideEffect(isolate, tmpl, "max", GetMax); + SetProtoMethodNoSideEffect(isolate, tmpl, "maxBigInt", GetMaxBigInt); + SetProtoMethodNoSideEffect(isolate, tmpl, "mean", GetMean); + SetProtoMethodNoSideEffect(isolate, tmpl, "stddev", GetStddev); + SetProtoMethodNoSideEffect(isolate, tmpl, "percentile", GetPercentile); + SetProtoMethodNoSideEffect( + isolate, tmpl, "percentileBigInt", GetPercentileBigInt); + SetProtoMethodNoSideEffect(isolate, tmpl, "percentiles", GetPercentiles); + SetProtoMethodNoSideEffect( + isolate, tmpl, "percentilesBigInt", GetPercentilesBigInt); + SetProtoMethod(isolate, tmpl, "reset", DoReset); + SetProtoMethod(isolate, tmpl, "start", Start); + SetProtoMethod(isolate, tmpl, "stop", Stop); env->set_intervalhistogram_constructor_template(tmpl); } return tmpl; diff --git a/src/inspector_js_api.cc b/src/inspector_js_api.cc index 298066dfc759ba..dd05a5b9c3ffd0 100644 --- a/src/inspector_js_api.cc +++ b/src/inspector_js_api.cc @@ -102,17 +102,17 @@ 
class JSBindingsConnection : public AsyncWrap { } static void Bind(Environment* env, Local target) { + Isolate* isolate = env->isolate(); Local tmpl = - env->NewFunctionTemplate(JSBindingsConnection::New); + NewFunctionTemplate(isolate, JSBindingsConnection::New); tmpl->InstanceTemplate()->SetInternalFieldCount( JSBindingsConnection::kInternalFieldCount); tmpl->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(tmpl, "dispatch", JSBindingsConnection::Dispatch); - env->SetProtoMethod(tmpl, "disconnect", JSBindingsConnection::Disconnect); - env->SetConstructorFunction( - target, - ConnectionType::GetClassName(env), - tmpl); + SetProtoMethod(isolate, tmpl, "dispatch", JSBindingsConnection::Dispatch); + SetProtoMethod( + isolate, tmpl, "disconnect", JSBindingsConnection::Disconnect); + SetConstructorFunction( + env->context(), target, ConnectionType::GetClassName(env), tmpl); } static void New(const FunctionCallbackInfo& info) { @@ -314,37 +314,47 @@ void Url(const FunctionCallbackInfo& args) { void Initialize(Local target, Local unused, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); v8::Local consoleCallFunc = - env->NewFunctionTemplate(InspectorConsoleCall, v8::Local(), - v8::ConstructorBehavior::kThrow, - v8::SideEffectType::kHasSideEffect) + NewFunctionTemplate(isolate, + InspectorConsoleCall, + v8::Local(), + v8::ConstructorBehavior::kThrow, + v8::SideEffectType::kHasSideEffect) ->GetFunction(context) .ToLocalChecked(); - auto name_string = FIXED_ONE_BYTE_STRING(env->isolate(), "consoleCall"); + auto name_string = FIXED_ONE_BYTE_STRING(isolate, "consoleCall"); target->Set(context, name_string, consoleCallFunc).Check(); consoleCallFunc->SetName(name_string); - env->SetMethod( - target, "setConsoleExtensionInstaller", SetConsoleExtensionInstaller); - env->SetMethod(target, "callAndPauseOnStart", CallAndPauseOnStart); - env->SetMethod(target, "open", Open); - env->SetMethodNoSideEffect(target, "url", Url); - env->SetMethod(target, "waitForDebugger", WaitForDebugger); - - env->SetMethod(target, "asyncTaskScheduled", AsyncTaskScheduledWrapper); - env->SetMethod(target, "asyncTaskCanceled", - InvokeAsyncTaskFnWithId<&Agent::AsyncTaskCanceled>); - env->SetMethod(target, "asyncTaskStarted", - InvokeAsyncTaskFnWithId<&Agent::AsyncTaskStarted>); - env->SetMethod(target, "asyncTaskFinished", - InvokeAsyncTaskFnWithId<&Agent::AsyncTaskFinished>); - - env->SetMethod(target, "registerAsyncHook", RegisterAsyncHookWrapper); - env->SetMethodNoSideEffect(target, "isEnabled", IsEnabled); - - Local console_string = - FIXED_ONE_BYTE_STRING(env->isolate(), "console"); + SetMethod(context, + target, + "setConsoleExtensionInstaller", + SetConsoleExtensionInstaller); + SetMethod(context, target, "callAndPauseOnStart", CallAndPauseOnStart); + SetMethod(context, target, "open", Open); + SetMethodNoSideEffect(context, target, "url", Url); + SetMethod(context, target, "waitForDebugger", WaitForDebugger); + + SetMethod(context, target, "asyncTaskScheduled", AsyncTaskScheduledWrapper); + SetMethod(context, + target, + "asyncTaskCanceled", + InvokeAsyncTaskFnWithId<&Agent::AsyncTaskCanceled>); + SetMethod(context, + target, + "asyncTaskStarted", + InvokeAsyncTaskFnWithId<&Agent::AsyncTaskStarted>); + SetMethod(context, + target, + "asyncTaskFinished", + InvokeAsyncTaskFnWithId<&Agent::AsyncTaskFinished>); + + SetMethod(context, target, "registerAsyncHook", RegisterAsyncHookWrapper); + SetMethodNoSideEffect(context, target, 
"isEnabled", IsEnabled); + + Local console_string = FIXED_ONE_BYTE_STRING(isolate, "console"); // Grab the console from the binding object and expose those to our binding // layer. diff --git a/src/inspector_profiler.cc b/src/inspector_profiler.cc index 332aa536364fba..538125f091a323 100644 --- a/src/inspector_profiler.cc +++ b/src/inspector_profiler.cc @@ -507,11 +507,11 @@ static void Initialize(Local target, Local unused, Local context, void* priv) { - Environment* env = Environment::GetCurrent(context); - env->SetMethod(target, "setCoverageDirectory", SetCoverageDirectory); - env->SetMethod(target, "setSourceMapCacheGetter", SetSourceMapCacheGetter); - env->SetMethod(target, "takeCoverage", TakeCoverage); - env->SetMethod(target, "stopCoverage", StopCoverage); + SetMethod(context, target, "setCoverageDirectory", SetCoverageDirectory); + SetMethod( + context, target, "setSourceMapCacheGetter", SetSourceMapCacheGetter); + SetMethod(context, target, "takeCoverage", TakeCoverage); + SetMethod(context, target, "stopCoverage", StopCoverage); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/js_stream.cc b/src/js_stream.cc index 720008ecefcb48..5bf64a9e0a928b 100644 --- a/src/js_stream.cc +++ b/src/js_stream.cc @@ -17,6 +17,7 @@ using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; using v8::Int32; +using v8::Isolate; using v8::Local; using v8::Object; using v8::Value; @@ -197,19 +198,20 @@ void JSStream::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - Local t = env->NewFunctionTemplate(New); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate() ->SetInternalFieldCount(StreamBase::kInternalFieldCount); t->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "finishWrite", Finish); - env->SetProtoMethod(t, "finishShutdown", Finish); - env->SetProtoMethod(t, "readBuffer", ReadBuffer); - env->SetProtoMethod(t, "emitEOF", EmitEOF); + SetProtoMethod(isolate, t, "finishWrite", Finish); + SetProtoMethod(isolate, t, "finishShutdown", Finish); + SetProtoMethod(isolate, t, "readBuffer", ReadBuffer); + SetProtoMethod(isolate, t, "emitEOF", EmitEOF); StreamBase::AddMethods(env, t); - env->SetConstructorFunction(target, "JSStream", t); + SetConstructorFunction(context, target, "JSStream", t); } } // namespace node diff --git a/src/js_udp_wrap.cc b/src/js_udp_wrap.cc index 3f02771ee1a907..99362ccc6097d5 100644 --- a/src/js_udp_wrap.cc +++ b/src/js_udp_wrap.cc @@ -17,6 +17,7 @@ using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; using v8::Int32; +using v8::Isolate; using v8::Local; using v8::Object; using v8::Value; @@ -198,18 +199,19 @@ void JSUDPWrap::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - Local t = env->NewFunctionTemplate(New); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate() ->SetInternalFieldCount(UDPWrapBase::kUDPWrapBaseField + 1); t->Inherit(AsyncWrap::GetConstructorTemplate(env)); UDPWrapBase::AddMethods(env, t); - env->SetProtoMethod(t, "emitReceived", EmitReceived); - env->SetProtoMethod(t, "onSendDone", OnSendDone); - env->SetProtoMethod(t, "onAfterBind", OnAfterBind); + SetProtoMethod(isolate, t, "emitReceived", EmitReceived); + SetProtoMethod(isolate, t, "onSendDone", OnSendDone); + SetProtoMethod(isolate, t, "onAfterBind", OnAfterBind); 
- env->SetConstructorFunction(target, "JSUDPWrap", t); + SetConstructorFunction(context, target, "JSUDPWrap", t); } diff --git a/src/module_wrap.cc b/src/module_wrap.cc index 50ce8d510cb1a4..b445189d388036 100644 --- a/src/module_wrap.cc +++ b/src/module_wrap.cc @@ -765,31 +765,37 @@ void ModuleWrap::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - Local tpl = env->NewFunctionTemplate(New); + Local tpl = NewFunctionTemplate(isolate, New); tpl->InstanceTemplate()->SetInternalFieldCount( ModuleWrap::kInternalFieldCount); tpl->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(tpl, "link", Link); - env->SetProtoMethod(tpl, "instantiate", Instantiate); - env->SetProtoMethod(tpl, "evaluate", Evaluate); - env->SetProtoMethod(tpl, "setExport", SetSyntheticExport); - env->SetProtoMethodNoSideEffect(tpl, "createCachedData", CreateCachedData); - env->SetProtoMethodNoSideEffect(tpl, "getNamespace", GetNamespace); - env->SetProtoMethodNoSideEffect(tpl, "getStatus", GetStatus); - env->SetProtoMethodNoSideEffect(tpl, "getError", GetError); - env->SetProtoMethodNoSideEffect(tpl, "getStaticDependencySpecifiers", - GetStaticDependencySpecifiers); - - env->SetConstructorFunction(target, "ModuleWrap", tpl); - - env->SetMethod(target, - "setImportModuleDynamicallyCallback", - SetImportModuleDynamicallyCallback); - env->SetMethod(target, - "setInitializeImportMetaObjectCallback", - SetInitializeImportMetaObjectCallback); + SetProtoMethod(isolate, tpl, "link", Link); + SetProtoMethod(isolate, tpl, "instantiate", Instantiate); + SetProtoMethod(isolate, tpl, "evaluate", Evaluate); + SetProtoMethod(isolate, tpl, "setExport", SetSyntheticExport); + SetProtoMethodNoSideEffect( + isolate, tpl, "createCachedData", CreateCachedData); + SetProtoMethodNoSideEffect(isolate, tpl, "getNamespace", GetNamespace); + SetProtoMethodNoSideEffect(isolate, tpl, "getStatus", GetStatus); + SetProtoMethodNoSideEffect(isolate, tpl, "getError", GetError); + SetProtoMethodNoSideEffect(isolate, + tpl, + "getStaticDependencySpecifiers", + GetStaticDependencySpecifiers); + + SetConstructorFunction(context, target, "ModuleWrap", tpl); + + SetMethod(context, + target, + "setImportModuleDynamicallyCallback", + SetImportModuleDynamicallyCallback); + SetMethod(context, + target, + "setInitializeImportMetaObjectCallback", + SetInitializeImportMetaObjectCallback); #define V(name) \ target->Set(context, \ diff --git a/src/node.cc b/src/node.cc index 40a0decc3f2bf1..fb02398df18cef 100644 --- a/src/node.cc +++ b/src/node.cc @@ -309,10 +309,10 @@ MaybeLocal Environment::BootstrapInternalLoaders() { primordials_string()}; std::vector> loaders_args = { process_object(), - NewFunctionTemplate(binding::GetLinkedBinding) + NewFunctionTemplate(isolate_, binding::GetLinkedBinding) ->GetFunction(context()) .ToLocalChecked(), - NewFunctionTemplate(binding::GetInternalBinding) + NewFunctionTemplate(isolate_, binding::GetInternalBinding) ->GetFunction(context()) .ToLocalChecked(), primordials()}; @@ -451,7 +451,7 @@ MaybeLocal StartExecution(Environment* env, const char* main_script_id) { env->native_module_require(), env->internal_binding_loader(), env->primordials(), - env->NewFunctionTemplate(MarkBootstrapComplete) + NewFunctionTemplate(env->isolate(), MarkBootstrapComplete) ->GetFunction(env->context()) .ToLocalChecked()}; diff --git a/src/node_blob.cc b/src/node_blob.cc index e1bf3be2b07b9e..b319a74ebaedbf 100644 --- a/src/node_blob.cc +++ 
b/src/node_blob.cc @@ -22,6 +22,7 @@ using v8::Function; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; +using v8::Isolate; using v8::Local; using v8::MaybeLocal; using v8::Number; @@ -42,24 +43,25 @@ void Blob::Initialize( env->AddBindingData(context, target); if (binding_data == nullptr) return; - env->SetMethod(target, "createBlob", New); - env->SetMethod(target, "storeDataObject", StoreDataObject); - env->SetMethod(target, "getDataObject", GetDataObject); - env->SetMethod(target, "revokeDataObject", RevokeDataObject); + SetMethod(context, target, "createBlob", New); + SetMethod(context, target, "storeDataObject", StoreDataObject); + SetMethod(context, target, "getDataObject", GetDataObject); + SetMethod(context, target, "revokeDataObject", RevokeDataObject); FixedSizeBlobCopyJob::Initialize(env, target); } Local Blob::GetConstructorTemplate(Environment* env) { Local tmpl = env->blob_constructor_template(); if (tmpl.IsEmpty()) { - tmpl = FunctionTemplate::New(env->isolate()); + Isolate* isolate = env->isolate(); + tmpl = NewFunctionTemplate(isolate, nullptr); tmpl->InstanceTemplate()->SetInternalFieldCount( BaseObject::kInternalFieldCount); tmpl->Inherit(BaseObject::GetConstructorTemplate(env)); tmpl->SetClassName( FIXED_ONE_BYTE_STRING(env->isolate(), "Blob")); - env->SetProtoMethod(tmpl, "toArrayBuffer", ToArrayBuffer); - env->SetProtoMethod(tmpl, "slice", ToSlice); + SetProtoMethod(isolate, tmpl, "toArrayBuffer", ToArrayBuffer); + SetProtoMethod(isolate, tmpl, "slice", ToSlice); env->set_blob_constructor_template(tmpl); } return tmpl; @@ -362,12 +364,13 @@ void FixedSizeBlobCopyJob::MemoryInfo(MemoryTracker* tracker) const { } void FixedSizeBlobCopyJob::Initialize(Environment* env, Local target) { - v8::Local job = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + v8::Local job = NewFunctionTemplate(isolate, New); job->Inherit(AsyncWrap::GetConstructorTemplate(env)); job->InstanceTemplate()->SetInternalFieldCount( AsyncWrap::kInternalFieldCount); - env->SetProtoMethod(job, "run", Run); - env->SetConstructorFunction(target, "FixedSizeBlobCopyJob", job); + SetProtoMethod(isolate, job, "run", Run); + SetConstructorFunction(env->context(), target, "FixedSizeBlobCopyJob", job); } void FixedSizeBlobCopyJob::New(const FunctionCallbackInfo& args) { diff --git a/src/node_buffer.cc b/src/node_buffer.cc index aec97f15e2c809..f8885ab3771b12 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -1275,54 +1275,60 @@ void Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - env->SetMethod(target, "setBufferPrototype", SetBufferPrototype); - env->SetMethodNoSideEffect(target, "createFromString", CreateFromString); - - env->SetMethodNoSideEffect(target, "byteLengthUtf8", ByteLengthUtf8); - env->SetMethod(target, "copy", Copy); - env->SetMethodNoSideEffect(target, "compare", Compare); - env->SetMethodNoSideEffect(target, "compareOffset", CompareOffset); - env->SetMethod(target, "fill", Fill); - env->SetMethodNoSideEffect(target, "indexOfBuffer", IndexOfBuffer); - env->SetMethodNoSideEffect(target, "indexOfNumber", IndexOfNumber); - env->SetMethodNoSideEffect(target, "indexOfString", IndexOfString); - - env->SetMethod(target, "detachArrayBuffer", DetachArrayBuffer); - env->SetMethod(target, "copyArrayBuffer", CopyArrayBuffer); - - env->SetMethod(target, "swap16", Swap16); - env->SetMethod(target, "swap32", Swap32); - env->SetMethod(target, "swap64", 
Swap64); - - env->SetMethod(target, "encodeInto", EncodeInto); - env->SetMethodNoSideEffect(target, "encodeUtf8String", EncodeUtf8String); - - target->Set(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "kMaxLength"), - Number::New(env->isolate(), kMaxLength)).Check(); - - target->Set(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "kStringMaxLength"), - Integer::New(env->isolate(), String::kMaxLength)).Check(); - - env->SetMethodNoSideEffect(target, "asciiSlice", StringSlice); - env->SetMethodNoSideEffect(target, "base64Slice", StringSlice); - env->SetMethodNoSideEffect(target, "base64urlSlice", StringSlice); - env->SetMethodNoSideEffect(target, "latin1Slice", StringSlice); - env->SetMethodNoSideEffect(target, "hexSlice", StringSlice); - env->SetMethodNoSideEffect(target, "ucs2Slice", StringSlice); - env->SetMethodNoSideEffect(target, "utf8Slice", StringSlice); - - env->SetMethod(target, "asciiWrite", StringWrite); - env->SetMethod(target, "base64Write", StringWrite); - env->SetMethod(target, "base64urlWrite", StringWrite); - env->SetMethod(target, "latin1Write", StringWrite); - env->SetMethod(target, "hexWrite", StringWrite); - env->SetMethod(target, "ucs2Write", StringWrite); - env->SetMethod(target, "utf8Write", StringWrite); - - env->SetMethod(target, "getZeroFillToggle", GetZeroFillToggle); + SetMethod(context, target, "setBufferPrototype", SetBufferPrototype); + SetMethodNoSideEffect(context, target, "createFromString", CreateFromString); + + SetMethodNoSideEffect(context, target, "byteLengthUtf8", ByteLengthUtf8); + SetMethod(context, target, "copy", Copy); + SetMethodNoSideEffect(context, target, "compare", Compare); + SetMethodNoSideEffect(context, target, "compareOffset", CompareOffset); + SetMethod(context, target, "fill", Fill); + SetMethodNoSideEffect(context, target, "indexOfBuffer", IndexOfBuffer); + SetMethodNoSideEffect(context, target, "indexOfNumber", IndexOfNumber); + SetMethodNoSideEffect(context, target, "indexOfString", IndexOfString); + + SetMethod(context, target, "detachArrayBuffer", DetachArrayBuffer); + SetMethod(context, target, "copyArrayBuffer", CopyArrayBuffer); + + SetMethod(context, target, "swap16", Swap16); + SetMethod(context, target, "swap32", Swap32); + SetMethod(context, target, "swap64", Swap64); + + SetMethod(context, target, "encodeInto", EncodeInto); + SetMethodNoSideEffect(context, target, "encodeUtf8String", EncodeUtf8String); + + target + ->Set(context, + FIXED_ONE_BYTE_STRING(isolate, "kMaxLength"), + Number::New(isolate, kMaxLength)) + .Check(); + + target + ->Set(context, + FIXED_ONE_BYTE_STRING(isolate, "kStringMaxLength"), + Integer::New(isolate, String::kMaxLength)) + .Check(); + + SetMethodNoSideEffect(context, target, "asciiSlice", StringSlice); + SetMethodNoSideEffect(context, target, "base64Slice", StringSlice); + SetMethodNoSideEffect( + context, target, "base64urlSlice", StringSlice); + SetMethodNoSideEffect(context, target, "latin1Slice", StringSlice); + SetMethodNoSideEffect(context, target, "hexSlice", StringSlice); + SetMethodNoSideEffect(context, target, "ucs2Slice", StringSlice); + SetMethodNoSideEffect(context, target, "utf8Slice", StringSlice); + + SetMethod(context, target, "asciiWrite", StringWrite); + SetMethod(context, target, "base64Write", StringWrite); + SetMethod(context, target, "base64urlWrite", StringWrite); + SetMethod(context, target, "latin1Write", StringWrite); + SetMethod(context, target, "hexWrite", StringWrite); + SetMethod(context, target, "ucs2Write", StringWrite); + SetMethod(context, 
target, "utf8Write", StringWrite); + + SetMethod(context, target, "getZeroFillToggle", GetZeroFillToggle); } } // anonymous namespace diff --git a/src/node_contextify.cc b/src/node_contextify.cc index f383b6def93bbe..95ba72c3db268c 100644 --- a/src/node_contextify.cc +++ b/src/node_contextify.cc @@ -255,16 +255,19 @@ MaybeLocal ContextifyContext::CreateV8Context( void ContextifyContext::Init(Environment* env, Local target) { + Isolate* isolate = env->isolate(); + Local context = env->context(); + Local function_template = - FunctionTemplate::New(env->isolate()); + NewFunctionTemplate(isolate, nullptr); function_template->InstanceTemplate()->SetInternalFieldCount( ContextifyContext::kInternalFieldCount); env->set_script_data_constructor_function( function_template->GetFunction(env->context()).ToLocalChecked()); - env->SetMethod(target, "makeContext", MakeContext); - env->SetMethod(target, "isContext", IsContext); - env->SetMethod(target, "compileFunction", CompileFunction); + SetMethod(context, target, "makeContext", MakeContext); + SetMethod(context, target, "isContext", IsContext); + SetMethod(context, target, "compileFunction", CompileFunction); } void ContextifyContext::RegisterExternalReferences( @@ -657,16 +660,17 @@ void ContextifyContext::IndexedPropertyDeleterCallback( } void ContextifyScript::Init(Environment* env, Local target) { + Isolate* isolate = env->isolate(); HandleScope scope(env->isolate()); Local class_name = FIXED_ONE_BYTE_STRING(env->isolate(), "ContextifyScript"); - Local script_tmpl = env->NewFunctionTemplate(New); + Local script_tmpl = NewFunctionTemplate(isolate, New); script_tmpl->InstanceTemplate()->SetInternalFieldCount( ContextifyScript::kInternalFieldCount); script_tmpl->SetClassName(class_name); - env->SetProtoMethod(script_tmpl, "createCachedData", CreateCachedData); - env->SetProtoMethod(script_tmpl, "runInContext", RunInContext); + SetProtoMethod(isolate, script_tmpl, "createCachedData", CreateCachedData); + SetProtoMethod(isolate, script_tmpl, "runInContext", RunInContext); Local context = env->context(); @@ -1262,12 +1266,14 @@ void MicrotaskQueueWrap::New(const FunctionCallbackInfo& args) { } void MicrotaskQueueWrap::Init(Environment* env, Local target) { - HandleScope scope(env->isolate()); - Local tmpl = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + HandleScope scope(isolate); + Local context = env->context(); + Local tmpl = NewFunctionTemplate(isolate, New); tmpl->InstanceTemplate()->SetInternalFieldCount( ContextifyScript::kInternalFieldCount); env->set_microtask_queue_ctor_template(tmpl); - env->SetConstructorFunction(target, "MicrotaskQueue", tmpl); + SetConstructorFunction(context, target, "MicrotaskQueue", tmpl); } void MicrotaskQueueWrap::RegisterExternalReferences( @@ -1285,11 +1291,11 @@ void Initialize(Local target, ContextifyScript::Init(env, target); MicrotaskQueueWrap::Init(env, target); - env->SetMethod(target, "startSigintWatchdog", StartSigintWatchdog); - env->SetMethod(target, "stopSigintWatchdog", StopSigintWatchdog); + SetMethod(context, target, "startSigintWatchdog", StartSigintWatchdog); + SetMethod(context, target, "stopSigintWatchdog", StopSigintWatchdog); // Used in tests. 
- env->SetMethodNoSideEffect( - target, "watchdogHasPendingSigint", WatchdogHasPendingSigint); + SetMethodNoSideEffect( + context, target, "watchdogHasPendingSigint", WatchdogHasPendingSigint); { Local tpl = FunctionTemplate::New(env->isolate()); @@ -1325,7 +1331,7 @@ void Initialize(Local target, target->Set(context, env->constants_string(), constants).Check(); - env->SetMethod(target, "measureMemory", MeasureMemory); + SetMethod(context, target, "measureMemory", MeasureMemory); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/node_credentials.cc b/src/node_credentials.cc index d7471eb0d4eda9..458fa13217e329 100644 --- a/src/node_credentials.cc +++ b/src/node_credentials.cc @@ -448,23 +448,23 @@ static void Initialize(Local target, Environment* env = Environment::GetCurrent(context); Isolate* isolate = env->isolate(); - env->SetMethod(target, "safeGetenv", SafeGetenv); + SetMethod(context, target, "safeGetenv", SafeGetenv); #ifdef NODE_IMPLEMENTS_POSIX_CREDENTIALS READONLY_TRUE_PROPERTY(target, "implementsPosixCredentials"); - env->SetMethodNoSideEffect(target, "getuid", GetUid); - env->SetMethodNoSideEffect(target, "geteuid", GetEUid); - env->SetMethodNoSideEffect(target, "getgid", GetGid); - env->SetMethodNoSideEffect(target, "getegid", GetEGid); - env->SetMethodNoSideEffect(target, "getgroups", GetGroups); + SetMethodNoSideEffect(context, target, "getuid", GetUid); + SetMethodNoSideEffect(context, target, "geteuid", GetEUid); + SetMethodNoSideEffect(context, target, "getgid", GetGid); + SetMethodNoSideEffect(context, target, "getegid", GetEGid); + SetMethodNoSideEffect(context, target, "getgroups", GetGroups); if (env->owns_process_state()) { - env->SetMethod(target, "initgroups", InitGroups); - env->SetMethod(target, "setegid", SetEGid); - env->SetMethod(target, "seteuid", SetEUid); - env->SetMethod(target, "setgid", SetGid); - env->SetMethod(target, "setuid", SetUid); - env->SetMethod(target, "setgroups", SetGroups); + SetMethod(context, target, "initgroups", InitGroups); + SetMethod(context, target, "setegid", SetEGid); + SetMethod(context, target, "seteuid", SetEUid); + SetMethod(context, target, "setgid", SetGid); + SetMethod(context, target, "setuid", SetUid); + SetMethod(context, target, "setgroups", SetGroups); } #endif // NODE_IMPLEMENTS_POSIX_CREDENTIALS } diff --git a/src/node_dir.cc b/src/node_dir.cc index c530da0cc3be4b..e501934ecc70ac 100644 --- a/src/node_dir.cc +++ b/src/node_dir.cc @@ -351,17 +351,18 @@ void Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - env->SetMethod(target, "opendir", OpenDir); + SetMethod(context, target, "opendir", OpenDir); // Create FunctionTemplate for DirHandle - Local dir = env->NewFunctionTemplate(DirHandle::New); + Local dir = NewFunctionTemplate(isolate, DirHandle::New); dir->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(dir, "read", DirHandle::Read); - env->SetProtoMethod(dir, "close", DirHandle::Close); + SetProtoMethod(isolate, dir, "read", DirHandle::Read); + SetProtoMethod(isolate, dir, "close", DirHandle::Close); Local dirt = dir->InstanceTemplate(); dirt->SetInternalFieldCount(DirHandle::kInternalFieldCount); - env->SetConstructorFunction(target, "DirHandle", dir); + SetConstructorFunction(context, target, "DirHandle", dir); env->set_dir_instance_template(dirt); } diff --git a/src/node_dtrace.cc b/src/node_dtrace.cc index 52e63992833c38..9768880ac78a37 100644 --- 
a/src/node_dtrace.cc +++ b/src/node_dtrace.cc @@ -301,10 +301,8 @@ void InitializeDTrace(Local target, Local unused, Local context, void* priv) { - Environment* env = Environment::GetCurrent(context); - #if defined HAVE_DTRACE || defined HAVE_ETW -#define V(name) env->SetMethod(target, #name, name); +#define V(name) SetMethod(context, target, #name, name); NODE_PROBES(V) #undef V #endif // defined HAVE_DTRACE || defined HAVE_ETW diff --git a/src/node_errors.cc b/src/node_errors.cc index 73467cf84e2d79..7a4c8257253f3c 100644 --- a/src/node_errors.cc +++ b/src/node_errors.cc @@ -920,19 +920,23 @@ void Initialize(Local target, Local unused, Local context, void* priv) { - Environment* env = Environment::GetCurrent(context); - env->SetMethod( - target, "setPrepareStackTraceCallback", SetPrepareStackTraceCallback); - env->SetMethod(target, "setSourceMapsEnabled", SetSourceMapsEnabled); - env->SetMethod(target, - "setMaybeCacheGeneratedSourceMap", - SetMaybeCacheGeneratedSourceMap); - env->SetMethod(target, - "setEnhanceStackForFatalException", - SetEnhanceStackForFatalException); - env->SetMethodNoSideEffect( - target, "noSideEffectsToString", NoSideEffectsToString); - env->SetMethod(target, "triggerUncaughtException", TriggerUncaughtException); + SetMethod(context, + target, + "setPrepareStackTraceCallback", + SetPrepareStackTraceCallback); + SetMethod(context, target, "setSourceMapsEnabled", SetSourceMapsEnabled); + SetMethod(context, + target, + "setMaybeCacheGeneratedSourceMap", + SetMaybeCacheGeneratedSourceMap); + SetMethod(context, + target, + "setEnhanceStackForFatalException", + SetEnhanceStackForFatalException); + SetMethodNoSideEffect( + context, target, "noSideEffectsToString", NoSideEffectsToString); + SetMethod( + context, target, "triggerUncaughtException", TriggerUncaughtException); } void DecorateErrorStack(Environment* env, diff --git a/src/node_file.cc b/src/node_file.cc index 5a3e54669049fb..6d1254958a3695 100644 --- a/src/node_file.cc +++ b/src/node_file.cc @@ -2465,46 +2465,46 @@ void Initialize(Local target, env->AddBindingData(context, target); if (binding_data == nullptr) return; - env->SetMethod(target, "access", Access); - env->SetMethod(target, "close", Close); - env->SetMethod(target, "open", Open); - env->SetMethod(target, "openFileHandle", OpenFileHandle); - env->SetMethod(target, "read", Read); - env->SetMethod(target, "readBuffers", ReadBuffers); - env->SetMethod(target, "fdatasync", Fdatasync); - env->SetMethod(target, "fsync", Fsync); - env->SetMethod(target, "rename", Rename); - env->SetMethod(target, "ftruncate", FTruncate); - env->SetMethod(target, "rmdir", RMDir); - env->SetMethod(target, "mkdir", MKDir); - env->SetMethod(target, "readdir", ReadDir); - env->SetMethod(target, "internalModuleReadJSON", InternalModuleReadJSON); - env->SetMethod(target, "internalModuleStat", InternalModuleStat); - env->SetMethod(target, "stat", Stat); - env->SetMethod(target, "lstat", LStat); - env->SetMethod(target, "fstat", FStat); - env->SetMethod(target, "link", Link); - env->SetMethod(target, "symlink", Symlink); - env->SetMethod(target, "readlink", ReadLink); - env->SetMethod(target, "unlink", Unlink); - env->SetMethod(target, "writeBuffer", WriteBuffer); - env->SetMethod(target, "writeBuffers", WriteBuffers); - env->SetMethod(target, "writeString", WriteString); - env->SetMethod(target, "realpath", RealPath); - env->SetMethod(target, "copyFile", CopyFile); - - env->SetMethod(target, "chmod", Chmod); - env->SetMethod(target, "fchmod", FChmod); - - env->SetMethod(target, 
"chown", Chown); - env->SetMethod(target, "fchown", FChown); - env->SetMethod(target, "lchown", LChown); - - env->SetMethod(target, "utimes", UTimes); - env->SetMethod(target, "futimes", FUTimes); - env->SetMethod(target, "lutimes", LUTimes); - - env->SetMethod(target, "mkdtemp", Mkdtemp); + SetMethod(context, target, "access", Access); + SetMethod(context, target, "close", Close); + SetMethod(context, target, "open", Open); + SetMethod(context, target, "openFileHandle", OpenFileHandle); + SetMethod(context, target, "read", Read); + SetMethod(context, target, "readBuffers", ReadBuffers); + SetMethod(context, target, "fdatasync", Fdatasync); + SetMethod(context, target, "fsync", Fsync); + SetMethod(context, target, "rename", Rename); + SetMethod(context, target, "ftruncate", FTruncate); + SetMethod(context, target, "rmdir", RMDir); + SetMethod(context, target, "mkdir", MKDir); + SetMethod(context, target, "readdir", ReadDir); + SetMethod(context, target, "internalModuleReadJSON", InternalModuleReadJSON); + SetMethod(context, target, "internalModuleStat", InternalModuleStat); + SetMethod(context, target, "stat", Stat); + SetMethod(context, target, "lstat", LStat); + SetMethod(context, target, "fstat", FStat); + SetMethod(context, target, "link", Link); + SetMethod(context, target, "symlink", Symlink); + SetMethod(context, target, "readlink", ReadLink); + SetMethod(context, target, "unlink", Unlink); + SetMethod(context, target, "writeBuffer", WriteBuffer); + SetMethod(context, target, "writeBuffers", WriteBuffers); + SetMethod(context, target, "writeString", WriteString); + SetMethod(context, target, "realpath", RealPath); + SetMethod(context, target, "copyFile", CopyFile); + + SetMethod(context, target, "chmod", Chmod); + SetMethod(context, target, "fchmod", FChmod); + + SetMethod(context, target, "chown", Chown); + SetMethod(context, target, "fchown", FChown); + SetMethod(context, target, "lchown", LChown); + + SetMethod(context, target, "utimes", UTimes); + SetMethod(context, target, "futimes", FUTimes); + SetMethod(context, target, "lutimes", LUTimes); + + SetMethod(context, target, "mkdtemp", Mkdtemp); target ->Set(context, @@ -2517,11 +2517,11 @@ void Initialize(Local target, StatWatcher::Initialize(env, target); // Create FunctionTemplate for FSReqCallback - Local fst = env->NewFunctionTemplate(NewFSReqCallback); + Local fst = NewFunctionTemplate(isolate, NewFSReqCallback); fst->InstanceTemplate()->SetInternalFieldCount( FSReqBase::kInternalFieldCount); fst->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetConstructorFunction(target, "FSReqCallback", fst); + SetConstructorFunction(context, target, "FSReqCallback", fst); // Create FunctionTemplate for FileHandleReadWrap. There’s no need // to do anything in the constructor, so we only store the instance template. 
@@ -2546,14 +2546,14 @@ void Initialize(Local target, env->set_fsreqpromise_constructor_template(fpo); // Create FunctionTemplate for FileHandle - Local fd = env->NewFunctionTemplate(FileHandle::New); + Local fd = NewFunctionTemplate(isolate, FileHandle::New); fd->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(fd, "close", FileHandle::Close); - env->SetProtoMethod(fd, "releaseFD", FileHandle::ReleaseFD); + SetProtoMethod(isolate, fd, "close", FileHandle::Close); + SetProtoMethod(isolate, fd, "releaseFD", FileHandle::ReleaseFD); Local fdt = fd->InstanceTemplate(); fdt->SetInternalFieldCount(FileHandle::kInternalFieldCount); StreamBase::AddMethods(env, fd); - env->SetConstructorFunction(target, "FileHandle", fd); + SetConstructorFunction(context, target, "FileHandle", fd); env->set_fd_constructor_template(fdt); // Create FunctionTemplate for FileHandle::CloseReq diff --git a/src/node_http2.cc b/src/node_http2.cc index 4c180c539a7d5e..53435e698cffb3 100644 --- a/src/node_http2.cc +++ b/src/node_http2.cc @@ -3201,10 +3201,10 @@ void Initialize(Local target, NODE_DEFINE_CONSTANT(target, kSessionHasAltsvcListeners); // Method to fetch the nghttp2 string description of an nghttp2 error code - env->SetMethod(target, "nghttp2ErrorString", HttpErrorString); - env->SetMethod(target, "refreshDefaultSettings", RefreshDefaultSettings); - env->SetMethod(target, "packSettings", PackSettings); - env->SetMethod(target, "setCallbackFunctions", SetCallbackFunctions); + SetMethod(context, target, "nghttp2ErrorString", HttpErrorString); + SetMethod(context, target, "refreshDefaultSettings", RefreshDefaultSettings); + SetMethod(context, target, "packSettings", PackSettings); + SetMethod(context, target, "setCallbackFunctions", SetCallbackFunctions); Local ping = FunctionTemplate::New(env->isolate()); ping->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "Http2Ping")); @@ -3220,50 +3220,54 @@ void Initialize(Local target, env->set_http2settings_constructor_template(settingt); Local stream = FunctionTemplate::New(env->isolate()); - env->SetProtoMethod(stream, "id", Http2Stream::GetID); - env->SetProtoMethod(stream, "destroy", Http2Stream::Destroy); - env->SetProtoMethod(stream, "priority", Http2Stream::Priority); - env->SetProtoMethod(stream, "pushPromise", Http2Stream::PushPromise); - env->SetProtoMethod(stream, "info", Http2Stream::Info); - env->SetProtoMethod(stream, "trailers", Http2Stream::Trailers); - env->SetProtoMethod(stream, "respond", Http2Stream::Respond); - env->SetProtoMethod(stream, "rstStream", Http2Stream::RstStream); - env->SetProtoMethod(stream, "refreshState", Http2Stream::RefreshState); + SetProtoMethod(isolate, stream, "id", Http2Stream::GetID); + SetProtoMethod(isolate, stream, "destroy", Http2Stream::Destroy); + SetProtoMethod(isolate, stream, "priority", Http2Stream::Priority); + SetProtoMethod(isolate, stream, "pushPromise", Http2Stream::PushPromise); + SetProtoMethod(isolate, stream, "info", Http2Stream::Info); + SetProtoMethod(isolate, stream, "trailers", Http2Stream::Trailers); + SetProtoMethod(isolate, stream, "respond", Http2Stream::Respond); + SetProtoMethod(isolate, stream, "rstStream", Http2Stream::RstStream); + SetProtoMethod(isolate, stream, "refreshState", Http2Stream::RefreshState); stream->Inherit(AsyncWrap::GetConstructorTemplate(env)); StreamBase::AddMethods(env, stream); Local streamt = stream->InstanceTemplate(); streamt->SetInternalFieldCount(StreamBase::kInternalFieldCount); env->set_http2stream_constructor_template(streamt); - 
env->SetConstructorFunction(target, "Http2Stream", stream); + SetConstructorFunction(context, target, "Http2Stream", stream); Local session = - env->NewFunctionTemplate(Http2Session::New); + NewFunctionTemplate(isolate, Http2Session::New); session->InstanceTemplate()->SetInternalFieldCount( Http2Session::kInternalFieldCount); session->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(session, "origin", Http2Session::Origin); - env->SetProtoMethod(session, "altsvc", Http2Session::AltSvc); - env->SetProtoMethod(session, "ping", Http2Session::Ping); - env->SetProtoMethod(session, "consume", Http2Session::Consume); - env->SetProtoMethod(session, "receive", Http2Session::Receive); - env->SetProtoMethod(session, "destroy", Http2Session::Destroy); - env->SetProtoMethod(session, "goaway", Http2Session::Goaway); - env->SetProtoMethod(session, "settings", Http2Session::Settings); - env->SetProtoMethod(session, "request", Http2Session::Request); - env->SetProtoMethod(session, "setNextStreamID", - Http2Session::SetNextStreamID); - env->SetProtoMethod(session, "setLocalWindowSize", - Http2Session::SetLocalWindowSize); - env->SetProtoMethod(session, "updateChunksSent", - Http2Session::UpdateChunksSent); - env->SetProtoMethod(session, "refreshState", Http2Session::RefreshState); - env->SetProtoMethod( - session, "localSettings", + SetProtoMethod(isolate, session, "origin", Http2Session::Origin); + SetProtoMethod(isolate, session, "altsvc", Http2Session::AltSvc); + SetProtoMethod(isolate, session, "ping", Http2Session::Ping); + SetProtoMethod(isolate, session, "consume", Http2Session::Consume); + SetProtoMethod(isolate, session, "receive", Http2Session::Receive); + SetProtoMethod(isolate, session, "destroy", Http2Session::Destroy); + SetProtoMethod(isolate, session, "goaway", Http2Session::Goaway); + SetProtoMethod(isolate, session, "settings", Http2Session::Settings); + SetProtoMethod(isolate, session, "request", Http2Session::Request); + SetProtoMethod( + isolate, session, "setNextStreamID", Http2Session::SetNextStreamID); + SetProtoMethod( + isolate, session, "setLocalWindowSize", Http2Session::SetLocalWindowSize); + SetProtoMethod( + isolate, session, "updateChunksSent", Http2Session::UpdateChunksSent); + SetProtoMethod(isolate, session, "refreshState", Http2Session::RefreshState); + SetProtoMethod( + isolate, + session, + "localSettings", Http2Session::RefreshSettings); - env->SetProtoMethod( - session, "remoteSettings", + SetProtoMethod( + isolate, + session, + "remoteSettings", Http2Session::RefreshSettings); - env->SetConstructorFunction(target, "Http2Session", session); + SetConstructorFunction(context, target, "Http2Session", session); Local constants = Object::New(isolate); diff --git a/src/node_http_parser.cc b/src/node_http_parser.cc index 620d608713d31d..914d0294214853 100644 --- a/src/node_http_parser.cc +++ b/src/node_http_parser.cc @@ -1175,11 +1175,12 @@ void InitializeHttpParser(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); BindingData* const binding_data = env->AddBindingData(context, target); if (binding_data == nullptr) return; - Local t = env->NewFunctionTemplate(Parser::New); + Local t = NewFunctionTemplate(isolate, Parser::New); t->InstanceTemplate()->SetInternalFieldCount(Parser::kInternalFieldCount); t->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "REQUEST"), @@ -1223,30 +1224,31 @@ void InitializeHttpParser(Local target, methods).Check(); 
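  // A rough reading of the converted call sites in this hunk and elsewhere in
  // the patch (an inference from the diff, not documentation of the new
  // helpers): helpers that only configure a FunctionTemplate, such as
  // SetProtoMethod and SetProtoMethodNoSideEffect, now take an Isolate*,
  // while helpers that create a concrete Function and attach it to an
  // object, such as SetMethod and SetConstructorFunction, take a
  // Local<Context>. SetConstructorFunction also keeps the behaviour of the
  // removed Environment method shown earlier in this diff: with the default
  // flag it sets the template's class name and installs the realized
  // function on the target. A hypothetical binding would therefore register
  // itself roughly as:
  //
  //   Local<FunctionTemplate> t = NewFunctionTemplate(isolate, New);
  //   SetProtoMethod(isolate, t, "doWork", DoWork);
  //   SetConstructorFunction(context, target, "MyWrap", t);
  //
  // New, DoWork and "MyWrap" are placeholder names, not part of this patch.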
t->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "close", Parser::Close); - env->SetProtoMethod(t, "free", Parser::Free); - env->SetProtoMethod(t, "remove", Parser::Remove); - env->SetProtoMethod(t, "execute", Parser::Execute); - env->SetProtoMethod(t, "finish", Parser::Finish); - env->SetProtoMethod(t, "initialize", Parser::Initialize); - env->SetProtoMethod(t, "pause", Parser::Pause); - env->SetProtoMethod(t, "resume", Parser::Pause); - env->SetProtoMethod(t, "consume", Parser::Consume); - env->SetProtoMethod(t, "unconsume", Parser::Unconsume); - env->SetProtoMethod(t, "getCurrentBuffer", Parser::GetCurrentBuffer); - env->SetProtoMethod(t, "duration", Parser::Duration); - env->SetProtoMethod(t, "headersCompleted", Parser::HeadersCompleted); - - env->SetConstructorFunction(target, "HTTPParser", t); - - Local c = env->NewFunctionTemplate(ConnectionsList::New); + SetProtoMethod(isolate, t, "close", Parser::Close); + SetProtoMethod(isolate, t, "free", Parser::Free); + SetProtoMethod(isolate, t, "remove", Parser::Remove); + SetProtoMethod(isolate, t, "execute", Parser::Execute); + SetProtoMethod(isolate, t, "finish", Parser::Finish); + SetProtoMethod(isolate, t, "initialize", Parser::Initialize); + SetProtoMethod(isolate, t, "pause", Parser::Pause); + SetProtoMethod(isolate, t, "resume", Parser::Pause); + SetProtoMethod(isolate, t, "consume", Parser::Consume); + SetProtoMethod(isolate, t, "unconsume", Parser::Unconsume); + SetProtoMethod(isolate, t, "getCurrentBuffer", Parser::GetCurrentBuffer); + SetProtoMethod(isolate, t, "duration", Parser::Duration); + SetProtoMethod(isolate, t, "headersCompleted", Parser::HeadersCompleted); + + SetConstructorFunction(context, target, "HTTPParser", t); + + Local c = + NewFunctionTemplate(isolate, ConnectionsList::New); c->InstanceTemplate() ->SetInternalFieldCount(ConnectionsList::kInternalFieldCount); - env->SetProtoMethod(c, "all", ConnectionsList::All); - env->SetProtoMethod(c, "idle", ConnectionsList::Idle); - env->SetProtoMethod(c, "active", ConnectionsList::Active); - env->SetProtoMethod(c, "expired", ConnectionsList::Expired); - env->SetConstructorFunction(target, "ConnectionsList", c); + SetProtoMethod(isolate, c, "all", ConnectionsList::All); + SetProtoMethod(isolate, c, "idle", ConnectionsList::Idle); + SetProtoMethod(isolate, c, "active", ConnectionsList::Active); + SetProtoMethod(isolate, c, "expired", ConnectionsList::Expired); + SetConstructorFunction(context, target, "ConnectionsList", c); } } // anonymous namespace diff --git a/src/node_i18n.cc b/src/node_i18n.cc index c537a247f55ff0..7a489686d30330 100644 --- a/src/node_i18n.cc +++ b/src/node_i18n.cc @@ -834,17 +834,17 @@ void Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); - env->SetMethod(target, "toUnicode", ToUnicode); - env->SetMethod(target, "toASCII", ToASCII); - env->SetMethod(target, "getStringWidth", GetStringWidth); + SetMethod(context, target, "toUnicode", ToUnicode); + SetMethod(context, target, "toASCII", ToASCII); + SetMethod(context, target, "getStringWidth", GetStringWidth); // One-shot converters - env->SetMethod(target, "icuErrName", ICUErrorName); - env->SetMethod(target, "transcode", Transcode); + SetMethod(context, target, "icuErrName", ICUErrorName); + SetMethod(context, target, "transcode", Transcode); // ConverterObject { - Local t = FunctionTemplate::New(env->isolate()); + Local t = NewFunctionTemplate(env->isolate(), nullptr); 
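  // As with the other conversions from a bare FunctionTemplate::New(isolate)
  // in this patch, passing a nullptr callback appears to yield a template
  // with no native call handler; the binding only uses it to hold instance
  // and prototype configuration. A minimal sketch of that shape:
  //
  //   Local<FunctionTemplate> t = NewFunctionTemplate(isolate, nullptr);
  //   t->Inherit(BaseObject::GetConstructorTemplate(env));
  //   t->InstanceTemplate()->SetInternalFieldCount(
  //       ConverterObject::kInternalFieldCount);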
t->Inherit(BaseObject::GetConstructorTemplate(env)); t->InstanceTemplate()->SetInternalFieldCount( ConverterObject::kInternalFieldCount); @@ -854,9 +854,9 @@ void Initialize(Local target, env->set_i18n_converter_template(t->InstanceTemplate()); } - env->SetMethod(target, "getConverter", ConverterObject::Create); - env->SetMethod(target, "decode", ConverterObject::Decode); - env->SetMethod(target, "hasConverter", ConverterObject::Has); + SetMethod(context, target, "getConverter", ConverterObject::Create); + SetMethod(context, target, "decode", ConverterObject::Decode); + SetMethod(context, target, "hasConverter", ConverterObject::Has); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/node_messaging.cc b/src/node_messaging.cc index 6403950e9c8a96..58cd1882dd2c06 100644 --- a/src/node_messaging.cc +++ b/src/node_messaging.cc @@ -1111,14 +1111,15 @@ Local GetMessagePortConstructorTemplate(Environment* env) { return templ; { - Local m = env->NewFunctionTemplate(MessagePort::New); + Isolate* isolate = env->isolate(); + Local m = NewFunctionTemplate(isolate, MessagePort::New); m->SetClassName(env->message_port_constructor_string()); m->InstanceTemplate()->SetInternalFieldCount( MessagePort::kInternalFieldCount); m->Inherit(HandleWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(m, "postMessage", MessagePort::PostMessage); - env->SetProtoMethod(m, "start", MessagePort::Start); + SetProtoMethod(isolate, m, "postMessage", MessagePort::PostMessage); + SetProtoMethod(isolate, m, "start", MessagePort::Start); env->set_message_port_constructor_template(m); } @@ -1451,38 +1452,43 @@ static void InitMessaging(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); { - env->SetConstructorFunction( - target, - "MessageChannel", - env->NewFunctionTemplate(MessageChannel)); + SetConstructorFunction(context, + target, + "MessageChannel", + NewFunctionTemplate(isolate, MessageChannel)); } { - Local t = env->NewFunctionTemplate(JSTransferable::New); + Local t = + NewFunctionTemplate(isolate, JSTransferable::New); t->Inherit(BaseObject::GetConstructorTemplate(env)); t->InstanceTemplate()->SetInternalFieldCount( JSTransferable::kInternalFieldCount); - env->SetConstructorFunction(target, "JSTransferable", t); + SetConstructorFunction(context, target, "JSTransferable", t); } - env->SetConstructorFunction( - target, - env->message_port_constructor_string(), - GetMessagePortConstructorTemplate(env)); + SetConstructorFunction(context, + target, + env->message_port_constructor_string(), + GetMessagePortConstructorTemplate(env)); // These are not methods on the MessagePort prototype, because // the browser equivalents do not provide them. 
- env->SetMethod(target, "stopMessagePort", MessagePort::Stop); - env->SetMethod(target, "checkMessagePort", MessagePort::CheckType); - env->SetMethod(target, "drainMessagePort", MessagePort::Drain); - env->SetMethod(target, "receiveMessageOnPort", MessagePort::ReceiveMessage); - env->SetMethod(target, "moveMessagePortToContext", - MessagePort::MoveToContext); - env->SetMethod(target, "setDeserializerCreateObjectFunction", - SetDeserializerCreateObjectFunction); - env->SetMethod(target, "broadcastChannel", BroadcastChannel); + SetMethod(context, target, "stopMessagePort", MessagePort::Stop); + SetMethod(context, target, "checkMessagePort", MessagePort::CheckType); + SetMethod(context, target, "drainMessagePort", MessagePort::Drain); + SetMethod( + context, target, "receiveMessageOnPort", MessagePort::ReceiveMessage); + SetMethod( + context, target, "moveMessagePortToContext", MessagePort::MoveToContext); + SetMethod(context, + target, + "setDeserializerCreateObjectFunction", + SetDeserializerCreateObjectFunction); + SetMethod(context, target, "broadcastChannel", BroadcastChannel); { Local domexception = GetDOMException(context).ToLocalChecked(); diff --git a/src/node_native_module.cc b/src/node_native_module.cc index 38af67c8955fd7..7a515cc1efaeb2 100644 --- a/src/node_native_module.cc +++ b/src/node_native_module.cc @@ -568,9 +568,10 @@ void NativeModuleLoader::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); target - ->SetAccessor(env->context(), + ->SetAccessor(context, env->config_string(), ConfigStringGetter, nullptr, @@ -580,8 +581,8 @@ void NativeModuleLoader::Initialize(Local target, SideEffectType::kHasNoSideEffect) .Check(); target - ->SetAccessor(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "moduleIds"), + ->SetAccessor(context, + FIXED_ONE_BYTE_STRING(isolate, "moduleIds"), ModuleIdsGetter, nullptr, MaybeLocal(), @@ -591,8 +592,8 @@ void NativeModuleLoader::Initialize(Local target, .Check(); target - ->SetAccessor(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "moduleCategories"), + ->SetAccessor(context, + FIXED_ONE_BYTE_STRING(isolate, "moduleCategories"), GetModuleCategories, nullptr, Local(), @@ -601,10 +602,11 @@ void NativeModuleLoader::Initialize(Local target, SideEffectType::kHasNoSideEffect) .Check(); - env->SetMethod(target, "getCacheUsage", NativeModuleLoader::GetCacheUsage); - env->SetMethod( - target, "compileFunction", NativeModuleLoader::CompileFunction); - env->SetMethod(target, "hasCachedBuiltins", HasCachedBuiltins); + SetMethod( + context, target, "getCacheUsage", NativeModuleLoader::GetCacheUsage); + SetMethod( + context, target, "compileFunction", NativeModuleLoader::CompileFunction); + SetMethod(context, target, "hasCachedBuiltins", HasCachedBuiltins); // internalBinding('native_module') should be frozen target->SetIntegrityLevel(context, IntegrityLevel::kFrozen).FromJust(); } diff --git a/src/node_options.cc b/src/node_options.cc index 16a7716fbc984e..670a04fd7292aa 100644 --- a/src/node_options.cc +++ b/src/node_options.cc @@ -1147,8 +1147,9 @@ void Initialize(Local target, void* priv) { Environment* env = Environment::GetCurrent(context); Isolate* isolate = env->isolate(); - env->SetMethodNoSideEffect(target, "getCLIOptions", GetCLIOptions); - env->SetMethodNoSideEffect(target, "getEmbedderOptions", GetEmbedderOptions); + SetMethodNoSideEffect(context, target, "getCLIOptions", GetCLIOptions); + SetMethodNoSideEffect( + context, target, 
"getEmbedderOptions", GetEmbedderOptions); Local env_settings = Object::New(isolate); NODE_DEFINE_CONSTANT(env_settings, kAllowedInEnvironment); diff --git a/src/node_os.cc b/src/node_os.cc index 046a6106ccd0e5..5b4d6567fb3b0a 100644 --- a/src/node_os.cc +++ b/src/node_os.cc @@ -382,21 +382,23 @@ void Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); - env->SetMethod(target, "getHostname", GetHostname); - env->SetMethod(target, "getLoadAvg", GetLoadAvg); - env->SetMethod(target, "getUptime", GetUptime); - env->SetMethod(target, "getTotalMem", GetTotalMemory); - env->SetMethod(target, "getFreeMem", GetFreeMemory); - env->SetMethod(target, "getCPUs", GetCPUInfo); - env->SetMethod(target, "getInterfaceAddresses", GetInterfaceAddresses); - env->SetMethod(target, "getHomeDirectory", GetHomeDirectory); - env->SetMethod(target, "getUserInfo", GetUserInfo); - env->SetMethod(target, "setPriority", SetPriority); - env->SetMethod(target, "getPriority", GetPriority); - env->SetMethod(target, "getOSInformation", GetOSInformation); - target->Set(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "isBigEndian"), - Boolean::New(env->isolate(), IsBigEndian())).Check(); + SetMethod(context, target, "getHostname", GetHostname); + SetMethod(context, target, "getLoadAvg", GetLoadAvg); + SetMethod(context, target, "getUptime", GetUptime); + SetMethod(context, target, "getTotalMem", GetTotalMemory); + SetMethod(context, target, "getFreeMem", GetFreeMemory); + SetMethod(context, target, "getCPUs", GetCPUInfo); + SetMethod(context, target, "getInterfaceAddresses", GetInterfaceAddresses); + SetMethod(context, target, "getHomeDirectory", GetHomeDirectory); + SetMethod(context, target, "getUserInfo", GetUserInfo); + SetMethod(context, target, "setPriority", SetPriority); + SetMethod(context, target, "getPriority", GetPriority); + SetMethod(context, target, "getOSInformation", GetOSInformation); + target + ->Set(context, + FIXED_ONE_BYTE_STRING(env->isolate(), "isBigEndian"), + Boolean::New(env->isolate(), IsBigEndian())) + .Check(); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/node_perf.cc b/src/node_perf.cc index 253f72157e8a7b..15f45cba78c1e0 100644 --- a/src/node_perf.cc +++ b/src/node_perf.cc @@ -289,19 +289,21 @@ void Initialize(Local target, target->Set(context, performanceEntryString, fn).Check(); env->set_performance_entry_template(fn); - env->SetMethod(target, "markMilestone", MarkMilestone); - env->SetMethod(target, "setupObservers", SetupPerformanceObservers); - env->SetMethod(target, - "installGarbageCollectionTracking", - InstallGarbageCollectionTracking); - env->SetMethod(target, - "removeGarbageCollectionTracking", - RemoveGarbageCollectionTracking); - env->SetMethod(target, "notify", Notify); - env->SetMethod(target, "loopIdleTime", LoopIdleTime); - env->SetMethod(target, "getTimeOrigin", GetTimeOrigin); - env->SetMethod(target, "getTimeOriginTimestamp", GetTimeOriginTimeStamp); - env->SetMethod(target, "createELDHistogram", CreateELDHistogram); + SetMethod(context, target, "markMilestone", MarkMilestone); + SetMethod(context, target, "setupObservers", SetupPerformanceObservers); + SetMethod(context, + target, + "installGarbageCollectionTracking", + InstallGarbageCollectionTracking); + SetMethod(context, + target, + "removeGarbageCollectionTracking", + RemoveGarbageCollectionTracking); + SetMethod(context, target, "notify", Notify); + SetMethod(context, target, "loopIdleTime", LoopIdleTime); + 
SetMethod(context, target, "getTimeOrigin", GetTimeOrigin); + SetMethod(context, target, "getTimeOriginTimestamp", GetTimeOriginTimeStamp); + SetMethod(context, target, "createELDHistogram", CreateELDHistogram); Local constants = Object::New(isolate); diff --git a/src/node_process_methods.cc b/src/node_process_methods.cc index 350a7094baad59..024212132f244c 100644 --- a/src/node_process_methods.cc +++ b/src/node_process_methods.cc @@ -464,8 +464,9 @@ v8::CFunction BindingData::fast_number_(v8::CFunction::Make(FastNumber)); v8::CFunction BindingData::fast_bigint_(v8::CFunction::Make(FastBigInt)); void BindingData::AddMethods() { - env()->SetFastMethod(object(), "hrtime", SlowNumber, &fast_number_); - env()->SetFastMethod(object(), "hrtimeBigInt", SlowBigInt, &fast_bigint_); + Local ctx = env()->context(); + SetFastMethod(ctx, object(), "hrtime", SlowNumber, &fast_number_); + SetFastMethod(ctx, object(), "hrtimeBigInt", SlowBigInt, &fast_bigint_); } void BindingData::RegisterExternalReferences( @@ -560,31 +561,31 @@ static void Initialize(Local target, // define various internal methods if (env->owns_process_state()) { - env->SetMethod(target, "_debugProcess", DebugProcess); - env->SetMethod(target, "_debugEnd", DebugEnd); - env->SetMethod(target, "abort", Abort); - env->SetMethod(target, "causeSegfault", CauseSegfault); - env->SetMethod(target, "chdir", Chdir); + SetMethod(context, target, "_debugProcess", DebugProcess); + SetMethod(context, target, "_debugEnd", DebugEnd); + SetMethod(context, target, "abort", Abort); + SetMethod(context, target, "causeSegfault", CauseSegfault); + SetMethod(context, target, "chdir", Chdir); } - env->SetMethod(target, "umask", Umask); - env->SetMethod(target, "_rawDebug", RawDebug); - env->SetMethod(target, "memoryUsage", MemoryUsage); - env->SetMethod(target, "rss", Rss); - env->SetMethod(target, "cpuUsage", CPUUsage); - env->SetMethod(target, "resourceUsage", ResourceUsage); - - env->SetMethod(target, "_getActiveRequests", GetActiveRequests); - env->SetMethod(target, "_getActiveRequestsInfo", GetActiveRequestsInfo); - env->SetMethod(target, "_getActiveHandles", GetActiveHandles); - env->SetMethod(target, "_getActiveHandlesInfo", GetActiveHandlesInfo); - env->SetMethod(target, "_kill", Kill); - - env->SetMethodNoSideEffect(target, "cwd", Cwd); - env->SetMethod(target, "dlopen", binding::DLOpen); - env->SetMethod(target, "reallyExit", ReallyExit); - env->SetMethodNoSideEffect(target, "uptime", Uptime); - env->SetMethod(target, "patchProcessObject", PatchProcessObject); + SetMethod(context, target, "umask", Umask); + SetMethod(context, target, "_rawDebug", RawDebug); + SetMethod(context, target, "memoryUsage", MemoryUsage); + SetMethod(context, target, "rss", Rss); + SetMethod(context, target, "cpuUsage", CPUUsage); + SetMethod(context, target, "resourceUsage", ResourceUsage); + + SetMethod(context, target, "_getActiveRequestsInfo", GetActiveRequestsInfo); + SetMethod(context, target, "_getActiveRequests", GetActiveRequests); + SetMethod(context, target, "_getActiveHandles", GetActiveHandles); + SetMethod(context, target, "_getActiveHandlesInfo", GetActiveHandlesInfo); + SetMethod(context, target, "_kill", Kill); + + SetMethodNoSideEffect(context, target, "cwd", Cwd); + SetMethod(context, target, "dlopen", binding::DLOpen); + SetMethod(context, target, "reallyExit", ReallyExit); + SetMethodNoSideEffect(context, target, "uptime", Uptime); + SetMethod(context, target, "patchProcessObject", PatchProcessObject); } void 
RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/node_process_object.cc b/src/node_process_object.cc index 29f6569a45e5b2..438ab3c0f09eea 100644 --- a/src/node_process_object.cc +++ b/src/node_process_object.cc @@ -135,7 +135,7 @@ MaybeLocal CreateProcessObject(Environment* env) { // process._rawDebug: may be overwritten later in JS land, but should be // available from the beginning for debugging purposes - env->SetMethod(process, "_rawDebug", RawDebug); + SetMethod(context, process, "_rawDebug", RawDebug); return scope.Escape(process); } diff --git a/src/node_report_module.cc b/src/node_report_module.cc index 160498c6276a3f..b57a933972d6c0 100644 --- a/src/node_report_module.cc +++ b/src/node_report_module.cc @@ -18,6 +18,7 @@ namespace report { using node::Environment; using node::Mutex; +using node::SetMethod; using node::Utf8Value; using v8::Context; using v8::FunctionCallbackInfo; @@ -175,26 +176,29 @@ static void Initialize(Local exports, Local unused, Local context, void* priv) { - Environment* env = Environment::GetCurrent(context); - - env->SetMethod(exports, "writeReport", WriteReport); - env->SetMethod(exports, "getReport", GetReport); - env->SetMethod(exports, "getCompact", GetCompact); - env->SetMethod(exports, "setCompact", SetCompact); - env->SetMethod(exports, "getDirectory", GetDirectory); - env->SetMethod(exports, "setDirectory", SetDirectory); - env->SetMethod(exports, "getFilename", GetFilename); - env->SetMethod(exports, "setFilename", SetFilename); - env->SetMethod(exports, "getSignal", GetSignal); - env->SetMethod(exports, "setSignal", SetSignal); - env->SetMethod(exports, "shouldReportOnFatalError", ShouldReportOnFatalError); - env->SetMethod(exports, "setReportOnFatalError", SetReportOnFatalError); - env->SetMethod(exports, "shouldReportOnSignal", ShouldReportOnSignal); - env->SetMethod(exports, "setReportOnSignal", SetReportOnSignal); - env->SetMethod(exports, "shouldReportOnUncaughtException", - ShouldReportOnUncaughtException); - env->SetMethod(exports, "setReportOnUncaughtException", - SetReportOnUncaughtException); + SetMethod(context, exports, "writeReport", WriteReport); + SetMethod(context, exports, "getReport", GetReport); + SetMethod(context, exports, "getCompact", GetCompact); + SetMethod(context, exports, "setCompact", SetCompact); + SetMethod(context, exports, "getDirectory", GetDirectory); + SetMethod(context, exports, "setDirectory", SetDirectory); + SetMethod(context, exports, "getFilename", GetFilename); + SetMethod(context, exports, "setFilename", SetFilename); + SetMethod(context, exports, "getSignal", GetSignal); + SetMethod(context, exports, "setSignal", SetSignal); + SetMethod( + context, exports, "shouldReportOnFatalError", ShouldReportOnFatalError); + SetMethod(context, exports, "setReportOnFatalError", SetReportOnFatalError); + SetMethod(context, exports, "shouldReportOnSignal", ShouldReportOnSignal); + SetMethod(context, exports, "setReportOnSignal", SetReportOnSignal); + SetMethod(context, + exports, + "shouldReportOnUncaughtException", + ShouldReportOnUncaughtException); + SetMethod(context, + exports, + "setReportOnUncaughtException", + SetReportOnUncaughtException); } void RegisterExternalReferences(node::ExternalReferenceRegistry* registry) { diff --git a/src/node_serdes.cc b/src/node_serdes.cc index f6f0034bc24d09..45a16d9de43703 100644 --- a/src/node_serdes.cc +++ b/src/node_serdes.cc @@ -455,53 +455,62 @@ void Initialize(Local target, Local context, void* priv) { Environment* env = 
Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); + Local ser = - env->NewFunctionTemplate(SerializerContext::New); + NewFunctionTemplate(isolate, SerializerContext::New); ser->InstanceTemplate()->SetInternalFieldCount( SerializerContext::kInternalFieldCount); ser->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(ser, "writeHeader", SerializerContext::WriteHeader); - env->SetProtoMethod(ser, "writeValue", SerializerContext::WriteValue); - env->SetProtoMethod(ser, "releaseBuffer", SerializerContext::ReleaseBuffer); - env->SetProtoMethod(ser, - "transferArrayBuffer", - SerializerContext::TransferArrayBuffer); - env->SetProtoMethod(ser, "writeUint32", SerializerContext::WriteUint32); - env->SetProtoMethod(ser, "writeUint64", SerializerContext::WriteUint64); - env->SetProtoMethod(ser, "writeDouble", SerializerContext::WriteDouble); - env->SetProtoMethod(ser, "writeRawBytes", SerializerContext::WriteRawBytes); - env->SetProtoMethod(ser, - "_setTreatArrayBufferViewsAsHostObjects", - SerializerContext::SetTreatArrayBufferViewsAsHostObjects); + SetProtoMethod(isolate, ser, "writeHeader", SerializerContext::WriteHeader); + SetProtoMethod(isolate, ser, "writeValue", SerializerContext::WriteValue); + SetProtoMethod( + isolate, ser, "releaseBuffer", SerializerContext::ReleaseBuffer); + SetProtoMethod(isolate, + ser, + "transferArrayBuffer", + SerializerContext::TransferArrayBuffer); + SetProtoMethod(isolate, ser, "writeUint32", SerializerContext::WriteUint32); + SetProtoMethod(isolate, ser, "writeUint64", SerializerContext::WriteUint64); + SetProtoMethod(isolate, ser, "writeDouble", SerializerContext::WriteDouble); + SetProtoMethod( + isolate, ser, "writeRawBytes", SerializerContext::WriteRawBytes); + SetProtoMethod(isolate, + ser, + "_setTreatArrayBufferViewsAsHostObjects", + SerializerContext::SetTreatArrayBufferViewsAsHostObjects); ser->ReadOnlyPrototype(); - env->SetConstructorFunction(target, "Serializer", ser); + SetConstructorFunction(context, target, "Serializer", ser); Local des = - env->NewFunctionTemplate(DeserializerContext::New); + NewFunctionTemplate(isolate, DeserializerContext::New); des->InstanceTemplate()->SetInternalFieldCount( DeserializerContext::kInternalFieldCount); des->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(des, "readHeader", DeserializerContext::ReadHeader); - env->SetProtoMethod(des, "readValue", DeserializerContext::ReadValue); - env->SetProtoMethod(des, - "getWireFormatVersion", - DeserializerContext::GetWireFormatVersion); - env->SetProtoMethod(des, - "transferArrayBuffer", - DeserializerContext::TransferArrayBuffer); - env->SetProtoMethod(des, "readUint32", DeserializerContext::ReadUint32); - env->SetProtoMethod(des, "readUint64", DeserializerContext::ReadUint64); - env->SetProtoMethod(des, "readDouble", DeserializerContext::ReadDouble); - env->SetProtoMethod(des, "_readRawBytes", DeserializerContext::ReadRawBytes); + SetProtoMethod(isolate, des, "readHeader", DeserializerContext::ReadHeader); + SetProtoMethod(isolate, des, "readValue", DeserializerContext::ReadValue); + SetProtoMethod(isolate, + des, + "getWireFormatVersion", + DeserializerContext::GetWireFormatVersion); + SetProtoMethod(isolate, + des, + "transferArrayBuffer", + DeserializerContext::TransferArrayBuffer); + SetProtoMethod(isolate, des, "readUint32", DeserializerContext::ReadUint32); + SetProtoMethod(isolate, des, "readUint64", DeserializerContext::ReadUint64); + SetProtoMethod(isolate, des, "readDouble", 
DeserializerContext::ReadDouble); + SetProtoMethod( + isolate, des, "_readRawBytes", DeserializerContext::ReadRawBytes); des->SetLength(1); des->ReadOnlyPrototype(); - env->SetConstructorFunction(target, "Deserializer", des); + SetConstructorFunction(context, target, "Deserializer", des); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/node_snapshotable.cc b/src/node_snapshotable.cc index 9e88c2157d272c..a7551bf1af14c0 100644 --- a/src/node_snapshotable.cc +++ b/src/node_snapshotable.cc @@ -520,13 +520,14 @@ void Initialize(Local target, Local unused, Local context, void* priv) { - Environment* env = Environment::GetCurrent(context); - env->SetMethod(target, "compileSerializeMain", CompileSerializeMain); - env->SetMethod(target, "markBootstrapComplete", MarkBootstrapComplete); - env->SetMethod(target, "setSerializeCallback", SetSerializeCallback); - env->SetMethod(target, "setDeserializeCallback", SetDeserializeCallback); - env->SetMethod( - target, "setDeserializeMainFunction", SetDeserializeMainFunction); + SetMethod(context, target, "compileSerializeMain", CompileSerializeMain); + SetMethod(context, target, "markBootstrapComplete", MarkBootstrapComplete); + SetMethod(context, target, "setSerializeCallback", SetSerializeCallback); + SetMethod(context, target, "setDeserializeCallback", SetDeserializeCallback); + SetMethod(context, + target, + "setDeserializeMainFunction", + SetDeserializeMainFunction); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/node_sockaddr.cc b/src/node_sockaddr.cc index d29414302b7d28..331af1d9dba8e0 100644 --- a/src/node_sockaddr.cc +++ b/src/node_sockaddr.cc @@ -17,6 +17,7 @@ using v8::Context; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::Int32; +using v8::Isolate; using v8::Local; using v8::MaybeLocal; using v8::Object; @@ -697,15 +698,16 @@ Local SocketAddressBlockListWrap::GetConstructorTemplate( Environment* env) { Local tmpl = env->blocklist_constructor_template(); if (tmpl.IsEmpty()) { - tmpl = env->NewFunctionTemplate(SocketAddressBlockListWrap::New); + Isolate* isolate = env->isolate(); + tmpl = NewFunctionTemplate(isolate, SocketAddressBlockListWrap::New); tmpl->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "BlockList")); tmpl->Inherit(BaseObject::GetConstructorTemplate(env)); tmpl->InstanceTemplate()->SetInternalFieldCount(kInternalFieldCount); - env->SetProtoMethod(tmpl, "addAddress", AddAddress); - env->SetProtoMethod(tmpl, "addRange", AddRange); - env->SetProtoMethod(tmpl, "addSubnet", AddSubnet); - env->SetProtoMethod(tmpl, "check", Check); - env->SetProtoMethod(tmpl, "getRules", GetRules); + SetProtoMethod(isolate, tmpl, "addAddress", AddAddress); + SetProtoMethod(isolate, tmpl, "addRange", AddRange); + SetProtoMethod(isolate, tmpl, "addSubnet", AddSubnet); + SetProtoMethod(isolate, tmpl, "check", Check); + SetProtoMethod(isolate, tmpl, "getRules", GetRules); env->set_blocklist_constructor_template(tmpl); } return tmpl; @@ -718,11 +720,11 @@ void SocketAddressBlockListWrap::Initialize( void* priv) { Environment* env = Environment::GetCurrent(context); - env->SetConstructorFunction( - target, - "BlockList", - GetConstructorTemplate(env), - Environment::SetConstructorFunctionFlag::NONE); + SetConstructorFunction(context, + target, + "BlockList", + GetConstructorTemplate(env), + SetConstructorFunctionFlag::NONE); SocketAddressBase::Initialize(env, target); @@ -750,25 +752,26 @@ Local SocketAddressBase::GetConstructorTemplate( Environment* 
env) { Local tmpl = env->socketaddress_constructor_template(); if (tmpl.IsEmpty()) { - tmpl = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + tmpl = NewFunctionTemplate(isolate, New); tmpl->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "SocketAddress")); tmpl->InstanceTemplate()->SetInternalFieldCount( SocketAddressBase::kInternalFieldCount); tmpl->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(tmpl, "detail", Detail); - env->SetProtoMethod(tmpl, "legacyDetail", LegacyDetail); - env->SetProtoMethodNoSideEffect(tmpl, "flowlabel", GetFlowLabel); + SetProtoMethod(isolate, tmpl, "detail", Detail); + SetProtoMethod(isolate, tmpl, "legacyDetail", LegacyDetail); + SetProtoMethodNoSideEffect(isolate, tmpl, "flowlabel", GetFlowLabel); env->set_socketaddress_constructor_template(tmpl); } return tmpl; } void SocketAddressBase::Initialize(Environment* env, Local target) { - env->SetConstructorFunction( - target, - "SocketAddress", - GetConstructorTemplate(env), - Environment::SetConstructorFunctionFlag::NONE); + SetConstructorFunction(env->context(), + target, + "SocketAddress", + GetConstructorTemplate(env), + SetConstructorFunctionFlag::NONE); } BaseObjectPtr SocketAddressBase::Create( diff --git a/src/node_stat_watcher.cc b/src/node_stat_watcher.cc index b9f7903a2fdcb6..de8c099ca58594 100644 --- a/src/node_stat_watcher.cc +++ b/src/node_stat_watcher.cc @@ -37,23 +37,24 @@ using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; using v8::Integer; +using v8::Isolate; using v8::Local; using v8::Object; using v8::Uint32; using v8::Value; - void StatWatcher::Initialize(Environment* env, Local target) { + Isolate* isolate = env->isolate(); HandleScope scope(env->isolate()); - Local t = env->NewFunctionTemplate(StatWatcher::New); + Local t = NewFunctionTemplate(isolate, StatWatcher::New); t->InstanceTemplate()->SetInternalFieldCount( StatWatcher::kInternalFieldCount); t->Inherit(HandleWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "start", StatWatcher::Start); + SetProtoMethod(isolate, t, "start", StatWatcher::Start); - env->SetConstructorFunction(target, "StatWatcher", t); + SetConstructorFunction(env->context(), target, "StatWatcher", t); } void StatWatcher::RegisterExternalReferences( diff --git a/src/node_task_queue.cc b/src/node_task_queue.cc index 9bbdb3180217b3..2e62a072ee493d 100644 --- a/src/node_task_queue.cc +++ b/src/node_task_queue.cc @@ -196,9 +196,9 @@ static void Initialize(Local target, Environment* env = Environment::GetCurrent(context); Isolate* isolate = env->isolate(); - env->SetMethod(target, "enqueueMicrotask", EnqueueMicrotask); - env->SetMethod(target, "setTickCallback", SetTickCallback); - env->SetMethod(target, "runMicrotasks", RunMicrotasks); + SetMethod(context, target, "enqueueMicrotask", EnqueueMicrotask); + SetMethod(context, target, "setTickCallback", SetTickCallback); + SetMethod(context, target, "runMicrotasks", RunMicrotasks); target->Set(env->context(), FIXED_ONE_BYTE_STRING(isolate, "tickInfo"), env->tick_info()->fields().GetJSArray()).Check(); @@ -212,9 +212,8 @@ static void Initialize(Local target, target->Set(env->context(), FIXED_ONE_BYTE_STRING(isolate, "promiseRejectEvents"), events).Check(); - env->SetMethod(target, - "setPromiseRejectCallback", - SetPromiseRejectCallback); + SetMethod( + context, target, "setPromiseRejectCallback", SetPromiseRejectCallback); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/node_trace_events.cc 
b/src/node_trace_events.cc index af60aff4ab7bbe..fd277b997dea6c 100644 --- a/src/node_trace_events.cc +++ b/src/node_trace_events.cc @@ -20,6 +20,7 @@ using v8::Context; using v8::Function; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; +using v8::Isolate; using v8::Local; using v8::NewStringType; using v8::Object; @@ -124,21 +125,23 @@ void NodeCategorySet::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - env->SetMethod(target, "getEnabledCategories", GetEnabledCategories); - env->SetMethod( - target, "setTraceCategoryStateUpdateHandler", - SetTraceCategoryStateUpdateHandler); + SetMethod(context, target, "getEnabledCategories", GetEnabledCategories); + SetMethod(context, + target, + "setTraceCategoryStateUpdateHandler", + SetTraceCategoryStateUpdateHandler); Local category_set = - env->NewFunctionTemplate(NodeCategorySet::New); + NewFunctionTemplate(isolate, NodeCategorySet::New); category_set->InstanceTemplate()->SetInternalFieldCount( NodeCategorySet::kInternalFieldCount); category_set->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(category_set, "enable", NodeCategorySet::Enable); - env->SetProtoMethod(category_set, "disable", NodeCategorySet::Disable); + SetProtoMethod(isolate, category_set, "enable", NodeCategorySet::Enable); + SetProtoMethod(isolate, category_set, "disable", NodeCategorySet::Disable); - env->SetConstructorFunction(target, "CategorySet", category_set); + SetConstructorFunction(context, target, "CategorySet", category_set); Local isTraceCategoryEnabled = FIXED_ONE_BYTE_STRING(env->isolate(), "isTraceCategoryEnabled"); diff --git a/src/node_types.cc b/src/node_types.cc index 1889d8c304110b..87550a1428bd34 100644 --- a/src/node_types.cc +++ b/src/node_types.cc @@ -65,16 +65,12 @@ void InitializeTypes(Local target, Local unused, Local context, void* priv) { - Environment* env = Environment::GetCurrent(context); - -#define V(type) env->SetMethodNoSideEffect(target, \ - "is" #type, \ - Is##type); +#define V(type) SetMethodNoSideEffect(context, target, "is" #type, Is##type); VALUE_METHOD_MAP(V) #undef V - env->SetMethodNoSideEffect(target, "isAnyArrayBuffer", IsAnyArrayBuffer); - env->SetMethodNoSideEffect(target, "isBoxedPrimitive", IsBoxedPrimitive); + SetMethodNoSideEffect(context, target, "isAnyArrayBuffer", IsAnyArrayBuffer); + SetMethodNoSideEffect(context, target, "isBoxedPrimitive", IsBoxedPrimitive); } } // anonymous namespace diff --git a/src/node_url.cc b/src/node_url.cc index 1f9f4e40b084cb..ecc7cee6a39ffb 100644 --- a/src/node_url.cc +++ b/src/node_url.cc @@ -1800,12 +1800,11 @@ void Initialize(Local target, Local unused, Local context, void* priv) { - Environment* env = Environment::GetCurrent(context); - env->SetMethod(target, "parse", Parse); - env->SetMethodNoSideEffect(target, "encodeAuth", EncodeAuthSet); - env->SetMethodNoSideEffect(target, "domainToASCII", DomainToASCII); - env->SetMethodNoSideEffect(target, "domainToUnicode", DomainToUnicode); - env->SetMethod(target, "setURLConstructor", SetURLConstructor); + SetMethod(context, target, "parse", Parse); + SetMethodNoSideEffect(context, target, "encodeAuth", EncodeAuthSet); + SetMethodNoSideEffect(context, target, "domainToASCII", DomainToASCII); + SetMethodNoSideEffect(context, target, "domainToUnicode", DomainToUnicode); + SetMethod(context, target, "setURLConstructor", SetURLConstructor); #define XX(name, _) NODE_DEFINE_CONSTANT(target, name); FLAGS(XX) diff --git 
a/src/node_util.cc b/src/node_util.cc index 5b5dab36f08fbf..1613a276c58111 100644 --- a/src/node_util.cc +++ b/src/node_util.cc @@ -347,6 +347,7 @@ void Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); #define V(name, _) \ target->Set(context, \ @@ -368,18 +369,21 @@ void Initialize(Local target, V(kRejected); #undef V - env->SetMethodNoSideEffect(target, "getHiddenValue", GetHiddenValue); - env->SetMethod(target, "setHiddenValue", SetHiddenValue); - env->SetMethodNoSideEffect(target, "getPromiseDetails", GetPromiseDetails); - env->SetMethodNoSideEffect(target, "getProxyDetails", GetProxyDetails); - env->SetMethodNoSideEffect(target, "previewEntries", PreviewEntries); - env->SetMethodNoSideEffect(target, "getOwnNonIndexProperties", - GetOwnNonIndexProperties); - env->SetMethodNoSideEffect(target, "getConstructorName", GetConstructorName); - env->SetMethodNoSideEffect(target, "getExternalValue", GetExternalValue); - env->SetMethod(target, "sleep", Sleep); - - env->SetMethod(target, "arrayBufferViewHasBuffer", ArrayBufferViewHasBuffer); + SetMethodNoSideEffect(context, target, "getHiddenValue", GetHiddenValue); + SetMethod(context, target, "setHiddenValue", SetHiddenValue); + SetMethodNoSideEffect( + context, target, "getPromiseDetails", GetPromiseDetails); + SetMethodNoSideEffect(context, target, "getProxyDetails", GetProxyDetails); + SetMethodNoSideEffect(context, target, "previewEntries", PreviewEntries); + SetMethodNoSideEffect( + context, target, "getOwnNonIndexProperties", GetOwnNonIndexProperties); + SetMethodNoSideEffect( + context, target, "getConstructorName", GetConstructorName); + SetMethodNoSideEffect(context, target, "getExternalValue", GetExternalValue); + SetMethod(context, target, "sleep", Sleep); + + SetMethod( + context, target, "arrayBufferViewHasBuffer", ArrayBufferViewHasBuffer); Local constants = Object::New(env->isolate()); NODE_DEFINE_CONSTANT(constants, ALL_PROPERTIES); NODE_DEFINE_CONSTANT(constants, ONLY_WRITABLE); @@ -394,24 +398,24 @@ void Initialize(Local target, Local should_abort_on_uncaught_toggle = FIXED_ONE_BYTE_STRING(env->isolate(), "shouldAbortOnUncaughtToggle"); CHECK(target - ->Set(env->context(), + ->Set(context, should_abort_on_uncaught_toggle, env->should_abort_on_uncaught_toggle().GetJSArray()) .FromJust()); Local weak_ref = - env->NewFunctionTemplate(WeakReference::New); + NewFunctionTemplate(isolate, WeakReference::New); weak_ref->InstanceTemplate()->SetInternalFieldCount( WeakReference::kInternalFieldCount); weak_ref->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(weak_ref, "get", WeakReference::Get); - env->SetProtoMethod(weak_ref, "incRef", WeakReference::IncRef); - env->SetProtoMethod(weak_ref, "decRef", WeakReference::DecRef); - env->SetConstructorFunction(target, "WeakReference", weak_ref); + SetProtoMethod(isolate, weak_ref, "get", WeakReference::Get); + SetProtoMethod(isolate, weak_ref, "incRef", WeakReference::IncRef); + SetProtoMethod(isolate, weak_ref, "decRef", WeakReference::DecRef); + SetConstructorFunction(context, target, "WeakReference", weak_ref); - env->SetMethod(target, "guessHandleType", GuessHandleType); + SetMethod(context, target, "guessHandleType", GuessHandleType); - env->SetMethodNoSideEffect(target, "toUSVString", ToUSVString); + SetMethodNoSideEffect(context, target, "toUSVString", ToUSVString); } } // namespace util diff --git a/src/node_v8.cc b/src/node_v8.cc index cf7a494b7c7230..5a1346a904e75e 
100644 --- a/src/node_v8.cc +++ b/src/node_v8.cc @@ -206,13 +206,17 @@ void Initialize(Local target, env->AddBindingData(context, target); if (binding_data == nullptr) return; - env->SetMethodNoSideEffect(target, "cachedDataVersionTag", - CachedDataVersionTag); - env->SetMethod( - target, "updateHeapStatisticsBuffer", UpdateHeapStatisticsBuffer); - - env->SetMethod( - target, "updateHeapCodeStatisticsBuffer", UpdateHeapCodeStatisticsBuffer); + SetMethodNoSideEffect( + context, target, "cachedDataVersionTag", CachedDataVersionTag); + SetMethod(context, + target, + "updateHeapStatisticsBuffer", + UpdateHeapStatisticsBuffer); + + SetMethod(context, + target, + "updateHeapCodeStatisticsBuffer", + UpdateHeapCodeStatisticsBuffer); size_t number_of_heap_spaces = env->isolate()->NumberOfHeapSpaces(); @@ -225,19 +229,21 @@ void Initialize(Local target, heap_spaces[i] = String::NewFromUtf8(env->isolate(), s.space_name()) .ToLocalChecked(); } - target->Set(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "kHeapSpaces"), - Array::New(env->isolate(), - heap_spaces.out(), - number_of_heap_spaces)).Check(); + target + ->Set( + context, + FIXED_ONE_BYTE_STRING(env->isolate(), "kHeapSpaces"), + Array::New(env->isolate(), heap_spaces.out(), number_of_heap_spaces)) + .Check(); - env->SetMethod(target, - "updateHeapSpaceStatisticsBuffer", - UpdateHeapSpaceStatisticsBuffer); + SetMethod(context, + target, + "updateHeapSpaceStatisticsBuffer", + UpdateHeapSpaceStatisticsBuffer); #define V(i, _, name) \ target \ - ->Set(env->context(), \ + ->Set(context, \ FIXED_ONE_BYTE_STRING(env->isolate(), #name), \ Uint32::NewFromUnsigned(env->isolate(), i)) \ .Check(); @@ -248,7 +254,7 @@ void Initialize(Local target, #undef V // Export symbols used by v8.setFlagsFromString() - env->SetMethod(target, "setFlagsFromString", SetFlagsFromString); + SetMethod(context, target, "setFlagsFromString", SetFlagsFromString); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/node_wasi.cc b/src/node_wasi.cc index 965a619c8d4acd..a1b244018a6760 100644 --- a/src/node_wasi.cc +++ b/src/node_wasi.cc @@ -1668,62 +1668,67 @@ static void Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - Local tmpl = env->NewFunctionTemplate(WASI::New); + Local tmpl = NewFunctionTemplate(isolate, WASI::New); tmpl->InstanceTemplate()->SetInternalFieldCount(WASI::kInternalFieldCount); tmpl->Inherit(BaseObject::GetConstructorTemplate(env)); - env->SetProtoMethod(tmpl, "args_get", WASI::ArgsGet); - env->SetProtoMethod(tmpl, "args_sizes_get", WASI::ArgsSizesGet); - env->SetProtoMethod(tmpl, "clock_res_get", WASI::ClockResGet); - env->SetProtoMethod(tmpl, "clock_time_get", WASI::ClockTimeGet); - env->SetProtoMethod(tmpl, "environ_get", WASI::EnvironGet); - env->SetProtoMethod(tmpl, "environ_sizes_get", WASI::EnvironSizesGet); - env->SetProtoMethod(tmpl, "fd_advise", WASI::FdAdvise); - env->SetProtoMethod(tmpl, "fd_allocate", WASI::FdAllocate); - env->SetProtoMethod(tmpl, "fd_close", WASI::FdClose); - env->SetProtoMethod(tmpl, "fd_datasync", WASI::FdDatasync); - env->SetProtoMethod(tmpl, "fd_fdstat_get", WASI::FdFdstatGet); - env->SetProtoMethod(tmpl, "fd_fdstat_set_flags", WASI::FdFdstatSetFlags); - env->SetProtoMethod(tmpl, "fd_fdstat_set_rights", WASI::FdFdstatSetRights); - env->SetProtoMethod(tmpl, "fd_filestat_get", WASI::FdFilestatGet); - env->SetProtoMethod(tmpl, "fd_filestat_set_size", WASI::FdFilestatSetSize); - 
env->SetProtoMethod(tmpl, "fd_filestat_set_times", WASI::FdFilestatSetTimes); - env->SetProtoMethod(tmpl, "fd_pread", WASI::FdPread); - env->SetProtoMethod(tmpl, "fd_prestat_get", WASI::FdPrestatGet); - env->SetProtoMethod(tmpl, "fd_prestat_dir_name", WASI::FdPrestatDirName); - env->SetProtoMethod(tmpl, "fd_pwrite", WASI::FdPwrite); - env->SetProtoMethod(tmpl, "fd_read", WASI::FdRead); - env->SetProtoMethod(tmpl, "fd_readdir", WASI::FdReaddir); - env->SetProtoMethod(tmpl, "fd_renumber", WASI::FdRenumber); - env->SetProtoMethod(tmpl, "fd_seek", WASI::FdSeek); - env->SetProtoMethod(tmpl, "fd_sync", WASI::FdSync); - env->SetProtoMethod(tmpl, "fd_tell", WASI::FdTell); - env->SetProtoMethod(tmpl, "fd_write", WASI::FdWrite); - env->SetProtoMethod(tmpl, "path_create_directory", WASI::PathCreateDirectory); - env->SetProtoMethod(tmpl, "path_filestat_get", WASI::PathFilestatGet); - env->SetProtoMethod(tmpl, - "path_filestat_set_times", - WASI::PathFilestatSetTimes); - env->SetProtoMethod(tmpl, "path_link", WASI::PathLink); - env->SetProtoMethod(tmpl, "path_open", WASI::PathOpen); - env->SetProtoMethod(tmpl, "path_readlink", WASI::PathReadlink); - env->SetProtoMethod(tmpl, "path_remove_directory", WASI::PathRemoveDirectory); - env->SetProtoMethod(tmpl, "path_rename", WASI::PathRename); - env->SetProtoMethod(tmpl, "path_symlink", WASI::PathSymlink); - env->SetProtoMethod(tmpl, "path_unlink_file", WASI::PathUnlinkFile); - env->SetProtoMethod(tmpl, "poll_oneoff", WASI::PollOneoff); - env->SetProtoMethod(tmpl, "proc_exit", WASI::ProcExit); - env->SetProtoMethod(tmpl, "proc_raise", WASI::ProcRaise); - env->SetProtoMethod(tmpl, "random_get", WASI::RandomGet); - env->SetProtoMethod(tmpl, "sched_yield", WASI::SchedYield); - env->SetProtoMethod(tmpl, "sock_recv", WASI::SockRecv); - env->SetProtoMethod(tmpl, "sock_send", WASI::SockSend); - env->SetProtoMethod(tmpl, "sock_shutdown", WASI::SockShutdown); - - env->SetInstanceMethod(tmpl, "_setMemory", WASI::_SetMemory); - - env->SetConstructorFunction(target, "WASI", tmpl); + SetProtoMethod(isolate, tmpl, "args_get", WASI::ArgsGet); + SetProtoMethod(isolate, tmpl, "args_sizes_get", WASI::ArgsSizesGet); + SetProtoMethod(isolate, tmpl, "clock_res_get", WASI::ClockResGet); + SetProtoMethod(isolate, tmpl, "clock_time_get", WASI::ClockTimeGet); + SetProtoMethod(isolate, tmpl, "environ_get", WASI::EnvironGet); + SetProtoMethod(isolate, tmpl, "environ_sizes_get", WASI::EnvironSizesGet); + SetProtoMethod(isolate, tmpl, "fd_advise", WASI::FdAdvise); + SetProtoMethod(isolate, tmpl, "fd_allocate", WASI::FdAllocate); + SetProtoMethod(isolate, tmpl, "fd_close", WASI::FdClose); + SetProtoMethod(isolate, tmpl, "fd_datasync", WASI::FdDatasync); + SetProtoMethod(isolate, tmpl, "fd_fdstat_get", WASI::FdFdstatGet); + SetProtoMethod(isolate, tmpl, "fd_fdstat_set_flags", WASI::FdFdstatSetFlags); + SetProtoMethod( + isolate, tmpl, "fd_fdstat_set_rights", WASI::FdFdstatSetRights); + SetProtoMethod(isolate, tmpl, "fd_filestat_get", WASI::FdFilestatGet); + SetProtoMethod( + isolate, tmpl, "fd_filestat_set_size", WASI::FdFilestatSetSize); + SetProtoMethod( + isolate, tmpl, "fd_filestat_set_times", WASI::FdFilestatSetTimes); + SetProtoMethod(isolate, tmpl, "fd_pread", WASI::FdPread); + SetProtoMethod(isolate, tmpl, "fd_prestat_get", WASI::FdPrestatGet); + SetProtoMethod(isolate, tmpl, "fd_prestat_dir_name", WASI::FdPrestatDirName); + SetProtoMethod(isolate, tmpl, "fd_pwrite", WASI::FdPwrite); + SetProtoMethod(isolate, tmpl, "fd_read", WASI::FdRead); + SetProtoMethod(isolate, tmpl, 
"fd_readdir", WASI::FdReaddir); + SetProtoMethod(isolate, tmpl, "fd_renumber", WASI::FdRenumber); + SetProtoMethod(isolate, tmpl, "fd_seek", WASI::FdSeek); + SetProtoMethod(isolate, tmpl, "fd_sync", WASI::FdSync); + SetProtoMethod(isolate, tmpl, "fd_tell", WASI::FdTell); + SetProtoMethod(isolate, tmpl, "fd_write", WASI::FdWrite); + SetProtoMethod( + isolate, tmpl, "path_create_directory", WASI::PathCreateDirectory); + SetProtoMethod(isolate, tmpl, "path_filestat_get", WASI::PathFilestatGet); + SetProtoMethod( + isolate, tmpl, "path_filestat_set_times", WASI::PathFilestatSetTimes); + SetProtoMethod(isolate, tmpl, "path_link", WASI::PathLink); + SetProtoMethod(isolate, tmpl, "path_open", WASI::PathOpen); + SetProtoMethod(isolate, tmpl, "path_readlink", WASI::PathReadlink); + SetProtoMethod( + isolate, tmpl, "path_remove_directory", WASI::PathRemoveDirectory); + SetProtoMethod(isolate, tmpl, "path_rename", WASI::PathRename); + SetProtoMethod(isolate, tmpl, "path_symlink", WASI::PathSymlink); + SetProtoMethod(isolate, tmpl, "path_unlink_file", WASI::PathUnlinkFile); + SetProtoMethod(isolate, tmpl, "poll_oneoff", WASI::PollOneoff); + SetProtoMethod(isolate, tmpl, "proc_exit", WASI::ProcExit); + SetProtoMethod(isolate, tmpl, "proc_raise", WASI::ProcRaise); + SetProtoMethod(isolate, tmpl, "random_get", WASI::RandomGet); + SetProtoMethod(isolate, tmpl, "sched_yield", WASI::SchedYield); + SetProtoMethod(isolate, tmpl, "sock_recv", WASI::SockRecv); + SetProtoMethod(isolate, tmpl, "sock_send", WASI::SockSend); + SetProtoMethod(isolate, tmpl, "sock_shutdown", WASI::SockShutdown); + + SetInstanceMethod(isolate, tmpl, "_setMemory", WASI::_SetMemory); + + SetConstructorFunction(context, target, "WASI", tmpl); } diff --git a/src/node_wasm_web_api.cc b/src/node_wasm_web_api.cc index 67437034bbee34..7fc423978b2a7c 100644 --- a/src/node_wasm_web_api.cc +++ b/src/node_wasm_web_api.cc @@ -13,6 +13,7 @@ using v8::Context; using v8::Function; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; +using v8::Isolate; using v8::Local; using v8::MaybeLocal; using v8::Object; @@ -25,15 +26,16 @@ Local WasmStreamingObject::Initialize(Environment* env) { return templ; } - Local t = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local t = NewFunctionTemplate(isolate, New); t->Inherit(BaseObject::GetConstructorTemplate(env)); t->InstanceTemplate()->SetInternalFieldCount( WasmStreamingObject::kInternalFieldCount); - env->SetProtoMethod(t, "setURL", SetURL); - env->SetProtoMethod(t, "push", Push); - env->SetProtoMethod(t, "finish", Finish); - env->SetProtoMethod(t, "abort", Abort); + SetProtoMethod(isolate, t, "setURL", SetURL); + SetProtoMethod(isolate, t, "push", Push); + SetProtoMethod(isolate, t, "finish", Finish); + SetProtoMethod(isolate, t, "abort", Abort); auto function = t->GetFunction(env->context()).ToLocalChecked(); env->set_wasm_streaming_object_constructor(function); @@ -194,8 +196,7 @@ void Initialize(Local target, Local, Local context, void*) { - Environment* env = Environment::GetCurrent(context); - env->SetMethod(target, "setImplementation", SetImplementation); + SetMethod(context, target, "setImplementation", SetImplementation); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/node_watchdog.cc b/src/node_watchdog.cc index 31c8f744a3b320..7062d4aa661abb 100644 --- a/src/node_watchdog.cc +++ b/src/node_watchdog.cc @@ -34,6 +34,7 @@ namespace node { using v8::Context; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; +using 
v8::Isolate; using v8::Local; using v8::Object; using v8::Value; @@ -123,15 +124,17 @@ SignalPropagation SigintWatchdog::HandleSigint() { } void TraceSigintWatchdog::Init(Environment* env, Local target) { - Local constructor = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local constructor = NewFunctionTemplate(isolate, New); constructor->InstanceTemplate()->SetInternalFieldCount( TraceSigintWatchdog::kInternalFieldCount); constructor->Inherit(HandleWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(constructor, "start", Start); - env->SetProtoMethod(constructor, "stop", Stop); + SetProtoMethod(isolate, constructor, "start", Start); + SetProtoMethod(isolate, constructor, "stop", Stop); - env->SetConstructorFunction(target, "TraceSigintWatchdog", constructor); + SetConstructorFunction( + env->context(), target, "TraceSigintWatchdog", constructor); } void TraceSigintWatchdog::New(const FunctionCallbackInfo& args) { diff --git a/src/node_worker.cc b/src/node_worker.cc index 8385bc96231257..5ddaae49c7ae00 100644 --- a/src/node_worker.cc +++ b/src/node_worker.cc @@ -840,65 +840,66 @@ void InitWorker(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); { - Local w = env->NewFunctionTemplate(Worker::New); + Local w = NewFunctionTemplate(isolate, Worker::New); w->InstanceTemplate()->SetInternalFieldCount( Worker::kInternalFieldCount); w->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(w, "startThread", Worker::StartThread); - env->SetProtoMethod(w, "stopThread", Worker::StopThread); - env->SetProtoMethod(w, "hasRef", Worker::HasRef); - env->SetProtoMethod(w, "ref", Worker::Ref); - env->SetProtoMethod(w, "unref", Worker::Unref); - env->SetProtoMethod(w, "getResourceLimits", Worker::GetResourceLimits); - env->SetProtoMethod(w, "takeHeapSnapshot", Worker::TakeHeapSnapshot); - env->SetProtoMethod(w, "loopIdleTime", Worker::LoopIdleTime); - env->SetProtoMethod(w, "loopStartTime", Worker::LoopStartTime); - - env->SetConstructorFunction(target, "Worker", w); + SetProtoMethod(isolate, w, "startThread", Worker::StartThread); + SetProtoMethod(isolate, w, "stopThread", Worker::StopThread); + SetProtoMethod(isolate, w, "hasRef", Worker::HasRef); + SetProtoMethod(isolate, w, "ref", Worker::Ref); + SetProtoMethod(isolate, w, "unref", Worker::Unref); + SetProtoMethod(isolate, w, "getResourceLimits", Worker::GetResourceLimits); + SetProtoMethod(isolate, w, "takeHeapSnapshot", Worker::TakeHeapSnapshot); + SetProtoMethod(isolate, w, "loopIdleTime", Worker::LoopIdleTime); + SetProtoMethod(isolate, w, "loopStartTime", Worker::LoopStartTime); + + SetConstructorFunction(context, target, "Worker", w); } { - Local wst = FunctionTemplate::New(env->isolate()); + Local wst = NewFunctionTemplate(isolate, nullptr); wst->InstanceTemplate()->SetInternalFieldCount( WorkerHeapSnapshotTaker::kInternalFieldCount); wst->Inherit(AsyncWrap::GetConstructorTemplate(env)); Local wst_string = - FIXED_ONE_BYTE_STRING(env->isolate(), "WorkerHeapSnapshotTaker"); + FIXED_ONE_BYTE_STRING(isolate, "WorkerHeapSnapshotTaker"); wst->SetClassName(wst_string); env->set_worker_heap_snapshot_taker_template(wst->InstanceTemplate()); } - env->SetMethod(target, "getEnvMessagePort", GetEnvMessagePort); + SetMethod(context, target, "getEnvMessagePort", GetEnvMessagePort); target ->Set(env->context(), env->thread_id_string(), - Number::New(env->isolate(), static_cast(env->thread_id()))) + Number::New(isolate, 
static_cast(env->thread_id()))) .Check(); target ->Set(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "isMainThread"), - Boolean::New(env->isolate(), env->is_main_thread())) + FIXED_ONE_BYTE_STRING(isolate, "isMainThread"), + Boolean::New(isolate, env->is_main_thread())) .Check(); target ->Set(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "ownsProcessState"), - Boolean::New(env->isolate(), env->owns_process_state())) + FIXED_ONE_BYTE_STRING(isolate, "ownsProcessState"), + Boolean::New(isolate, env->owns_process_state())) .Check(); if (!env->is_main_thread()) { target ->Set(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "resourceLimits"), - env->worker_context()->GetResourceLimits(env->isolate())) + FIXED_ONE_BYTE_STRING(isolate, "resourceLimits"), + env->worker_context()->GetResourceLimits(isolate)) .Check(); } diff --git a/src/node_zlib.cc b/src/node_zlib.cc index 5930ffd7a8ae8e..e2433d887b421a 100644 --- a/src/node_zlib.cc +++ b/src/node_zlib.cc @@ -52,6 +52,7 @@ using v8::FunctionTemplate; using v8::HandleScope; using v8::Int32; using v8::Integer; +using v8::Isolate; using v8::Local; using v8::Object; using v8::Uint32Array; @@ -1256,21 +1257,22 @@ CompressionError BrotliDecoderContext::GetErrorInfo() const { template struct MakeClass { static void Make(Environment* env, Local target, const char* name) { - Local z = env->NewFunctionTemplate(Stream::New); + Isolate* isolate = env->isolate(); + Local z = NewFunctionTemplate(isolate, Stream::New); z->InstanceTemplate()->SetInternalFieldCount( Stream::kInternalFieldCount); z->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(z, "write", Stream::template Write); - env->SetProtoMethod(z, "writeSync", Stream::template Write); - env->SetProtoMethod(z, "close", Stream::Close); + SetProtoMethod(isolate, z, "write", Stream::template Write); + SetProtoMethod(isolate, z, "writeSync", Stream::template Write); + SetProtoMethod(isolate, z, "close", Stream::Close); - env->SetProtoMethod(z, "init", Stream::Init); - env->SetProtoMethod(z, "params", Stream::Params); - env->SetProtoMethod(z, "reset", Stream::Reset); + SetProtoMethod(isolate, z, "init", Stream::Init); + SetProtoMethod(isolate, z, "params", Stream::Params); + SetProtoMethod(isolate, z, "reset", Stream::Reset); - env->SetConstructorFunction(target, name, z); + SetConstructorFunction(env->context(), target, name, z); } static void Make(ExternalReferenceRegistry* registry) { diff --git a/src/pipe_wrap.cc b/src/pipe_wrap.cc index 85daf4a1e618ae..4f1bfda569f910 100644 --- a/src/pipe_wrap.cc +++ b/src/pipe_wrap.cc @@ -41,6 +41,7 @@ using v8::Function; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::Int32; +using v8::Isolate; using v8::Local; using v8::MaybeLocal; using v8::Object; @@ -67,31 +68,32 @@ void PipeWrap::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - Local t = env->NewFunctionTemplate(New); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate() ->SetInternalFieldCount(StreamBase::kInternalFieldCount); t->Inherit(LibuvStreamWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "bind", Bind); - env->SetProtoMethod(t, "listen", Listen); - env->SetProtoMethod(t, "connect", Connect); - env->SetProtoMethod(t, "open", Open); + SetProtoMethod(isolate, t, "bind", Bind); + SetProtoMethod(isolate, t, "listen", Listen); + SetProtoMethod(isolate, t, "connect", Connect); + SetProtoMethod(isolate, t, "open", Open); 
#ifdef _WIN32 - env->SetProtoMethod(t, "setPendingInstances", SetPendingInstances); + SetProtoMethod(isolate, t, "setPendingInstances", SetPendingInstances); #endif - env->SetProtoMethod(t, "fchmod", Fchmod); + SetProtoMethod(isolate, t, "fchmod", Fchmod); - env->SetConstructorFunction(target, "Pipe", t); + SetConstructorFunction(context, target, "Pipe", t); env->set_pipe_constructor_template(t); // Create FunctionTemplate for PipeConnectWrap. auto cwt = BaseObject::MakeLazilyInitializedJSTemplate(env); cwt->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetConstructorFunction(target, "PipeConnectWrap", cwt); + SetConstructorFunction(context, target, "PipeConnectWrap", cwt); // Define constants Local constants = Object::New(env->isolate()); diff --git a/src/process_wrap.cc b/src/process_wrap.cc index 6905cacb1c8193..3a55048ef79a0e 100644 --- a/src/process_wrap.cc +++ b/src/process_wrap.cc @@ -36,6 +36,7 @@ using v8::FunctionTemplate; using v8::HandleScope; using v8::Int32; using v8::Integer; +using v8::Isolate; using v8::Local; using v8::Number; using v8::Object; @@ -51,16 +52,17 @@ class ProcessWrap : public HandleWrap { Local context, void* priv) { Environment* env = Environment::GetCurrent(context); - Local constructor = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local constructor = NewFunctionTemplate(isolate, New); constructor->InstanceTemplate()->SetInternalFieldCount( ProcessWrap::kInternalFieldCount); constructor->Inherit(HandleWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(constructor, "spawn", Spawn); - env->SetProtoMethod(constructor, "kill", Kill); + SetProtoMethod(isolate, constructor, "spawn", Spawn); + SetProtoMethod(isolate, constructor, "kill", Kill); - env->SetConstructorFunction(target, "Process", constructor); + SetConstructorFunction(context, target, "Process", constructor); } SET_NO_MEMORY_INFO() diff --git a/src/signal_wrap.cc b/src/signal_wrap.cc index df7f94eeec8451..d5dfdd51da656a 100644 --- a/src/signal_wrap.cc +++ b/src/signal_wrap.cc @@ -34,6 +34,7 @@ using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; using v8::Integer; +using v8::Isolate; using v8::Local; using v8::Object; using v8::Value; @@ -52,15 +53,16 @@ class SignalWrap : public HandleWrap { Local context, void* priv) { Environment* env = Environment::GetCurrent(context); - Local constructor = env->NewFunctionTemplate(New); + Isolate* isolate = env->isolate(); + Local constructor = NewFunctionTemplate(isolate, New); constructor->InstanceTemplate()->SetInternalFieldCount( SignalWrap::kInternalFieldCount); constructor->Inherit(HandleWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(constructor, "start", Start); - env->SetProtoMethod(constructor, "stop", Stop); + SetProtoMethod(isolate, constructor, "start", Start); + SetProtoMethod(isolate, constructor, "stop", Stop); - env->SetConstructorFunction(target, "Signal", constructor); + SetConstructorFunction(context, target, "Signal", constructor); } static void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/spawn_sync.cc b/src/spawn_sync.cc index afd08519d7f897..2db7d82d32f13a 100644 --- a/src/spawn_sync.cc +++ b/src/spawn_sync.cc @@ -363,8 +363,7 @@ void SyncProcessRunner::Initialize(Local target, Local unused, Local context, void* priv) { - Environment* env = Environment::GetCurrent(context); - env->SetMethod(target, "spawn", Spawn); + SetMethod(context, target, "spawn", Spawn); } diff --git a/src/stream_base.cc b/src/stream_base.cc 
index 783e12a36c8e52..8701434c24fb26 100644 --- a/src/stream_base.cc +++ b/src/stream_base.cc @@ -400,21 +400,24 @@ void StreamBase::AddMethod(Environment* env, Local t, JSMethodFunction* stream_method, Local string) { + Isolate* isolate = env->isolate(); Local templ = - env->NewFunctionTemplate(stream_method, - signature, - ConstructorBehavior::kThrow, - SideEffectType::kHasNoSideEffect); + NewFunctionTemplate(isolate, + stream_method, + signature, + ConstructorBehavior::kThrow, + SideEffectType::kHasNoSideEffect); t->PrototypeTemplate()->SetAccessorProperty( string, templ, Local(), attributes); } void StreamBase::AddMethods(Environment* env, Local t) { - HandleScope scope(env->isolate()); + Isolate* isolate = env->isolate(); + HandleScope scope(isolate); enum PropertyAttribute attributes = static_cast(ReadOnly | DontDelete | DontEnum); - Local sig = Signature::New(env->isolate(), t); + Local sig = Signature::New(isolate, t); AddMethod(env, sig, attributes, t, GetFD, env->fd_string()); AddMethod( @@ -422,32 +425,32 @@ void StreamBase::AddMethods(Environment* env, Local t) { AddMethod(env, sig, attributes, t, GetBytesRead, env->bytes_read_string()); AddMethod( env, sig, attributes, t, GetBytesWritten, env->bytes_written_string()); - env->SetProtoMethod(t, "readStart", JSMethod<&StreamBase::ReadStartJS>); - env->SetProtoMethod(t, "readStop", JSMethod<&StreamBase::ReadStopJS>); - env->SetProtoMethod(t, "shutdown", JSMethod<&StreamBase::Shutdown>); - env->SetProtoMethod(t, - "useUserBuffer", - JSMethod<&StreamBase::UseUserBuffer>); - env->SetProtoMethod(t, "writev", JSMethod<&StreamBase::Writev>); - env->SetProtoMethod(t, "writeBuffer", JSMethod<&StreamBase::WriteBuffer>); - env->SetProtoMethod( - t, "writeAsciiString", JSMethod<&StreamBase::WriteString>); - env->SetProtoMethod( - t, "writeUtf8String", JSMethod<&StreamBase::WriteString>); - env->SetProtoMethod( - t, "writeUcs2String", JSMethod<&StreamBase::WriteString>); - env->SetProtoMethod( - t, "writeLatin1String", JSMethod<&StreamBase::WriteString>); - t->PrototypeTemplate()->Set(FIXED_ONE_BYTE_STRING(env->isolate(), - "isStreamBase"), - True(env->isolate())); + SetProtoMethod(isolate, t, "readStart", JSMethod<&StreamBase::ReadStartJS>); + SetProtoMethod(isolate, t, "readStop", JSMethod<&StreamBase::ReadStopJS>); + SetProtoMethod(isolate, t, "shutdown", JSMethod<&StreamBase::Shutdown>); + SetProtoMethod( + isolate, t, "useUserBuffer", JSMethod<&StreamBase::UseUserBuffer>); + SetProtoMethod(isolate, t, "writev", JSMethod<&StreamBase::Writev>); + SetProtoMethod(isolate, t, "writeBuffer", JSMethod<&StreamBase::WriteBuffer>); + SetProtoMethod(isolate, + t, + "writeAsciiString", + JSMethod<&StreamBase::WriteString>); + SetProtoMethod( + isolate, t, "writeUtf8String", JSMethod<&StreamBase::WriteString>); + SetProtoMethod( + isolate, t, "writeUcs2String", JSMethod<&StreamBase::WriteString>); + SetProtoMethod(isolate, + t, + "writeLatin1String", + JSMethod<&StreamBase::WriteString>); + t->PrototypeTemplate()->Set(FIXED_ONE_BYTE_STRING(isolate, "isStreamBase"), + True(isolate)); t->PrototypeTemplate()->SetAccessor( - FIXED_ONE_BYTE_STRING(env->isolate(), "onread"), - BaseObject::InternalFieldGet< - StreamBase::kOnReadFunctionField>, - BaseObject::InternalFieldSet< - StreamBase::kOnReadFunctionField, - &Value::IsFunction>); + FIXED_ONE_BYTE_STRING(isolate, "onread"), + BaseObject::InternalFieldGet, + BaseObject::InternalFieldSet); } void StreamBase::RegisterExternalReferences( diff --git a/src/stream_pipe.cc b/src/stream_pipe.cc index 
93b7ffeca9cd53..7d4be978a11c4c 100644 --- a/src/stream_pipe.cc +++ b/src/stream_pipe.cc @@ -11,6 +11,7 @@ using v8::Function; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; +using v8::Isolate; using v8::Just; using v8::Local; using v8::Maybe; @@ -313,17 +314,18 @@ void InitializeStreamPipe(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); // Create FunctionTemplate for FileHandle::CloseReq - Local pipe = env->NewFunctionTemplate(StreamPipe::New); - env->SetProtoMethod(pipe, "unpipe", StreamPipe::Unpipe); - env->SetProtoMethod(pipe, "start", StreamPipe::Start); - env->SetProtoMethod(pipe, "isClosed", StreamPipe::IsClosed); - env->SetProtoMethod(pipe, "pendingWrites", StreamPipe::PendingWrites); + Local pipe = NewFunctionTemplate(isolate, StreamPipe::New); + SetProtoMethod(isolate, pipe, "unpipe", StreamPipe::Unpipe); + SetProtoMethod(isolate, pipe, "start", StreamPipe::Start); + SetProtoMethod(isolate, pipe, "isClosed", StreamPipe::IsClosed); + SetProtoMethod(isolate, pipe, "pendingWrites", StreamPipe::PendingWrites); pipe->Inherit(AsyncWrap::GetConstructorTemplate(env)); pipe->InstanceTemplate()->SetInternalFieldCount( StreamPipe::kInternalFieldCount); - env->SetConstructorFunction(target, "StreamPipe", pipe); + SetConstructorFunction(context, target, "StreamPipe", pipe); } } // anonymous namespace diff --git a/src/stream_wrap.cc b/src/stream_wrap.cc index d0c5664adcd897..88f0bce66adfcc 100644 --- a/src/stream_wrap.cc +++ b/src/stream_wrap.cc @@ -46,6 +46,7 @@ using v8::EscapableHandleScope; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; +using v8::Isolate; using v8::JustVoid; using v8::Local; using v8::Maybe; @@ -67,9 +68,10 @@ void LibuvStreamWrap::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); Local sw = - FunctionTemplate::New(env->isolate(), IsConstructCallCallback); + NewFunctionTemplate(isolate, IsConstructCallCallback); sw->InstanceTemplate()->SetInternalFieldCount(StreamReq::kInternalFieldCount); // we need to set handle and callback to null, @@ -79,33 +81,34 @@ void LibuvStreamWrap::Initialize(Local target, // - oncomplete // - callback // - handle - sw->InstanceTemplate()->Set( - env->oncomplete_string(), - v8::Null(env->isolate())); - sw->InstanceTemplate()->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "callback"), - v8::Null(env->isolate())); - sw->InstanceTemplate()->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "handle"), - v8::Null(env->isolate())); + sw->InstanceTemplate()->Set(env->oncomplete_string(), v8::Null(isolate)); + sw->InstanceTemplate()->Set(FIXED_ONE_BYTE_STRING(isolate, "callback"), + v8::Null(isolate)); + sw->InstanceTemplate()->Set(FIXED_ONE_BYTE_STRING(isolate, "handle"), + v8::Null(isolate)); sw->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetConstructorFunction(target, "ShutdownWrap", sw); + SetConstructorFunction(context, target, "ShutdownWrap", sw); env->set_shutdown_wrap_template(sw->InstanceTemplate()); Local ww = - FunctionTemplate::New(env->isolate(), IsConstructCallCallback); + FunctionTemplate::New(isolate, IsConstructCallCallback); ww->InstanceTemplate()->SetInternalFieldCount( StreamReq::kInternalFieldCount); ww->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetConstructorFunction(target, "WriteWrap", ww); + SetConstructorFunction(context, target, "WriteWrap", ww); 
env->set_write_wrap_template(ww->InstanceTemplate()); NODE_DEFINE_CONSTANT(target, kReadBytesOrError); NODE_DEFINE_CONSTANT(target, kArrayBufferOffset); NODE_DEFINE_CONSTANT(target, kBytesWritten); NODE_DEFINE_CONSTANT(target, kLastWriteWasAsync); - target->Set(context, FIXED_ONE_BYTE_STRING(env->isolate(), "streamBaseState"), - env->stream_base_state().GetJSArray()).Check(); + target + ->Set(context, + FIXED_ONE_BYTE_STRING(isolate, "streamBaseState"), + env->stream_base_state().GetJSArray()) + .Check(); } void LibuvStreamWrap::RegisterExternalReferences( @@ -134,23 +137,23 @@ Local LibuvStreamWrap::GetConstructorTemplate( Environment* env) { Local tmpl = env->libuv_stream_wrap_ctor_template(); if (tmpl.IsEmpty()) { - tmpl = env->NewFunctionTemplate(nullptr); - tmpl->SetClassName( - FIXED_ONE_BYTE_STRING(env->isolate(), "LibuvStreamWrap")); + Isolate* isolate = env->isolate(); + tmpl = NewFunctionTemplate(isolate, nullptr); + tmpl->SetClassName(FIXED_ONE_BYTE_STRING(isolate, "LibuvStreamWrap")); tmpl->Inherit(HandleWrap::GetConstructorTemplate(env)); tmpl->InstanceTemplate()->SetInternalFieldCount( StreamBase::kInternalFieldCount); Local get_write_queue_size = - FunctionTemplate::New(env->isolate(), + FunctionTemplate::New(isolate, GetWriteQueueSize, Local(), - Signature::New(env->isolate(), tmpl)); + Signature::New(isolate, tmpl)); tmpl->PrototypeTemplate()->SetAccessorProperty( env->write_queue_size_string(), get_write_queue_size, Local(), static_cast(ReadOnly | DontDelete)); - env->SetProtoMethod(tmpl, "setBlocking", SetBlocking); + SetProtoMethod(isolate, tmpl, "setBlocking", SetBlocking); StreamBase::AddMethods(env, tmpl); env->set_libuv_stream_wrap_ctor_template(tmpl); } diff --git a/src/string_decoder.cc b/src/string_decoder.cc index a915f5744f3b00..b447474c09f583 100644 --- a/src/string_decoder.cc +++ b/src/string_decoder.cc @@ -328,8 +328,8 @@ void InitializeStringDecoder(Local target, FIXED_ONE_BYTE_STRING(isolate, "kSize"), Integer::New(isolate, sizeof(StringDecoder))).Check(); - env->SetMethod(target, "decode", DecodeData); - env->SetMethod(target, "flush", FlushData); + SetMethod(context, target, "decode", DecodeData); + SetMethod(context, target, "flush", FlushData); } } // anonymous namespace diff --git a/src/tcp_wrap.cc b/src/tcp_wrap.cc index 53ecfd2b7abffb..eda5aedf5a17b6 100644 --- a/src/tcp_wrap.cc +++ b/src/tcp_wrap.cc @@ -45,6 +45,7 @@ using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::Int32; using v8::Integer; +using v8::Isolate; using v8::Local; using v8::MaybeLocal; using v8::Object; @@ -73,8 +74,9 @@ void TCPWrap::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - Local t = env->NewFunctionTemplate(New); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate()->SetInternalFieldCount(StreamBase::kInternalFieldCount); // Init properties @@ -85,32 +87,36 @@ void TCPWrap::Initialize(Local target, t->Inherit(LibuvStreamWrap::GetConstructorTemplate(env)); - env->SetProtoMethod(t, "open", Open); - env->SetProtoMethod(t, "bind", Bind); - env->SetProtoMethod(t, "listen", Listen); - env->SetProtoMethod(t, "connect", Connect); - env->SetProtoMethod(t, "bind6", Bind6); - env->SetProtoMethod(t, "connect6", Connect6); - env->SetProtoMethod(t, "getsockname", - GetSockOrPeerName); - env->SetProtoMethod(t, "getpeername", - GetSockOrPeerName); - env->SetProtoMethod(t, "setNoDelay", SetNoDelay); - env->SetProtoMethod(t, "setKeepAlive", SetKeepAlive); - 
env->SetProtoMethod(t, "reset", Reset); + SetProtoMethod(isolate, t, "open", Open); + SetProtoMethod(isolate, t, "bind", Bind); + SetProtoMethod(isolate, t, "listen", Listen); + SetProtoMethod(isolate, t, "connect", Connect); + SetProtoMethod(isolate, t, "bind6", Bind6); + SetProtoMethod(isolate, t, "connect6", Connect6); + SetProtoMethod(isolate, + t, + "getsockname", + GetSockOrPeerName); + SetProtoMethod(isolate, + t, + "getpeername", + GetSockOrPeerName); + SetProtoMethod(isolate, t, "setNoDelay", SetNoDelay); + SetProtoMethod(isolate, t, "setKeepAlive", SetKeepAlive); + SetProtoMethod(isolate, t, "reset", Reset); #ifdef _WIN32 - env->SetProtoMethod(t, "setSimultaneousAccepts", SetSimultaneousAccepts); + SetProtoMethod(isolate, t, "setSimultaneousAccepts", SetSimultaneousAccepts); #endif - env->SetConstructorFunction(target, "TCP", t); + SetConstructorFunction(context, target, "TCP", t); env->set_tcp_constructor_template(t); // Create FunctionTemplate for TCPConnectWrap. Local cwt = BaseObject::MakeLazilyInitializedJSTemplate(env); cwt->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetConstructorFunction(target, "TCPConnectWrap", cwt); + SetConstructorFunction(context, target, "TCPConnectWrap", cwt); // Define constants Local constants = Object::New(env->isolate()); diff --git a/src/timers.cc b/src/timers.cc index 5014f2c66a5d24..2ecfd25f7eeb8b 100644 --- a/src/timers.cc +++ b/src/timers.cc @@ -48,15 +48,17 @@ void Initialize(Local target, void* priv) { Environment* env = Environment::GetCurrent(context); - env->SetMethod(target, "getLibuvNow", GetLibuvNow); - env->SetMethod(target, "setupTimers", SetupTimers); - env->SetMethod(target, "scheduleTimer", ScheduleTimer); - env->SetMethod(target, "toggleTimerRef", ToggleTimerRef); - env->SetMethod(target, "toggleImmediateRef", ToggleImmediateRef); + SetMethod(context, target, "getLibuvNow", GetLibuvNow); + SetMethod(context, target, "setupTimers", SetupTimers); + SetMethod(context, target, "scheduleTimer", ScheduleTimer); + SetMethod(context, target, "toggleTimerRef", ToggleTimerRef); + SetMethod(context, target, "toggleImmediateRef", ToggleImmediateRef); - target->Set(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "immediateInfo"), - env->immediate_info()->fields().GetJSArray()).Check(); + target + ->Set(context, + FIXED_ONE_BYTE_STRING(env->isolate(), "immediateInfo"), + env->immediate_info()->fields().GetJSArray()) + .Check(); } } // anonymous namespace void RegisterTimerExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/src/tty_wrap.cc b/src/tty_wrap.cc index ca8c9356afe276..fff42c35900875 100644 --- a/src/tty_wrap.cc +++ b/src/tty_wrap.cc @@ -36,6 +36,7 @@ using v8::Context; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::Integer; +using v8::Isolate; using v8::Local; using v8::Object; using v8::String; @@ -53,22 +54,24 @@ void TTYWrap::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); Local ttyString = FIXED_ONE_BYTE_STRING(env->isolate(), "TTY"); - Local t = env->NewFunctionTemplate(New); + Local t = NewFunctionTemplate(isolate, New); t->SetClassName(ttyString); t->InstanceTemplate()->SetInternalFieldCount(StreamBase::kInternalFieldCount); t->Inherit(LibuvStreamWrap::GetConstructorTemplate(env)); - env->SetProtoMethodNoSideEffect(t, "getWindowSize", TTYWrap::GetWindowSize); - env->SetProtoMethod(t, "setRawMode", SetRawMode); + SetProtoMethodNoSideEffect( + isolate, t, 
"getWindowSize", TTYWrap::GetWindowSize); + SetProtoMethod(isolate, t, "setRawMode", SetRawMode); - env->SetMethodNoSideEffect(target, "isTTY", IsTTY); + SetMethodNoSideEffect(context, target, "isTTY", IsTTY); Local func; - if (t->GetFunction(env->context()).ToLocal(&func) && - target->Set(env->context(), ttyString, func).IsJust()) { + if (t->GetFunction(context).ToLocal(&func) && + target->Set(context, ttyString, func).IsJust()) { env->set_tty_constructor_template(t); } } diff --git a/src/udp_wrap.cc b/src/udp_wrap.cc index 127a1a6e5d8fe7..a6911e2e921b98 100644 --- a/src/udp_wrap.cc +++ b/src/udp_wrap.cc @@ -110,8 +110,8 @@ UDPWrapBase* UDPWrapBase::FromObject(Local obj) { } void UDPWrapBase::AddMethods(Environment* env, Local t) { - env->SetProtoMethod(t, "recvStart", RecvStart); - env->SetProtoMethod(t, "recvStop", RecvStop); + SetProtoMethod(env->isolate(), t, "recvStart", RecvStart); + SetProtoMethod(env->isolate(), t, "recvStop", RecvStop); } UDPWrap::UDPWrap(Environment* env, Local object) @@ -134,21 +134,19 @@ void UDPWrap::Initialize(Local target, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); - Local t = env->NewFunctionTemplate(New); + Local t = NewFunctionTemplate(isolate, New); t->InstanceTemplate()->SetInternalFieldCount( UDPWrapBase::kInternalFieldCount); enum PropertyAttribute attributes = static_cast(ReadOnly | DontDelete); - Local signature = Signature::New(env->isolate(), t); + Local signature = Signature::New(isolate, t); Local get_fd_templ = - FunctionTemplate::New(env->isolate(), - UDPWrap::GetFD, - Local(), - signature); + FunctionTemplate::New(isolate, UDPWrap::GetFD, Local(), signature); t->PrototypeTemplate()->SetAccessorProperty(env->fd_string(), get_fd_templ, @@ -156,44 +154,47 @@ void UDPWrap::Initialize(Local target, attributes); UDPWrapBase::AddMethods(env, t); - env->SetProtoMethod(t, "open", Open); - env->SetProtoMethod(t, "bind", Bind); - env->SetProtoMethod(t, "connect", Connect); - env->SetProtoMethod(t, "send", Send); - env->SetProtoMethod(t, "bind6", Bind6); - env->SetProtoMethod(t, "connect6", Connect6); - env->SetProtoMethod(t, "send6", Send6); - env->SetProtoMethod(t, "disconnect", Disconnect); - env->SetProtoMethod(t, "getpeername", - GetSockOrPeerName); - env->SetProtoMethod(t, "getsockname", - GetSockOrPeerName); - env->SetProtoMethod(t, "addMembership", AddMembership); - env->SetProtoMethod(t, "dropMembership", DropMembership); - env->SetProtoMethod(t, "addSourceSpecificMembership", - AddSourceSpecificMembership); - env->SetProtoMethod(t, "dropSourceSpecificMembership", - DropSourceSpecificMembership); - env->SetProtoMethod(t, "setMulticastInterface", SetMulticastInterface); - env->SetProtoMethod(t, "setMulticastTTL", SetMulticastTTL); - env->SetProtoMethod(t, "setMulticastLoopback", SetMulticastLoopback); - env->SetProtoMethod(t, "setBroadcast", SetBroadcast); - env->SetProtoMethod(t, "setTTL", SetTTL); - env->SetProtoMethod(t, "bufferSize", BufferSize); + SetProtoMethod(isolate, t, "open", Open); + SetProtoMethod(isolate, t, "bind", Bind); + SetProtoMethod(isolate, t, "connect", Connect); + SetProtoMethod(isolate, t, "send", Send); + SetProtoMethod(isolate, t, "bind6", Bind6); + SetProtoMethod(isolate, t, "connect6", Connect6); + SetProtoMethod(isolate, t, "send6", Send6); + SetProtoMethod(isolate, t, "disconnect", Disconnect); + SetProtoMethod(isolate, + t, + "getpeername", + GetSockOrPeerName); + SetProtoMethod(isolate, + t, + "getsockname", + GetSockOrPeerName); + 
SetProtoMethod(isolate, t, "addMembership", AddMembership); + SetProtoMethod(isolate, t, "dropMembership", DropMembership); + SetProtoMethod( + isolate, t, "addSourceSpecificMembership", AddSourceSpecificMembership); + SetProtoMethod( + isolate, t, "dropSourceSpecificMembership", DropSourceSpecificMembership); + SetProtoMethod(isolate, t, "setMulticastInterface", SetMulticastInterface); + SetProtoMethod(isolate, t, "setMulticastTTL", SetMulticastTTL); + SetProtoMethod(isolate, t, "setMulticastLoopback", SetMulticastLoopback); + SetProtoMethod(isolate, t, "setBroadcast", SetBroadcast); + SetProtoMethod(isolate, t, "setTTL", SetTTL); + SetProtoMethod(isolate, t, "bufferSize", BufferSize); t->Inherit(HandleWrap::GetConstructorTemplate(env)); - env->SetConstructorFunction(target, "UDP", t); - env->set_udp_constructor_function( - t->GetFunction(env->context()).ToLocalChecked()); + SetConstructorFunction(context, target, "UDP", t); + env->set_udp_constructor_function(t->GetFunction(context).ToLocalChecked()); // Create FunctionTemplate for SendWrap Local swt = BaseObject::MakeLazilyInitializedJSTemplate(env); swt->Inherit(AsyncWrap::GetConstructorTemplate(env)); - env->SetConstructorFunction(target, "SendWrap", swt); + SetConstructorFunction(context, target, "SendWrap", swt); - Local constants = Object::New(env->isolate()); + Local constants = Object::New(isolate); NODE_DEFINE_CONSTANT(constants, UV_UDP_IPV6ONLY); NODE_DEFINE_CONSTANT(constants, UV_UDP_REUSEADDR); target->Set(context, diff --git a/src/util.cc b/src/util.cc index b881f9f9f88cee..6bbd9fd926f61a 100644 --- a/src/util.cc +++ b/src/util.cc @@ -317,4 +317,161 @@ std::string DiagnosticFilename::MakeFilename( return oss.str(); } +Local NewFunctionTemplate( + v8::Isolate* isolate, + v8::FunctionCallback callback, + Local signature, + v8::ConstructorBehavior behavior, + v8::SideEffectType side_effect_type, + const v8::CFunction* c_function) { + return v8::FunctionTemplate::New(isolate, + callback, + Local(), + signature, + 0, + behavior, + side_effect_type, + c_function); +} + +void SetMethod(Local context, + Local that, + const char* name, + v8::FunctionCallback callback) { + Isolate* isolate = context->GetIsolate(); + Local function = + NewFunctionTemplate(isolate, + callback, + Local(), + v8::ConstructorBehavior::kThrow, + v8::SideEffectType::kHasSideEffect) + ->GetFunction(context) + .ToLocalChecked(); + // kInternalized strings are created in the old space. + const v8::NewStringType type = v8::NewStringType::kInternalized; + Local name_string = + v8::String::NewFromUtf8(isolate, name, type).ToLocalChecked(); + that->Set(context, name_string, function).Check(); + function->SetName(name_string); // NODE_SET_METHOD() compatibility. 
+} + +void SetFastMethod(Local context, + Local that, + const char* name, + v8::FunctionCallback slow_callback, + const v8::CFunction* c_function) { + Isolate* isolate = context->GetIsolate(); + Local function = + NewFunctionTemplate(isolate, + slow_callback, + Local(), + v8::ConstructorBehavior::kThrow, + v8::SideEffectType::kHasNoSideEffect, + c_function) + ->GetFunction(context) + .ToLocalChecked(); + const v8::NewStringType type = v8::NewStringType::kInternalized; + Local name_string = + v8::String::NewFromUtf8(isolate, name, type).ToLocalChecked(); + that->Set(context, name_string, function).Check(); +} + +void SetMethodNoSideEffect(Local context, + Local that, + const char* name, + v8::FunctionCallback callback) { + Isolate* isolate = context->GetIsolate(); + Local function = + NewFunctionTemplate(isolate, + callback, + Local(), + v8::ConstructorBehavior::kThrow, + v8::SideEffectType::kHasNoSideEffect) + ->GetFunction(context) + .ToLocalChecked(); + // kInternalized strings are created in the old space. + const v8::NewStringType type = v8::NewStringType::kInternalized; + Local name_string = + v8::String::NewFromUtf8(isolate, name, type).ToLocalChecked(); + that->Set(context, name_string, function).Check(); + function->SetName(name_string); // NODE_SET_METHOD() compatibility. +} + +void SetProtoMethod(v8::Isolate* isolate, + Local that, + const char* name, + v8::FunctionCallback callback) { + Local signature = v8::Signature::New(isolate, that); + Local t = + NewFunctionTemplate(isolate, + callback, + signature, + v8::ConstructorBehavior::kThrow, + v8::SideEffectType::kHasSideEffect); + // kInternalized strings are created in the old space. + const v8::NewStringType type = v8::NewStringType::kInternalized; + Local name_string = + v8::String::NewFromUtf8(isolate, name, type).ToLocalChecked(); + that->PrototypeTemplate()->Set(name_string, t); + t->SetClassName(name_string); // NODE_SET_PROTOTYPE_METHOD() compatibility. +} + +void SetProtoMethodNoSideEffect(v8::Isolate* isolate, + Local that, + const char* name, + v8::FunctionCallback callback) { + Local signature = v8::Signature::New(isolate, that); + Local t = + NewFunctionTemplate(isolate, + callback, + signature, + v8::ConstructorBehavior::kThrow, + v8::SideEffectType::kHasNoSideEffect); + // kInternalized strings are created in the old space. + const v8::NewStringType type = v8::NewStringType::kInternalized; + Local name_string = + v8::String::NewFromUtf8(isolate, name, type).ToLocalChecked(); + that->PrototypeTemplate()->Set(name_string, t); + t->SetClassName(name_string); // NODE_SET_PROTOTYPE_METHOD() compatibility. +} + +void SetInstanceMethod(v8::Isolate* isolate, + Local that, + const char* name, + v8::FunctionCallback callback) { + Local signature = v8::Signature::New(isolate, that); + Local t = + NewFunctionTemplate(isolate, + callback, + signature, + v8::ConstructorBehavior::kThrow, + v8::SideEffectType::kHasSideEffect); + // kInternalized strings are created in the old space. 
+ const v8::NewStringType type = v8::NewStringType::kInternalized; + Local name_string = + v8::String::NewFromUtf8(isolate, name, type).ToLocalChecked(); + that->InstanceTemplate()->Set(name_string, t); + t->SetClassName(name_string); +} + +void SetConstructorFunction(Local context, + Local that, + const char* name, + Local tmpl, + SetConstructorFunctionFlag flag) { + Isolate* isolate = context->GetIsolate(); + SetConstructorFunction( + context, that, OneByteString(isolate, name), tmpl, flag); +} + +void SetConstructorFunction(Local context, + Local that, + Local name, + Local tmpl, + SetConstructorFunctionFlag flag) { + if (LIKELY(flag == SetConstructorFunctionFlag::SET_CLASS_NAME)) + tmpl->SetClassName(name); + that->Set(context, name, tmpl->GetFunction(context).ToLocalChecked()).Check(); +} + } // namespace node diff --git a/src/util.h b/src/util.h index a48071b093db97..ea3ccb364bd421 100644 --- a/src/util.h +++ b/src/util.h @@ -856,6 +856,66 @@ std::unique_ptr static_unique_pointer_cast(std::unique_ptr&& ptr) { // Returns a non-zero code if it fails to open or read the file, // aborts if it fails to close the file. int ReadFileSync(std::string* result, const char* path); + +v8::Local NewFunctionTemplate( + v8::Isolate* isolate, + v8::FunctionCallback callback, + v8::Local signature = v8::Local(), + v8::ConstructorBehavior behavior = v8::ConstructorBehavior::kAllow, + v8::SideEffectType side_effect = v8::SideEffectType::kHasSideEffect, + const v8::CFunction* c_function = nullptr); + +// Convenience methods for NewFunctionTemplate(). +void SetMethod(v8::Local context, + v8::Local that, + const char* name, + v8::FunctionCallback callback); + +void SetFastMethod(v8::Local context, + v8::Local that, + const char* name, + v8::FunctionCallback slow_callback, + const v8::CFunction* c_function); + +void SetProtoMethod(v8::Isolate* isolate, + v8::Local that, + const char* name, + v8::FunctionCallback callback); + +void SetInstanceMethod(v8::Isolate* isolate, + v8::Local that, + const char* name, + v8::FunctionCallback callback); + +// Safe variants denote the function has no side effects. +void SetMethodNoSideEffect(v8::Local context, + v8::Local that, + const char* name, + v8::FunctionCallback callback); +void SetProtoMethodNoSideEffect(v8::Isolate* isolate, + v8::Local that, + const char* name, + v8::FunctionCallback callback); + +enum class SetConstructorFunctionFlag { + NONE, + SET_CLASS_NAME, +}; + +void SetConstructorFunction(v8::Local context, + v8::Local that, + const char* name, + v8::Local tmpl, + SetConstructorFunctionFlag flag = + SetConstructorFunctionFlag::SET_CLASS_NAME); + +void SetConstructorFunction(v8::Local context, + v8::Local that, + v8::Local name, + v8::Local tmpl, + SetConstructorFunctionFlag flag = + SetConstructorFunctionFlag::SET_CLASS_NAME); + } // namespace node #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/uv.cc b/src/uv.cc index 2b0c424587e420..81e80711df8fc5 100644 --- a/src/uv.cc +++ b/src/uv.cc @@ -109,10 +109,8 @@ void Initialize(Local target, void* priv) { Environment* env = Environment::GetCurrent(context); Isolate* isolate = env->isolate(); - env->SetConstructorFunction( - target, - "errname", - env->NewFunctionTemplate(ErrName)); + SetConstructorFunction( + context, target, "errname", NewFunctionTemplate(isolate, ErrName)); // TODO(joyeecheung): This should be deprecated in user land in favor of // `util.getSystemErrorName(err)`. 
@@ -128,7 +126,7 @@ void Initialize(Local target, target->DefineOwnProperty(context, name, value, attributes).Check(); } - env->SetMethod(target, "getErrorMap", GetErrMap); + SetMethod(context, target, "getErrorMap", GetErrMap); } void RegisterExternalReferences(ExternalReferenceRegistry* registry) { diff --git a/test/parallel/test-env-newprotomethod-remove-unnecessary-prototypes.js b/test/parallel/test-env-newprotomethod-remove-unnecessary-prototypes.js index deb7993a145825..22c0c8665d148b 100644 --- a/test/parallel/test-env-newprotomethod-remove-unnecessary-prototypes.js +++ b/test/parallel/test-env-newprotomethod-remove-unnecessary-prototypes.js @@ -3,7 +3,7 @@ require('../common'); // This test ensures that unnecessary prototypes are no longer -// being generated by Environment::NewFunctionTemplate. +// being generated by node::NewFunctionTemplate. const assert = require('assert'); const { internalBinding } = require('internal/test/binding'); From 8c2d19b2d62aab358bf6fe77eb1f4e3d9c50f244 Mon Sep 17 00:00:00 2001 From: Paolo Insogna Date: Mon, 27 Jun 2022 14:13:00 +0000 Subject: [PATCH 002/177] http: do not leak error listeners PR-URL: https://github.com/nodejs/node/pull/43587 Fixes: https://github.com/nodejs/node/issues/43548 Reviewed-By: Matteo Collina Reviewed-By: Luigi Pinca Reviewed-By: Ricky Zhou <0x19951125@gmail.com> Reviewed-By: Mohammed Keyvanzadeh --- lib/_http_server.js | 5 ++- test/parallel/test-http-socket-listeners.js | 44 +++++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 test/parallel/test-http-socket-listeners.js diff --git a/lib/_http_server.js b/lib/_http_server.js index d44257287a2915..1432ac8e637ad9 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -766,7 +766,10 @@ const requestHeaderFieldsTooLargeResponse = Buffer.from( function socketOnError(e) { // Ignore further errors this.removeListener('error', socketOnError); - this.on('error', noop); + + if (this.listenerCount('error') === 0) { + this.on('error', noop); + } if (!this.server.emit('clientError', e, this)) { if (this.writable && this.bytesWritten === 0) { diff --git a/test/parallel/test-http-socket-listeners.js b/test/parallel/test-http-socket-listeners.js new file mode 100644 index 00000000000000..2513bc1a90c9f6 --- /dev/null +++ b/test/parallel/test-http-socket-listeners.js @@ -0,0 +1,44 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const http = require('http'); +const net = require('net'); + +// This test sends an invalid character to a HTTP server and purposely +// does not handle clientError (even if it sets an event handler). +// +// The idea is to let the server emit multiple errors on the socket, +// mostly due to parsing error, and make sure they don't result +// in leaking event listeners. 
+ +let i = 0; +let socket; + +process.on('warning', common.mustNotCall()); + +const server = http.createServer(common.mustNotCall()); + +server.on('clientError', common.mustCallAtLeast((err) => { + assert.strictEqual(err.code, 'HPE_INVALID_METHOD'); + assert.strictEqual(err.rawPacket.toString(), '*'); + + if (i === 20) { + socket.end(); + } else { + socket.write('*'); + i++; + } +}, 1)); + +server.listen(0, () => { + socket = net.createConnection({ port: server.address().port }); + + socket.on('connect', () => { + socket.write('*'); + }); + + socket.on('close', () => { + server.close(); + }); +}); From e072c3aa701bc3d53adf1219b838ca407f5934e2 Mon Sep 17 00:00:00 2001 From: Geoffrey Booth <456802+GeoffreyBooth@users.noreply.github.com> Date: Mon, 25 Jul 2022 21:08:18 -0700 Subject: [PATCH 003/177] esm: move package config helpers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/43967 Reviewed-By: Jacob Smith Reviewed-By: Michaël Zasso Reviewed-By: Antoine du Hamel --- lib/internal/modules/esm/package_config.js | 142 +++++++++++++++++++++ lib/internal/modules/esm/resolve.js | 119 ++--------------- test/parallel/test-bootstrap-modules.js | 1 + 3 files changed, 153 insertions(+), 109 deletions(-) create mode 100644 lib/internal/modules/esm/package_config.js diff --git a/lib/internal/modules/esm/package_config.js b/lib/internal/modules/esm/package_config.js new file mode 100644 index 00000000000000..89e90d0d997cd2 --- /dev/null +++ b/lib/internal/modules/esm/package_config.js @@ -0,0 +1,142 @@ +'use strict'; + +const { + JSONParse, + SafeMap, + StringPrototypeEndsWith, +} = primordials; +const { URL, fileURLToPath } = require('internal/url'); +const { + ERR_INVALID_PACKAGE_CONFIG, +} = require('internal/errors').codes; + +const packageJsonReader = require('internal/modules/package_json_reader'); + + +/** + * @typedef {string | string[] | Record} Exports + * @typedef {'module' | 'commonjs'} PackageType + * @typedef {{ + * pjsonPath: string, + * exports?: ExportConfig, + * name?: string, + * main?: string, + * type?: PackageType, + * }} PackageConfig + */ + +/** @type {Map} */ +const packageJSONCache = new SafeMap(); + + +/** + * @param {string} path + * @param {string} specifier + * @param {string | URL | undefined} base + * @returns {PackageConfig} + */ +function getPackageConfig(path, specifier, base) { + const existing = packageJSONCache.get(path); + if (existing !== undefined) { + return existing; + } + const source = packageJsonReader.read(path).string; + if (source === undefined) { + const packageConfig = { + pjsonPath: path, + exists: false, + main: undefined, + name: undefined, + type: 'none', + exports: undefined, + imports: undefined, + }; + packageJSONCache.set(path, packageConfig); + return packageConfig; + } + + let packageJSON; + try { + packageJSON = JSONParse(source); + } catch (error) { + throw new ERR_INVALID_PACKAGE_CONFIG( + path, + (base ? 
`"${specifier}" from ` : '') + fileURLToPath(base || specifier), + error.message + ); + } + + let { imports, main, name, type } = packageJSON; + const { exports } = packageJSON; + if (typeof imports !== 'object' || imports === null) { + imports = undefined; + } + if (typeof main !== 'string') { + main = undefined; + } + if (typeof name !== 'string') { + name = undefined; + } + // Ignore unknown types for forwards compatibility + if (type !== 'module' && type !== 'commonjs') { + type = 'none'; + } + + const packageConfig = { + pjsonPath: path, + exists: true, + main, + name, + type, + exports, + imports, + }; + packageJSONCache.set(path, packageConfig); + return packageConfig; +} + + +/** + * @param {URL | string} resolved + * @returns {PackageConfig} + */ +function getPackageScopeConfig(resolved) { + let packageJSONUrl = new URL('./package.json', resolved); + while (true) { + const packageJSONPath = packageJSONUrl.pathname; + if (StringPrototypeEndsWith(packageJSONPath, 'node_modules/package.json')) { + break; + } + const packageConfig = getPackageConfig(fileURLToPath(packageJSONUrl), resolved); + if (packageConfig.exists) { + return packageConfig; + } + + const lastPackageJSONUrl = packageJSONUrl; + packageJSONUrl = new URL('../package.json', packageJSONUrl); + + // Terminates at root where ../package.json equals ../../package.json + // (can't just check "/package.json" for Windows support). + if (packageJSONUrl.pathname === lastPackageJSONUrl.pathname) { + break; + } + } + const packageJSONPath = fileURLToPath(packageJSONUrl); + const packageConfig = { + pjsonPath: packageJSONPath, + exists: false, + main: undefined, + name: undefined, + type: 'none', + exports: undefined, + imports: undefined, + }; + packageJSONCache.set(packageJSONPath, packageConfig); + return packageConfig; +} + + +module.exports = { + getPackageConfig, + getPackageScopeConfig, +}; diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js index 435a6f279e9fd1..64a566d6edcc47 100644 --- a/lib/internal/modules/esm/resolve.js +++ b/lib/internal/modules/esm/resolve.js @@ -58,9 +58,16 @@ const { ERR_NETWORK_IMPORT_DISALLOWED, ERR_UNSUPPORTED_ESM_URL_SCHEME, } = require('internal/errors').codes; -const { Module: CJSModule } = require('internal/modules/cjs/loader'); +const { Module: CJSModule } = require('internal/modules/cjs/loader'); const packageJsonReader = require('internal/modules/package_json_reader'); +const { getPackageConfig, getPackageScopeConfig } = require('internal/modules/esm/package_config'); + +/** + * @typedef {import('internal/modules/esm/package_config.js').PackageConfig} PackageConfig + */ + + const userConditions = getOptionValue('--conditions'); const noAddons = getOptionValue('--no-addons'); const addonConditions = noAddons ? 
[] : ['node-addons']; @@ -74,18 +81,6 @@ const DEFAULT_CONDITIONS = ObjectFreeze([ const DEFAULT_CONDITIONS_SET = new SafeSet(DEFAULT_CONDITIONS); -/** - * @typedef {string | string[] | Record} Exports - * @typedef {'module' | 'commonjs'} PackageType - * @typedef {{ - * pjsonPath: string, - * exports?: ExportConfig, - * name?: string, - * main?: string, - * type?: PackageType, - * }} PackageConfig - */ - const emittedPackageWarnings = new SafeSet(); function emitTrailingSlashPatternDeprecation(match, pjsonUrl, base) { @@ -154,7 +149,6 @@ function getConditionsSet(conditions) { } const realpathCache = new SafeMap(); -const packageJSONCache = new SafeMap(); /* string -> PackageConfig */ /** * @param {string | URL} path @@ -163,99 +157,6 @@ const packageJSONCache = new SafeMap(); /* string -> PackageConfig */ const tryStatSync = (path) => statSync(path, { throwIfNoEntry: false }) ?? new Stats(); -/** - * @param {string} path - * @param {string} specifier - * @param {string | URL | undefined} base - * @returns {PackageConfig} - */ -function getPackageConfig(path, specifier, base) { - const existing = packageJSONCache.get(path); - if (existing !== undefined) { - return existing; - } - const source = packageJsonReader.read(path).string; - if (source === undefined) { - const packageConfig = { - pjsonPath: path, - exists: false, - main: undefined, - name: undefined, - type: 'none', - exports: undefined, - imports: undefined, - }; - packageJSONCache.set(path, packageConfig); - return packageConfig; - } - - let packageJSON; - try { - packageJSON = JSONParse(source); - } catch (error) { - throw new ERR_INVALID_PACKAGE_CONFIG( - path, - (base ? `"${specifier}" from ` : '') + fileURLToPath(base || specifier), - error.message - ); - } - - let { imports, main, name, type } = packageJSON; - const { exports } = packageJSON; - if (typeof imports !== 'object' || imports === null) imports = undefined; - if (typeof main !== 'string') main = undefined; - if (typeof name !== 'string') name = undefined; - // Ignore unknown types for forwards compatibility - if (type !== 'module' && type !== 'commonjs') type = 'none'; - - const packageConfig = { - pjsonPath: path, - exists: true, - main, - name, - type, - exports, - imports, - }; - packageJSONCache.set(path, packageConfig); - return packageConfig; -} - -/** - * @param {URL | string} resolved - * @returns {PackageConfig} - */ -function getPackageScopeConfig(resolved) { - let packageJSONUrl = new URL('./package.json', resolved); - while (true) { - const packageJSONPath = packageJSONUrl.pathname; - if (StringPrototypeEndsWith(packageJSONPath, 'node_modules/package.json')) - break; - const packageConfig = getPackageConfig(fileURLToPath(packageJSONUrl), - resolved); - if (packageConfig.exists) return packageConfig; - - const lastPackageJSONUrl = packageJSONUrl; - packageJSONUrl = new URL('../package.json', packageJSONUrl); - - // Terminates at root where ../package.json equals ../../package.json - // (can't just check "/package.json" for Windows support). 
- if (packageJSONUrl.pathname === lastPackageJSONUrl.pathname) break; - } - const packageJSONPath = fileURLToPath(packageJSONUrl); - const packageConfig = { - pjsonPath: packageJSONPath, - exists: false, - main: undefined, - name: undefined, - type: 'none', - exports: undefined, - imports: undefined, - }; - packageJSONCache.set(packageJSONPath, packageConfig); - return packageConfig; -} - /** * @param {string | URL} url * @returns {boolean} @@ -609,7 +510,7 @@ function resolvePackageTarget(packageJSONUrl, target, subpath, packageSubpath, /** * - * @param {Exports} exports + * @param {import('internal/modules/esm/package_config.js').Exports} exports * @param {URL} packageJSONUrl * @param {string | URL | undefined} base * @returns {boolean} @@ -799,7 +700,7 @@ function packageImportsResolve(name, base, conditions) { /** * @param {URL} url - * @returns {PackageType} + * @returns {import('internal/modules/esm/package_config.js').PackageType} */ function getPackageType(url) { const packageConfig = getPackageScopeConfig(url); diff --git a/test/parallel/test-bootstrap-modules.js b/test/parallel/test-bootstrap-modules.js index 495f08f716f573..87c9dc1b92523c 100644 --- a/test/parallel/test-bootstrap-modules.js +++ b/test/parallel/test-bootstrap-modules.js @@ -85,6 +85,7 @@ const expectedModules = new Set([ 'NativeModule internal/modules/esm/loader', 'NativeModule internal/modules/esm/module_job', 'NativeModule internal/modules/esm/module_map', + 'NativeModule internal/modules/esm/package_config', 'NativeModule internal/modules/esm/resolve', 'NativeModule internal/modules/esm/translators', 'NativeModule internal/modules/package_json_reader', From 1175d9036aa9db4ab7bad0c147f874b7340d64ac Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Mon, 25 Jul 2022 21:08:27 -0700 Subject: [PATCH 004/177] tools: add verbose flag to find-inactive-collaborators MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/43964 Reviewed-By: Mestery Reviewed-By: Luigi Pinca Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Tobias Nießen Reviewed-By: Darshan Sen --- tools/find-inactive-collaborators.mjs | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/tools/find-inactive-collaborators.mjs b/tools/find-inactive-collaborators.mjs index 71a00b970a9732..0787c634a1a4c9 100755 --- a/tools/find-inactive-collaborators.mjs +++ b/tools/find-inactive-collaborators.mjs @@ -7,8 +7,15 @@ import cp from 'node:child_process'; import fs from 'node:fs'; import readline from 'node:readline'; +import { parseArgs } from 'node:util'; -const SINCE = process.argv[2] || '18 months ago'; +const args = parseArgs({ + allowPositionals: true, + options: { verbose: { type: 'boolean', short: 'v' } } +}); + +const verbose = args.values.verbose; +const SINCE = args.positionals[0] || '18 months ago'; async function runGitCommand(cmd, mapFn) { const childProcess = cp.spawn('/bin/sh', ['-c', cmd], { @@ -176,11 +183,12 @@ async function moveCollaboratorToEmeritus(peopleToMove) { // Get list of current collaborators from README.md. 
const collaborators = await getCollaboratorsFromReadme(); -console.log(`Since ${SINCE}:\n`); -console.log(`* ${authors.size.toLocaleString()} authors have made commits.`); -console.log(`* ${approvingReviewers.size.toLocaleString()} reviewers have approved landed commits.`); -console.log(`* ${collaborators.length.toLocaleString()} collaborators currently in the project.`); - +if (verbose) { + console.log(`Since ${SINCE}:\n`); + console.log(`* ${authors.size.toLocaleString()} authors have made commits.`); + console.log(`* ${approvingReviewers.size.toLocaleString()} reviewers have approved landed commits.`); + console.log(`* ${collaborators.length.toLocaleString()} collaborators currently in the project.`); +} const inactive = collaborators.filter((collaborator) => !authors.has(collaborator.mailmap) && !approvingReviewers.has(collaborator.name) From 7af55dbc40a6bbf150d64c2e7bb159f4c3189f11 Mon Sep 17 00:00:00 2001 From: Moshe Atlow Date: Tue, 26 Jul 2022 07:43:56 +0300 Subject: [PATCH 005/177] doc: add missing test runner option PR-URL: https://github.com/nodejs/node/pull/43989 Refs: https://github.com/nodejs/node/pull/43554 Reviewed-By: Antoine du Hamel Reviewed-By: Darshan Sen --- doc/api/test.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/api/test.md b/doc/api/test.md index bef97c4b1917a7..cb5912189b3244 100644 --- a/doc/api/test.md +++ b/doc/api/test.md @@ -321,6 +321,9 @@ internally. -* Can be used to abort test subtasks when the test has been aborted. +* {AbortSignal} Can be used to abort test subtasks when the test has been + aborted. ```js test('top level test', async (t) => { @@ -575,7 +576,7 @@ changes: * `only` {boolean} If truthy, and the test context is configured to run `only` tests, then this test will be run. Otherwise, the test is skipped. **Default:** `false`. - * `signal` {AbortSignal} Allows aborting an in-progress test + * `signal` {AbortSignal} Allows aborting an in-progress test. * `skip` {boolean|string} If truthy, the test is skipped. If a string is provided, that string is displayed in the test results as the reason for skipping the test. **Default:** `false`. @@ -622,7 +623,8 @@ exposed as part of the API. added: v18.7.0 --> -* Can be used to abort test subtasks when the test has been aborted. +* {AbortSignal} Can be used to abort test subtasks when the test has been + aborted. [TAP]: https://testanything.org/ [`--test-only`]: cli.md#--test-only From 9244d6d41666665029f09e26dc89ad2624e94982 Mon Sep 17 00:00:00 2001 From: Daeyeon Jeong Date: Tue, 26 Jul 2022 22:03:18 +0900 Subject: [PATCH 008/177] test: fix test-cluster-concurrent-disconnect The error code, `ECONNRESET`, is observed on linux. This commit adds it as an expected error code. 
Signed-off-by: Daeyeon Jeong daeyeon.dev@gmail.com PR-URL: https://github.com/nodejs/node/pull/43961 Refs: https://ci.nodejs.org/job/node-test-commit-linux-containered/nodes=ubuntu1804_sharedlibs_withoutintl_x64/32901/testReport/junit/(root)/test/parallel_test_cluster_concurrent_disconnect/ Reviewed-By: Luigi Pinca Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Feng Yu Reviewed-By: Darshan Sen --- test/parallel/test-cluster-concurrent-disconnect.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/parallel/test-cluster-concurrent-disconnect.js b/test/parallel/test-cluster-concurrent-disconnect.js index d97db39a5bb38a..e3771a0a4fdcb0 100644 --- a/test/parallel/test-cluster-concurrent-disconnect.js +++ b/test/parallel/test-cluster-concurrent-disconnect.js @@ -29,7 +29,7 @@ if (cluster.isPrimary) { if (common.isOSX) { assert(['EPIPE', 'ENOTCONN'].includes(err.code), err); } else { - assert.strictEqual(err.code, 'EPIPE'); + assert(['EPIPE', 'ECONNRESET'].includes(err.code), err); } }); From 2cab7bb79176c0000e080aac6671d76ec3eccdcd Mon Sep 17 00:00:00 2001 From: theanarkh Date: Tue, 26 Jul 2022 21:25:06 +0800 Subject: [PATCH 009/177] test: reduce loop times for preventing test from timeout MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/43981 Refs: https://github.com/nodejs/reliability/issues/331 Reviewed-By: Feng Yu Reviewed-By: Michaël Zasso --- test/parallel/test-vm-break-on-sigint.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/parallel/test-vm-break-on-sigint.js b/test/parallel/test-vm-break-on-sigint.js index 6ad25bfed234b8..92fdae0c839056 100644 --- a/test/parallel/test-vm-break-on-sigint.js +++ b/test/parallel/test-vm-break-on-sigint.js @@ -16,7 +16,7 @@ if (!process.env.HAS_STARTED_WORKER) { } } else { const ctx = vm.createContext({}); - for (let i = 0; i < 10000; i++) { + for (let i = 0; i < 100; i++) { vm.runInContext('console.log(1)', ctx, { breakOnSigint: true }); } } From 51a0310398613fd5755567f2968031c120305d57 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Wed, 27 Jul 2022 00:37:41 +0200 Subject: [PATCH 010/177] test_runner: validate `concurrency` option PR-URL: https://github.com/nodejs/node/pull/43976 Reviewed-By: Darshan Sen Reviewed-By: Benjamin Gruenbaum --- lib/internal/test_runner/test.js | 28 +++++++++++++------ .../parallel/test-runner-option-validation.js | 11 ++++++++ 2 files changed, 30 insertions(+), 9 deletions(-) diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index 1663958f9b7c94..89b8c196038549 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -19,6 +19,7 @@ const { once } = require('events'); const { AbortController } = require('internal/abort_controller'); const { codes: { + ERR_INVALID_ARG_TYPE, ERR_TEST_FAILURE, }, kIsNodeError, @@ -33,9 +34,9 @@ const { } = require('internal/util'); const { isPromise } = require('internal/util/types'); const { - isUint32, validateAbortSignal, validateNumber, + validateUint32, } = require('internal/validators'); const { setTimeout } = require('timers/promises'); const { TIMEOUT_MAX } = require('internal/timers'); @@ -149,14 +150,23 @@ class Test extends AsyncResource { this.timeout = parent.timeout; } - if (isUint32(concurrency) && concurrency !== 0) { - this.concurrency = concurrency; - } else if (typeof concurrency === 'boolean') { - if 
(concurrency) { - this.concurrency = isTestRunner ? MathMax(cpus().length - 1, 1) : Infinity; - } else { - this.concurrency = 1; - } + switch (typeof concurrency) { + case 'number': + validateUint32(concurrency, 'options.concurrency', 1); + this.concurrency = concurrency; + break; + + case 'boolean': + if (concurrency) { + this.concurrency = isTestRunner ? MathMax(cpus().length - 1, 1) : Infinity; + } else { + this.concurrency = 1; + } + break; + + default: + if (concurrency != null) + throw new ERR_INVALID_ARG_TYPE('options.concurrency', ['boolean', 'number'], concurrency); } if (timeout != null && timeout !== Infinity) { diff --git a/test/parallel/test-runner-option-validation.js b/test/parallel/test-runner-option-validation.js index a6b7cb1826b166..9d0129253613f2 100644 --- a/test/parallel/test-runner-option-validation.js +++ b/test/parallel/test-runner-option-validation.js @@ -13,3 +13,14 @@ const test = require('node:test'); // Valid values should not throw. test({ timeout }); }); + +[Symbol(), {}, [], () => {}, 1n, '1'].forEach((concurrency) => { + assert.throws(() => test({ concurrency }), { code: 'ERR_INVALID_ARG_TYPE' }); +}); +[-1, 0, 1.1, -Infinity, NaN, 2 ** 33, Number.MAX_SAFE_INTEGER].forEach((concurrency) => { + assert.throws(() => test({ concurrency }), { code: 'ERR_OUT_OF_RANGE' }); +}); +[null, undefined, 1, 2 ** 31, true, false].forEach((concurrency) => { + // Valid values should not throw. + test({ concurrency }); +}); From b6b632c09cc4aee43b706859ff306ac7da55994e Mon Sep 17 00:00:00 2001 From: theanarkh Date: Wed, 27 Jul 2022 06:37:49 +0800 Subject: [PATCH 011/177] net: add local family PR-URL: https://github.com/nodejs/node/pull/43975 Reviewed-By: Paolo Insogna Reviewed-By: Antoine du Hamel Reviewed-By: Matteo Collina Reviewed-By: Luigi Pinca --- doc/api/net.md | 11 +++++++++++ lib/net.js | 4 ++++ test/parallel/test-net-local-address-port.js | 1 + 3 files changed, 16 insertions(+) diff --git a/doc/api/net.md b/doc/api/net.md index 5731af523d0151..9e74b2a5a79b9c 100644 --- a/doc/api/net.md +++ b/doc/api/net.md @@ -294,6 +294,7 @@ TCP server, the argument is as follows, otherwise the argument is `undefined`. * `data` {Object} The argument passed to event listener. * `localAddress` {string} Local address. * `localPort` {number} Local port. + * `localFamily` {string} Local family. * `remoteAddress` {string} Remote address. * `remotePort` {number} Remote port. * `remoteFamily` {string} Remote IP family. `'IPv4'` or `'IPv6'`. @@ -1045,6 +1046,16 @@ added: v0.9.6 The numeric representation of the local port. For example, `80` or `21`. +### `socket.localFamily` + + + +* {string} + +The string representation of the local IP family. `'IPv4'` or `'IPv6'`. + ### `socket.pause()` * Returns: {net.Socket} The socket itself. 
diff --git a/lib/net.js b/lib/net.js index cf48edb0274cc7..02eb9c0d4fe5a5 100644 --- a/lib/net.js +++ b/lib/net.js @@ -840,6 +840,9 @@ protoGetter('localPort', function localPort() { return this._getsockname().port; }); +protoGetter('localFamily', function localFamily() { + return this._getsockname().family; +}); Socket.prototype[kAfterAsyncWrite] = function() { this[kLastWriteQueueSize] = 0; @@ -1674,6 +1677,7 @@ function onconnection(err, clientHandle) { clientHandle.getsockname(localInfo); data.localAddress = localInfo.address; data.localPort = localInfo.port; + data.localFamily = localInfo.family; } if (clientHandle.getpeername) { const remoteInfo = ObjectCreate(null); diff --git a/test/parallel/test-net-local-address-port.js b/test/parallel/test-net-local-address-port.js index dfd7ef359b71d2..cfc6f61ef35ad8 100644 --- a/test/parallel/test-net-local-address-port.js +++ b/test/parallel/test-net-local-address-port.js @@ -27,6 +27,7 @@ const net = require('net'); const server = net.createServer(common.mustCall(function(socket) { assert.strictEqual(socket.localAddress, common.localhostIPv4); assert.strictEqual(socket.localPort, this.address().port); + assert.strictEqual(socket.localFamily, this.address().family); socket.on('end', function() { server.close(); }); From d964b308ae25b292f155cb4b29aacf2abe094c3d Mon Sep 17 00:00:00 2001 From: Feng Yu Date: Wed, 27 Jul 2022 06:38:07 +0800 Subject: [PATCH 012/177] test: remove test-gc-http-client-timeout from flaky list MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/43971 Refs: https://github.com/nodejs/node/pull/43949 Reviewed-By: Tobias Nießen Reviewed-By: Luigi Pinca Reviewed-By: Chengzhong Wu Reviewed-By: Darshan Sen --- test/sequential/sequential.status | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/sequential/sequential.status b/test/sequential/sequential.status index 2ade846d8bc0ea..56f9667d7a3058 100644 --- a/test/sequential/sequential.status +++ b/test/sequential/sequential.status @@ -32,8 +32,6 @@ test-tls-psk-client: PASS, FLAKY test-tls-securepair-client: PASS, FLAKY [$arch==arm] -# https://github.com/nodejs/node/issues/43638 -test-gc-http-client-timeout: PASS,FLAKY [$arch==s390x] # https://github.com/nodejs/node/issues/41286 From a0c57837c4d79b9bf45a65ee547043d46794b017 Mon Sep 17 00:00:00 2001 From: Keyhan Vakil Date: Tue, 26 Jul 2022 15:38:16 -0700 Subject: [PATCH 013/177] deps: cherry-pick 00704f5a from V8 upstream MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Original commit message: Add more efficient API for accesssing ArrayBuffer raw data Raw data access is already possible via GetBackingStore()->GetData(). This API exposes a more efficient way for accessing JSArrayBuffer::backing_store (which, despite the confusing name, is no the BackingStore but its raw data pointer). 
Bug: v8:10343 Change-Id: I695cea91e2c3de75ce6c86bac6e413ce6617958b Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3764341 Reviewed-by: Camillo Bruni Commit-Queue: Marja Hölttä Cr-Commit-Position: refs/heads/main@{#81745} Refs: https://github.com/v8/v8/commit/00704f5a03d9db02c14c4f4c35188effc46e82ab Refs: https://github.com/nodejs/node/issues/32226 PR-URL: https://github.com/nodejs/node/pull/43921 Reviewed-By: Jiawen Geng Reviewed-By: Michaël Zasso Reviewed-By: Feng Yu --- common.gypi | 2 +- deps/v8/include/v8-array-buffer.h | 12 ++++++++++++ deps/v8/src/api/api.cc | 10 ++++++++++ deps/v8/test/cctest/test-api-array-buffer.cc | 5 ++++- 4 files changed, 27 insertions(+), 2 deletions(-) diff --git a/common.gypi b/common.gypi index 851816e084565d..03220a0200a40d 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.9', + 'v8_embedder_string': '-node.10', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/include/v8-array-buffer.h b/deps/v8/include/v8-array-buffer.h index e9047b79ce3b1d..bab840f82c1a3d 100644 --- a/deps/v8/include/v8-array-buffer.h +++ b/deps/v8/include/v8-array-buffer.h @@ -256,6 +256,12 @@ class V8_EXPORT ArrayBuffer : public Object { */ std::shared_ptr GetBackingStore(); + /** + * More efficient shortcut for GetBackingStore()->Data(). The returned pointer + * is valid as long as the ArrayBuffer is alive. + */ + void* Data() const; + V8_INLINE static ArrayBuffer* Cast(Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); @@ -414,6 +420,12 @@ class V8_EXPORT SharedArrayBuffer : public Object { */ std::shared_ptr GetBackingStore(); + /** + * More efficient shortcut for GetBackingStore()->Data(). The returned pointer + * is valid as long as the ArrayBuffer is alive. 
+ */ + void* Data() const; + V8_INLINE static SharedArrayBuffer* Cast(Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); diff --git a/deps/v8/src/api/api.cc b/deps/v8/src/api/api.cc index 393f5471914dd8..580ba47f1a727d 100644 --- a/deps/v8/src/api/api.cc +++ b/deps/v8/src/api/api.cc @@ -4038,6 +4038,11 @@ std::shared_ptr v8::ArrayBuffer::GetBackingStore() { return std::static_pointer_cast(bs_base); } +void* v8::ArrayBuffer::Data() const { + i::Handle self = Utils::OpenHandle(this); + return self->backing_store(); +} + std::shared_ptr v8::SharedArrayBuffer::GetBackingStore() { i::Handle self = Utils::OpenHandle(this); std::shared_ptr backing_store = self->GetBackingStore(); @@ -4048,6 +4053,11 @@ std::shared_ptr v8::SharedArrayBuffer::GetBackingStore() { return std::static_pointer_cast(bs_base); } +void* v8::SharedArrayBuffer::Data() const { + i::Handle self = Utils::OpenHandle(this); + return self->backing_store(); +} + void v8::ArrayBuffer::CheckCast(Value* that) { i::Handle obj = Utils::OpenHandle(that); Utils::ApiCheck( diff --git a/deps/v8/test/cctest/test-api-array-buffer.cc b/deps/v8/test/cctest/test-api-array-buffer.cc index d472ebcf32033d..b087274b31137d 100644 --- a/deps/v8/test/cctest/test-api-array-buffer.cc +++ b/deps/v8/test/cctest/test-api-array-buffer.cc @@ -366,6 +366,7 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) { // Should not move the pointer CHECK_EQ(ab->GetBackingStore()->Data(), store_ptr); + CHECK_EQ(ab->Data(), store_ptr); CcTest::array_buffer_allocator()->Free(buffer, 100); } @@ -394,8 +395,8 @@ THREADED_TEST(SkipArrayBufferDuringScavenge) { CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now - // Use `ab` to silence compiler warning CHECK_EQ(ab->GetBackingStore()->Data(), store_ptr); + CHECK_EQ(ab->Data(), store_ptr); } THREADED_TEST(Regress1006600) { @@ -418,6 +419,7 @@ THREADED_TEST(ArrayBuffer_NewBackingStore) { CHECK(!backing_store->IsShared()); Local ab = v8::ArrayBuffer::New(isolate, backing_store); CHECK_EQ(backing_store.get(), ab->GetBackingStore().get()); + CHECK_EQ(backing_store->Data(), ab->Data()); } THREADED_TEST(SharedArrayBuffer_NewBackingStore) { @@ -430,6 +432,7 @@ THREADED_TEST(SharedArrayBuffer_NewBackingStore) { Local ab = v8::SharedArrayBuffer::New(isolate, backing_store); CHECK_EQ(backing_store.get(), ab->GetBackingStore().get()); + CHECK_EQ(backing_store->Data(), ab->Data()); } static void* backing_store_custom_data = nullptr; From 94912bb09c886ebb5e381da679987ec4fedd86e2 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Wed, 27 Jul 2022 00:38:24 +0200 Subject: [PATCH 014/177] lib: add `Promise` methods to `avoid-prototype-pollution` lint rule PR-URL: https://github.com/nodejs/node/pull/43849 Reviewed-By: Matteo Collina Reviewed-By: Paolo Insogna --- lib/internal/debugger/inspect.js | 39 +++++++++++-------- lib/internal/http2/core.js | 6 +-- lib/internal/main/worker_thread.js | 4 +- lib/internal/modules/esm/module_job.js | 4 +- lib/internal/per_context/primordials.js | 3 -- lib/internal/streams/operators.js | 4 +- lib/internal/webstreams/readablestream.js | 6 +-- .../test-eslint-avoid-prototype-pollution.js | 20 ++++++++++ test/parallel/test-primordials-promise.js | 18 ++++----- .../eslint-rules/avoid-prototype-pollution.js | 32 +++++++++++++-- 10 files changed, 91 insertions(+), 45 deletions(-) diff --git a/lib/internal/debugger/inspect.js b/lib/internal/debugger/inspect.js index e5211c285bd5eb..42b5c64ab87029 100644 --- 
a/lib/internal/debugger/inspect.js +++ b/lib/internal/debugger/inspect.js @@ -11,7 +11,6 @@ const { FunctionPrototypeBind, Number, Promise, - PromisePrototypeCatch, PromisePrototypeThen, PromiseResolve, Proxy, @@ -169,12 +168,17 @@ class NodeInspector { process.once('SIGTERM', exitCodeZero); process.once('SIGHUP', exitCodeZero); - PromisePrototypeCatch(PromisePrototypeThen(this.run(), async () => { - const repl = await startRepl(); - this.repl = repl; - this.repl.on('exit', exitCodeZero); - this.paused = false; - }), (error) => process.nextTick(() => { throw error; })); + (async () => { + try { + await this.run(); + const repl = await startRepl(); + this.repl = repl; + this.repl.on('exit', exitCodeZero); + this.paused = false; + } catch (error) { + process.nextTick(() => { throw error; }); + } + })(); } suspendReplWhile(fn) { @@ -183,16 +187,19 @@ class NodeInspector { } this.stdin.pause(); this.paused = true; - return PromisePrototypeCatch(PromisePrototypeThen(new Promise((resolve) => { - resolve(fn()); - }), () => { - this.paused = false; - if (this.repl) { - this.repl.resume(); - this.repl.displayPrompt(); + return (async () => { + try { + await fn(); + this.paused = false; + if (this.repl) { + this.repl.resume(); + this.repl.displayPrompt(); + } + this.stdin.resume(); + } catch (error) { + process.nextTick(() => { throw error; }); } - this.stdin.resume(); - }), (error) => process.nextTick(() => { throw error; })); + })(); } killChild() { diff --git a/lib/internal/http2/core.js b/lib/internal/http2/core.js index 737c4c3b357360..c6915e903e9d01 100644 --- a/lib/internal/http2/core.js +++ b/lib/internal/http2/core.js @@ -17,7 +17,7 @@ const { ObjectDefineProperty, ObjectPrototypeHasOwnProperty, Promise, - PromisePrototypeCatch, + PromisePrototypeThen, Proxy, ReflectApply, ReflectGet, @@ -2456,8 +2456,8 @@ function processHeaders(oldHeaders, options) { function onFileUnpipe() { const stream = this.sink[kOwner]; if (stream.ownsFd) - PromisePrototypeCatch(this.source.close(), - FunctionPrototypeBind(stream.destroy, stream)); + PromisePrototypeThen(this.source.close(), undefined, + FunctionPrototypeBind(stream.destroy, stream)); else this.source.releaseFD(); } diff --git a/lib/internal/main/worker_thread.js b/lib/internal/main/worker_thread.js index e21c1b1fe2cc7f..8d5bc45edd50f3 100644 --- a/lib/internal/main/worker_thread.js +++ b/lib/internal/main/worker_thread.js @@ -8,7 +8,7 @@ const { ArrayPrototypePushApply, ArrayPrototypeSplice, ObjectDefineProperty, - PromisePrototypeCatch, + PromisePrototypeThen, globalThis: { Atomics }, } = primordials; @@ -185,7 +185,7 @@ port.on('message', (message) => { evalScript(name, filename); } else if (doEval === 'module') { const { evalModule } = require('internal/process/execution'); - PromisePrototypeCatch(evalModule(filename), (e) => { + PromisePrototypeThen(evalModule(filename), undefined, (e) => { workerOnGlobalUncaughtException(e, true); }); } else { diff --git a/lib/internal/modules/esm/module_job.js b/lib/internal/modules/esm/module_job.js index 3a2ec7343a1d53..2dd69b32f77cb5 100644 --- a/lib/internal/modules/esm/module_job.js +++ b/lib/internal/modules/esm/module_job.js @@ -8,7 +8,7 @@ const { ObjectCreate, ObjectSetPrototypeOf, PromiseResolve, - PromisePrototypeCatch, + PromisePrototypeThen, ReflectApply, RegExpPrototypeExec, RegExpPrototypeSymbolReplace, @@ -88,7 +88,7 @@ class ModuleJob { this.linked = link(); // This promise is awaited later anyway, so silence // 'unhandled rejection' warnings. 
- PromisePrototypeCatch(this.linked, noop); + PromisePrototypeThen(this.linked, undefined, noop); // instantiated == deep dependency jobs wrappers are instantiated, // and module wrapper is instantiated. diff --git a/lib/internal/per_context/primordials.js b/lib/internal/per_context/primordials.js index 7c6f513d9ccebd..2ee5efb6cb7520 100644 --- a/lib/internal/per_context/primordials.js +++ b/lib/internal/per_context/primordials.js @@ -409,9 +409,6 @@ const SafePromise = makeSafe( } ); -primordials.PromisePrototypeCatch = (thisPromise, onRejected) => - PromisePrototypeThen(thisPromise, undefined, onRejected); - /** * Attaches a callback that is invoked when the Promise is settled (fulfilled or * rejected). The resolved value cannot be modified from the callback. diff --git a/lib/internal/streams/operators.js b/lib/internal/streams/operators.js index a450aa78e759ab..d45efbad7c23f6 100644 --- a/lib/internal/streams/operators.js +++ b/lib/internal/streams/operators.js @@ -25,7 +25,7 @@ const { NumberIsNaN, Promise, PromiseReject, - PromisePrototypeCatch, + PromisePrototypeThen, Symbol, } = primordials; @@ -113,7 +113,7 @@ function map(fn, options) { queue.push(kEof); } catch (err) { const val = PromiseReject(err); - PromisePrototypeCatch(val, onDone); + PromisePrototypeThen(val, undefined, onDone); queue.push(val); } finally { done = true; diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js index 724710fdb1749d..5c5c8da724ace9 100644 --- a/lib/internal/webstreams/readablestream.js +++ b/lib/internal/webstreams/readablestream.js @@ -16,7 +16,6 @@ const { ObjectDefineProperties, ObjectSetPrototypeOf, Promise, - PromisePrototypeCatch, PromisePrototypeThen, PromiseResolve, PromiseReject, @@ -1334,7 +1333,7 @@ function readableStreamPipeTo( if (stream[kState].state === 'errored') action(stream[kState].storedError); else - PromisePrototypeCatch(promise, action); + PromisePrototypeThen(promise, undefined, action); } function watchClosed(stream, promise, action) { @@ -1503,8 +1502,9 @@ function readableStreamTee(stream, cloneForBranch2) { branch2 = createTeeReadableStream(nonOpStart, pullAlgorithm, cancel2Algorithm); - PromisePrototypeCatch( + PromisePrototypeThen( reader[kState].close.promise, + undefined, (error) => { readableStreamDefaultControllerError(branch1[kState].controller, error); readableStreamDefaultControllerError(branch2[kState].controller, error); diff --git a/test/parallel/test-eslint-avoid-prototype-pollution.js b/test/parallel/test-eslint-avoid-prototype-pollution.js index 26b0852c0c24ee..f10d6ea973b347 100644 --- a/test/parallel/test-eslint-avoid-prototype-pollution.js +++ b/test/parallel/test-eslint-avoid-prototype-pollution.js @@ -203,5 +203,25 @@ new RuleTester({ code: 'new Proxy({}, { ...{ __proto__: null } })', errors: [{ message: /null-prototype/ }] }, + { + code: 'PromisePrototypeCatch(promise, ()=>{})', + errors: [{ message: /\bPromisePrototypeThen\b/ }] + }, + { + code: 'PromiseAll([])', + errors: [{ message: /\bSafePromiseAll\b/ }] + }, + { + code: 'PromiseAllSettled([])', + errors: [{ message: /\bSafePromiseAllSettled\b/ }] + }, + { + code: 'PromiseAny([])', + errors: [{ message: /\bSafePromiseAny\b/ }] + }, + { + code: 'PromiseRace([])', + errors: [{ message: /\bSafePromiseRace\b/ }] + }, ] }); diff --git a/test/parallel/test-primordials-promise.js b/test/parallel/test-primordials-promise.js index 7ff29fc0f5d407..c753b4b7e79912 100644 --- a/test/parallel/test-primordials-promise.js +++ 
b/test/parallel/test-primordials-promise.js @@ -5,7 +5,6 @@ const common = require('../common'); const assert = require('assert'); const { - PromisePrototypeCatch, PromisePrototypeThen, SafePromiseAll, SafePromiseAllSettled, @@ -14,16 +13,15 @@ const { SafePromiseRace, } = require('internal/test/binding').primordials; -Array.prototype[Symbol.iterator] = common.mustNotCall(); -Promise.all = common.mustNotCall(); -Promise.allSettled = common.mustNotCall(); -Promise.any = common.mustNotCall(); -Promise.race = common.mustNotCall(); -Promise.prototype.catch = common.mustNotCall(); -Promise.prototype.finally = common.mustNotCall(); -Promise.prototype.then = common.mustNotCall(); +Array.prototype[Symbol.iterator] = common.mustNotCall('%Array.prototype%[@@iterator]'); +Promise.all = common.mustNotCall('%Promise%.all'); +Promise.allSettled = common.mustNotCall('%Promise%.allSettled'); +Promise.any = common.mustNotCall('%Promise%.any'); +Promise.race = common.mustNotCall('%Promise%.race'); +Promise.prototype.catch = common.mustNotCall('%Promise.prototype%.catch'); +Promise.prototype.finally = common.mustNotCall('%Promise.prototype%.finally'); +Promise.prototype.then = common.mustNotCall('%Promise.prototype%.then'); -assertIsPromise(PromisePrototypeCatch(Promise.reject(), common.mustCall())); assertIsPromise(PromisePrototypeThen(test(), common.mustCall())); assertIsPromise(SafePromisePrototypeFinally(test(), common.mustCall())); diff --git a/tools/eslint-rules/avoid-prototype-pollution.js b/tools/eslint-rules/avoid-prototype-pollution.js index 1f71272bd7d0b3..d59b62f95028cc 100644 --- a/tools/eslint-rules/avoid-prototype-pollution.js +++ b/tools/eslint-rules/avoid-prototype-pollution.js @@ -109,11 +109,11 @@ module.exports = { testRange.start = testRange.start + 'RegexpPrototype'.length; testRange.end = testRange.start + 'Test'.length; return [ - fixer.replaceTextRange(node.range, 'Exec'), + fixer.replaceTextRange(testRange, 'Exec'), fixer.insertTextAfter(node, ' !== null'), ]; } - }] + }], }); }, [`${CallExpression}[expression.callee.name=${/^RegExpPrototypeSymbol(Match|MatchAll|Search)$/}]`](node) { @@ -142,9 +142,33 @@ module.exports = { } context.report({ node, - message: 'Proxy handler must be a null-prototype object' + message: 'Proxy handler must be a null-prototype object', }); - } + }, + + [`${CallExpression}[expression.callee.name=PromisePrototypeCatch]`](node) { + context.report({ + node, + message: '%Promise.prototype.catch% look up the `then` property of ' + + 'the `this` argument, use PromisePrototypeThen instead', + }); + }, + + [`${CallExpression}[expression.callee.name=PromisePrototypeFinally]`](node) { + context.report({ + node, + message: '%Promise.prototype.finally% look up the `then` property of ' + + 'the `this` argument, use SafePromisePrototypeFinally or ' + + 'try/finally instead', + }); + }, + + [`${CallExpression}[expression.callee.name=${/^Promise(All(Settled)?|Any|Race)/}]`](node) { + context.report({ + node, + message: `Use Safe${node.expression.callee.name} instead of ${node.expression.callee.name}`, + }); + }, }; }, }; From 72a9ecf94f50aab59d4123b8b4b5918b5eed3004 Mon Sep 17 00:00:00 2001 From: Dominic Saadi Date: Wed, 27 Jul 2022 15:55:06 +0900 Subject: [PATCH 015/177] doc: fix typo in packages.md "previous" read like it should be "previously" PR-URL: https://github.com/nodejs/node/pull/44005 Reviewed-By: Geoffrey Booth Reviewed-By: Feng Yu Reviewed-By: Darshan Sen Reviewed-By: Antoine du Hamel --- doc/api/packages.md | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/doc/api/packages.md b/doc/api/packages.md index 2dce76f8539c43..2ee71ed77c2e54 100644 --- a/doc/api/packages.md +++ b/doc/api/packages.md @@ -257,7 +257,7 @@ likely be a breaking change.** To make the introduction of [`"exports"`][] non-breaking, ensure that every previously supported entry point is exported. It is best to explicitly specify entry points so that the package's public API is well-defined. For example, -a project that previous exported `main`, `lib`, +a project that previously exported `main`, `lib`, `feature`, and the `package.json` could use the following `package.exports`: ```json From 04fdc3e1fa74ccd0bc3a2971fa054d142c61acb0 Mon Sep 17 00:00:00 2001 From: Moshe Atlow Date: Wed, 27 Jul 2022 10:02:07 +0300 Subject: [PATCH 016/177] test_runner: graceful termination on `--test` only PR-URL: https://github.com/nodejs/node/pull/43977 Reviewed-By: Antoine du Hamel Reviewed-By: Nitzan Uziely Reviewed-By: Benjamin Gruenbaum --- lib/internal/test_runner/harness.js | 10 +++-- .../test-runner/never_ending_async.js | 6 +++ .../fixtures/test-runner/never_ending_sync.js | 5 +++ test/parallel/test-runner-exit-code.js | 43 +++++++++++-------- 4 files changed, 43 insertions(+), 21 deletions(-) create mode 100644 test/fixtures/test-runner/never_ending_async.js create mode 100644 test/fixtures/test-runner/never_ending_sync.js diff --git a/lib/internal/test_runner/harness.js b/lib/internal/test_runner/harness.js index 1bdd6e99ed1c3b..5d0ce337ccd10c 100644 --- a/lib/internal/test_runner/harness.js +++ b/lib/internal/test_runner/harness.js @@ -13,9 +13,10 @@ const { ERR_TEST_FAILURE, }, } = require('internal/errors'); +const { getOptionValue } = require('internal/options'); const { Test, ItTest, Suite } = require('internal/test_runner/test'); - +const isTestRunner = getOptionValue('--test'); const testResources = new SafeMap(); const root = new Test({ __proto__: null, name: '' }); let wasRootSetup = false; @@ -134,8 +135,11 @@ function setup(root) { process.on('uncaughtException', exceptionHandler); process.on('unhandledRejection', rejectionHandler); process.on('beforeExit', exitHandler); - process.on('SIGINT', terminationHandler); - process.on('SIGTERM', terminationHandler); + // TODO(MoLow): Make it configurable to hook when isTestRunner === false. + if (isTestRunner) { + process.on('SIGINT', terminationHandler); + process.on('SIGTERM', terminationHandler); + } root.reporter.pipe(process.stdout); root.reporter.version(); diff --git a/test/fixtures/test-runner/never_ending_async.js b/test/fixtures/test-runner/never_ending_async.js new file mode 100644 index 00000000000000..0f26ea9291fd0d --- /dev/null +++ b/test/fixtures/test-runner/never_ending_async.js @@ -0,0 +1,6 @@ +const test = require('node:test'); +const { setTimeout } = require('timers/promises'); + +// We are using a very large timeout value to ensure that the parent process +// will have time to send a SIGINT signal to cancel the test. 
+test('never ending test', () => setTimeout(100_000_000));
diff --git a/test/fixtures/test-runner/never_ending_sync.js b/test/fixtures/test-runner/never_ending_sync.js
new file mode 100644
index 00000000000000..efc78757b18852
--- /dev/null
+++ b/test/fixtures/test-runner/never_ending_sync.js
@@ -0,0 +1,5 @@
+const test = require('node:test');
+
+test('never ending test', () => {
+  while (true);
+});
diff --git a/test/parallel/test-runner-exit-code.js b/test/parallel/test-runner-exit-code.js
index 638ad9853aeefb..1833fa00f7f7ae 100644
--- a/test/parallel/test-runner-exit-code.js
+++ b/test/parallel/test-runner-exit-code.js
@@ -2,8 +2,29 @@
 const common = require('../common');
 const fixtures = require('../common/fixtures');
 const assert = require('assert');
-const { spawnSync } = require('child_process');
-const { setTimeout } = require('timers/promises');
+const { spawnSync, spawn } = require('child_process');
+const { once } = require('events');
+const { finished } = require('stream/promises');
+
+async function runAndKill(file) {
+  if (common.isWindows) {
+    common.printSkipMessage(`signals are not supported in windows, skipping ${file}`);
+    return;
+  }
+  let stdout = '';
+  const child = spawn(process.execPath, ['--test', file]);
+  child.stdout.setEncoding('utf8');
+  child.stdout.on('data', (chunk) => {
+    if (!stdout.length) child.kill('SIGINT');
+    stdout += chunk;
+  });
+  const [code, signal] = await once(child, 'exit');
+  await finished(child.stdout);
+  assert.match(stdout, /not ok 1/);
+  assert.match(stdout, /# cancelled 1\n/);
+  assert.strictEqual(signal, null);
+  assert.strictEqual(code, 1);
+}
 
 if (process.argv[2] === 'child') {
   const test = require('node:test');
@@ -17,12 +38,6 @@ if (process.argv[2] === 'child') {
     test('failing test', () => {
       assert.strictEqual(true, false);
     });
-  } else if (process.argv[3] === 'never_ends') {
-    assert.strictEqual(process.argv[3], 'never_ends');
-    test('never ending test', () => {
-      return setTimeout(100_000_000);
-    });
-    process.kill(process.pid, 'SIGINT');
   } else assert.fail('unreachable');
 } else {
   let child = spawnSync(process.execPath, [__filename, 'child', 'pass']);
@@ -37,14 +52,6 @@ if (process.argv[2] === 'child') {
   assert.strictEqual(child.status, 1);
   assert.strictEqual(child.signal, null);
 
-  child = spawnSync(process.execPath, [__filename, 'child', 'never_ends']);
-  assert.strictEqual(child.status, 1);
-  assert.strictEqual(child.signal, null);
-  if (common.isWindows) {
-    common.printSkipMessage('signals are not supported in windows');
-  } else {
-    const stdout = child.stdout.toString();
-    assert.match(stdout, /not ok 1 - never ending test/);
-    assert.match(stdout, /# cancelled 1/);
-  }
+  runAndKill(fixtures.path('test-runner', 'never_ending_sync.js')).then(common.mustCall());
+  runAndKill(fixtures.path('test-runner', 'never_ending_async.js')).then(common.mustCall());
 }

From 5dc39a10bdd33d990e2840a22163c8ac42ff1223 Mon Sep 17 00:00:00 2001
From: ywave620 <60539365+ywave620@users.noreply.github.com>
Date: Wed, 27 Jul 2022 17:09:06 +0800
Subject: [PATCH 017/177] http: reuse socket only when it is drained

Ensure every request is assigned to a drained socket or to nothing at all.
A request gains nothing from being attached to a non-drained socket, and
doing so prevents it from being assigned to a drained one, which might
become available soon or may already be in the free pool. We achieve this
by claiming a socket as free only when the socket is drained.
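The difference between the two signals can be observed directly on a `ClientRequest`. The following is a minimal sketch, not part of the patch; the host, port and body size are arbitrary placeholders, and nothing needs to be listening for the flags to be inspectable:

```js
'use strict';
const http = require('http');

const req = http.request({ host: '127.0.0.1', port: 8000 });
req.on('error', () => {});          // nothing is listening; ignore the failure

// A body large enough that it cannot be flushed in a single write.
req.end(Buffer.alloc(64 * 1024 * 1024));

// `end()` has been called, so the legacy `req.finished` flag is already true...
console.log(req.finished);          // true
// ...but the data is still queued, i.e. the socket is not drained, so the
// agent must not hand this socket to another request yet. `writableFinished`
// (and the 'finish' event) only report completion once all written data has
// been accepted.
console.log(req.writableFinished);  // false at this point

req.destroy();
```

Keying the keep-alive bookkeeping off `'finish'`/`writableFinished` instead of `'prefinish'`/`finished` is what guarantees that only drained sockets are returned to the free pool.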
PR-URL: https://github.com/nodejs/node/pull/43902 Reviewed-By: Matteo Collina Reviewed-By: Paolo Insogna Reviewed-By: Robert Nagy --- lib/_http_client.js | 12 +- lib/_http_outgoing.js | 4 +- ...st-http-agent-reuse-drained-socket-only.js | 122 ++++++++++++++++++ 3 files changed, 134 insertions(+), 4 deletions(-) create mode 100644 test/parallel/test-http-agent-reuse-drained-socket-only.js diff --git a/lib/_http_client.js b/lib/_http_client.js index 98ebdfc5b77fba..854434533f5b41 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -665,7 +665,7 @@ function parserOnIncomingClient(res, shouldKeepAlive) { // Add our listener first, so that we guarantee socket cleanup res.on('end', responseOnEnd); - req.on('prefinish', requestOnPrefinish); + req.on('finish', requestOnFinish); socket.on('timeout', responseOnTimeout); // If the user did not listen for the 'response' event, then they @@ -737,12 +737,16 @@ function responseOnEnd() { socket.end(); } assert(!socket.writable); - } else if (req.finished && !this.aborted) { + } else if (req.writableFinished && !this.aborted) { + assert(req.finished); // We can assume `req.finished` means all data has been written since: // - `'responseOnEnd'` means we have been assigned a socket. // - when we have a socket we write directly to it without buffering. // - `req.finished` means `end()` has been called and no further data. // can be written + // In addition, `req.writableFinished` means all data written has been + // accepted by the kernel. (i.e. the `req.socket` is drained).Without + // this constraint, we may assign a non drained socket to a request. responseKeepAlive(req); } } @@ -755,7 +759,9 @@ function responseOnTimeout() { res.emit('timeout'); } -function requestOnPrefinish() { +// This function is necessary in the case where we receive the entire reponse +// from server before we finish sending out the request +function requestOnFinish() { const req = this; if (req.shouldKeepAlive && req._ended) diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index 006ac437a14938..dcdea29968590a 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -985,6 +985,8 @@ OutgoingMessage.prototype.end = function end(chunk, encoding, callback) { }; +// This function is called once all user data are flushed to the socket. +// Note that it has a chance that the socket is not drained. OutgoingMessage.prototype._finish = function _finish() { assert(this.socket); this.emit('prefinish'); @@ -1008,7 +1010,7 @@ OutgoingMessage.prototype._finish = function _finish() { // the socket yet. Thus the outgoing messages need to be prepared to queue // up data internally before sending it on further to the socket's queue. // -// This function, outgoingFlush(), is called by both the Server and Client +// This function, _flush(), is called by both the Server and Client // to attempt to flush any pending messages out to the socket. 
OutgoingMessage.prototype._flush = function _flush() { const socket = this.socket; diff --git a/test/parallel/test-http-agent-reuse-drained-socket-only.js b/test/parallel/test-http-agent-reuse-drained-socket-only.js new file mode 100644 index 00000000000000..2bd53f40edaaf3 --- /dev/null +++ b/test/parallel/test-http-agent-reuse-drained-socket-only.js @@ -0,0 +1,122 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const http = require('http'); +const net = require('net'); + +const agent = new http.Agent({ + keepAlive: true, + maxFreeSockets: Infinity, + maxSockets: Infinity, + maxTotalSockets: Infinity, +}); + +const server = net.createServer({ + pauseOnConnect: true, +}, (sock) => { + // Do not read anything from `sock` + sock.pause(); + sock.write('HTTP/1.1 200 OK\r\nContent-Length: 0\r\nConnection: Keep-Alive\r\n\r\n'); +}); + +server.listen(0, common.mustCall(() => { + sendFstReq(server.address().port); +})); + +function sendFstReq(serverPort) { + const req = http.request({ + agent, + host: '127.0.0.1', + port: serverPort, + }, (res) => { + res.on('data', noop); + res.on('end', common.mustCall(() => { + // Agent's socket reusing code is registered to process.nextTick(), + // and will be run after this function, make sure it take effect. + setImmediate(sendSecReq, serverPort, req.socket.localPort); + })); + }); + + // Make the `req.socket` non drained, i.e. has some data queued to write to + // and accept by the kernel. In Linux and Mac, we only need to call `req.end(aLargeBuffer)`. + // However, in Windows, the mechanism of acceptance is loose, the following code is a workaround + // for Windows. + + /** + * https://docs.microsoft.com/en-US/troubleshoot/windows/win32/data-segment-tcp-winsock says + * + * Winsock uses the following rules to indicate a send completion to the application + * (depending on how the send is invoked, the completion notification could be the + * function returning from a blocking call, signaling an event, or calling a notification + * function, and so forth): + * - If the socket is still within SO_SNDBUF quota, Winsock copies the data from the application + * send and indicates the send completion to the application. + * - If the socket is beyond SO_SNDBUF quota and there's only one previously buffered send still + * in the stack kernel buffer, Winsock copies the data from the application send and indicates + * the send completion to the application. + * - If the socket is beyond SO_SNDBUF quota and there's more than one previously buffered send + * in the stack kernel buffer, Winsock copies the data from the application send. Winsock doesn't + * indicate the send completion to the application until the stack completes enough sends to put + * back the socket within SO_SNDBUF quota or only one outstanding send condition. 
+ */ + + req.on('socket', () => { + req.socket.on('connect', () => { + // Print tcp send buffer information + console.log(process.report.getReport().libuv.filter((handle) => handle.type === 'tcp')); + + const dataLargerThanTCPSendBuf = Buffer.alloc(1024 * 1024 * 64, 0); + + req.write(dataLargerThanTCPSendBuf); + req.uncork(); + if (process.platform === 'win32') { + assert.ok(req.socket.writableLength === 0); + } + + req.write(dataLargerThanTCPSendBuf); + req.uncork(); + if (process.platform === 'win32') { + assert.ok(req.socket.writableLength === 0); + } + + req.end(dataLargerThanTCPSendBuf); + assert.ok(req.socket.writableLength > 0); + }); + }); +} + +function sendSecReq(serverPort, fstReqCliPort) { + // Make the second request, which should be sent on a new socket + // because the first socket is not drained and hence can not be reused + const req = http.request({ + agent, + host: '127.0.0.1', + port: serverPort, + }, (res) => { + res.on('data', noop); + res.on('end', common.mustCall(() => { + setImmediate(sendThrReq, serverPort, req.socket.localPort); + })); + }); + + req.on('socket', common.mustCall((sock) => { + assert.notStrictEqual(sock.localPort, fstReqCliPort); + })); + req.end(); +} + +function sendThrReq(serverPort, secReqCliPort) { + // Make the third request, the agent should reuse the second socket we just made + const req = http.request({ + agent, + host: '127.0.0.1', + port: serverPort, + }, noop); + + req.on('socket', common.mustCall((sock) => { + assert.strictEqual(sock.localPort, secReqCliPort); + process.exit(0); + })); +} + +function noop() { } From 460397709be04e5952e5ef1e55913da9fdb1fffb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Wed, 27 Jul 2022 12:11:41 +0200 Subject: [PATCH 018/177] src: remove unnecessary cast in crypto_sig.cc ByteSource::Allocated accepts a void pointer now, so we do not need to cast the argument to a char pointer. Refs: https://github.com/nodejs/node/pull/43202 PR-URL: https://github.com/nodejs/node/pull/44001 Reviewed-By: Filip Skokan Reviewed-By: Darshan Sen Reviewed-By: Feng Yu --- src/crypto/crypto_sig.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/crypto/crypto_sig.cc b/src/crypto/crypto_sig.cc index 23f7ec92ff3439..72f98788d17539 100644 --- a/src/crypto/crypto_sig.cc +++ b/src/crypto/crypto_sig.cc @@ -216,7 +216,7 @@ ByteSource ConvertSignatureToDER( CHECK_NOT_NULL(data); - return ByteSource::Allocated(reinterpret_cast(data), len); + return ByteSource::Allocated(data, len); } void CheckThrow(Environment* env, SignBase::Error error) { From 374b77619bb44660fe2ae1f80597c42835cd96ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Wed, 27 Jul 2022 22:20:02 +0200 Subject: [PATCH 019/177] doc: list supported MODP groups explicitly Instead of referring users to perl to find information about supported MODP groups in crypto_groups.h, explicitly list the groups with their respective strengths and with references to the defining RFC sections. 
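As a usage illustration (not part of the patch), any of the group names listed below can be passed to `createDiffieHellmanGroup()`; the choice of `'modp14'` here is arbitrary:

```js
const { createDiffieHellmanGroup } = require('node:crypto');

// Both sides agree on the same well-known group, here the 2048-bit MODP group
// from RFC 3526 section 3.
const alice = createDiffieHellmanGroup('modp14');
const bob = createDiffieHellmanGroup('modp14');
alice.generateKeys();
bob.generateKeys();

// Each side combines its own private key with the peer's public key.
const secretA = alice.computeSecret(bob.getPublicKey());
const secretB = bob.computeSecret(alice.getPublicKey());
console.log(secretA.equals(secretB)); // true: both derive the same shared secret
```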
Refs: https://github.com/nodejs/node/pull/43896 PR-URL: https://github.com/nodejs/node/pull/43986 Reviewed-By: Luigi Pinca Reviewed-By: Filip Skokan --- doc/api/crypto.md | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/doc/api/crypto.md b/doc/api/crypto.md index e5bcd3b86651c9..ce12d9472dc57f 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -1181,20 +1181,16 @@ const { createDiffieHellmanGroup } = require('node:crypto'); const dh = createDiffieHellmanGroup('modp1'); ``` -The name (e.g. `'modp1'`) is taken from [RFC 2412][] (modp1 and 2) and -[RFC 3526][]: - -```console -$ perl -ne 'print "$1\n" if /"(modp\d+)"/' src/node_crypto_groups.h -modp1 # 768 bits -modp2 # 1024 bits -modp5 # 1536 bits -modp14 # 2048 bits -modp15 # etc. -modp16 -modp17 -modp18 -``` +The following groups are supported: + +* `'modp1'` (768 bits, [RFC 2409][] Section 6.1) +* `'modp2'` (1024 bits, [RFC 2409][] Section 6.2) +* `'modp5'` (1536 bits, [RFC 3526][] Section 2) +* `'modp14'` (2048 bits, [RFC 3526][] Section 3) +* `'modp15'` (3072 bits, [RFC 3526][] Section 4) +* `'modp16'` (4096 bits, [RFC 3526][] Section 5) +* `'modp17'` (6144 bits, [RFC 3526][] Section 6) +* `'modp18'` (8192 bits, [RFC 3526][] Section 7) ## Class: `ECDH` @@ -6099,6 +6095,7 @@ See the [list of SSL OP Flags][] for details. [Nonce-Disrespecting Adversaries]: https://github.com/nonce-disrespect/nonce-disrespect [OpenSSL's SPKAC implementation]: https://www.openssl.org/docs/man1.1.0/apps/openssl-spkac.html [RFC 1421]: https://www.rfc-editor.org/rfc/rfc1421.txt +[RFC 2409]: https://www.rfc-editor.org/rfc/rfc2409.txt [RFC 2412]: https://www.rfc-editor.org/rfc/rfc2412.txt [RFC 2818]: https://www.rfc-editor.org/rfc/rfc2818.txt [RFC 3526]: https://www.rfc-editor.org/rfc/rfc3526.txt From 4249276783cf8bbb6aa98af41d15731c4fb6d195 Mon Sep 17 00:00:00 2001 From: legendecas Date: Thu, 28 Jul 2022 08:09:30 +0800 Subject: [PATCH 020/177] src,lib: print source map error source on demand The source context is not prepended to the value of the `stack` property when the source map is not enabled. Rather than prepending the error source context to the value of the `stack` property unconditionally, this patch aligns the behavior and only prints the source context when the error is not handled by userland (e.g. fatal errors). Also, this patch fixes that when source-map support is enabled, the error source context is not pointing to where the error was thrown. 
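A rough sketch of the resulting behaviour (not part of the patch; `./dist/app.js` stands in for any transpiled file that ships a source map and throws):

```js
process.setSourceMapsEnabled(true);

try {
  require('./dist/app.js');   // hypothetical transpiled module that throws
} catch (err) {
  // A handled error only carries the source-map-remapped stack trace; the
  // original source line is no longer prepended to `err.stack`.
  console.error(err.stack);
}

// If the same error were left uncaught, Node.js would still print the
// offending source line (resolved through the source map, at the location
// where the error was actually thrown) as part of its fatal-error output.
```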
PR-URL: https://github.com/nodejs/node/pull/43875 Fixes: https://github.com/nodejs/node/issues/43186 Fixes: https://github.com/nodejs/node/issues/41541 Reviewed-By: Ben Coe Reviewed-By: Matteo Collina --- .eslintignore | 1 + benchmark/es/error-stack.js | 34 +++++++ benchmark/fixtures/simple-error-stack.js | 15 +++ benchmark/fixtures/simple-error-stack.ts | 17 ++++ .../source_map/prepare_stack_trace.js | 44 +++++---- lib/internal/source_map/source_map_cache.js | 91 +++++++++++-------- src/env.h | 1 + src/node_errors.cc | 58 +++++++++++- test/message/source_map_disabled_by_api.js | 6 +- test/message/source_map_disabled_by_api.out | 6 -- test/message/source_map_enabled_by_api.js | 6 +- test/message/source_map_enabled_by_api.out | 10 -- .../message/source_map_enclosing_function.out | 6 +- test/message/source_map_eval.out | 4 + test/message/source_map_throw_catch.out | 3 - test/message/source_map_throw_first_tick.out | 2 +- test/message/source_map_throw_icu.out | 2 +- .../source_map_throw_set_immediate.out | 2 +- 18 files changed, 221 insertions(+), 87 deletions(-) create mode 100644 benchmark/es/error-stack.js create mode 100644 benchmark/fixtures/simple-error-stack.js create mode 100644 benchmark/fixtures/simple-error-stack.ts diff --git a/.eslintignore b/.eslintignore index 5941496e1a6280..153ac6e24f731e 100644 --- a/.eslintignore +++ b/.eslintignore @@ -6,6 +6,7 @@ test/message/esm_display_syntax_error.mjs tools/icu tools/lint-md/lint-md.mjs benchmark/tmp +benchmark/fixtures doc/**/*.js !doc/api_assets/*.js !.eslintrc.js diff --git a/benchmark/es/error-stack.js b/benchmark/es/error-stack.js new file mode 100644 index 00000000000000..907f308ea41558 --- /dev/null +++ b/benchmark/es/error-stack.js @@ -0,0 +1,34 @@ +'use strict'; + +const common = require('../common.js'); +const modPath = require.resolve('../fixtures/simple-error-stack.js'); + +const bench = common.createBenchmark(main, { + method: ['without-sourcemap', 'sourcemap'], + n: [1e5], +}); + +function runN(n) { + delete require.cache[modPath]; + const mod = require(modPath); + bench.start(); + for (let i = 0; i < n; i++) { + mod.simpleErrorStack(); + } + bench.end(n); +} + +function main({ n, method }) { + switch (method) { + case 'without-sourcemap': + process.setSourceMapsEnabled(false); + runN(n); + break; + case 'sourcemap': + process.setSourceMapsEnabled(true); + runN(n); + break; + default: + throw new Error(`Unexpected method "${method}"`); + } +} diff --git a/benchmark/fixtures/simple-error-stack.js b/benchmark/fixtures/simple-error-stack.js new file mode 100644 index 00000000000000..0057807795b072 --- /dev/null +++ b/benchmark/fixtures/simple-error-stack.js @@ -0,0 +1,15 @@ +'use strict'; +exports.__esModule = true; +exports.simpleErrorStack = void 0; +// Compile with `tsc --inlineSourceMap benchmark/fixtures/simple-error-stack.ts`. +var lorem = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'; +function simpleErrorStack() { + try { + lorem.BANG(); + } + catch (e) { + return e.stack; + } +} +exports.simpleErrorStack = simpleErrorStack; +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic2ltcGxlLWVycm9yLXN0YWNrLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsic2ltcGxlLWVycm9yLXN0YWNrLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLFlBQVksQ0FBQzs7O0FBRWIsaUZBQWlGO0FBRWpGLElBQU0sS0FBSyxHQUFHLCtiQUErYixDQUFDO0FBRTljLFNBQVMsZ0JBQWdCO0lBQ3ZCLElBQUk7UUFDRCxLQUFhLENBQUMsSUFBSSxFQUFFLENBQUM7S0FDdkI7SUFBQyxPQUFPLENBQUMsRUFBRTtRQUNWLE9BQU8sQ0FBQyxDQUFDLEtBQUssQ0FBQztLQUNoQjtBQUNILENBQUM7QUFHQyw0Q0FBZ0IifQ== diff --git a/benchmark/fixtures/simple-error-stack.ts b/benchmark/fixtures/simple-error-stack.ts new file mode 100644 index 00000000000000..58034e92f24b98 --- /dev/null +++ b/benchmark/fixtures/simple-error-stack.ts @@ -0,0 +1,17 @@ +'use strict'; + +// Compile with `tsc --inlineSourceMap benchmark/fixtures/simple-error-stack.ts`. + +const lorem = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'; + +function simpleErrorStack() { + try { + (lorem as any).BANG(); + } catch (e) { + return e.stack; + } +} + +export { + simpleErrorStack, +}; diff --git a/lib/internal/source_map/prepare_stack_trace.js b/lib/internal/source_map/prepare_stack_trace.js index 55dc8b344f8be6..d01ca2bedd52b8 100644 --- a/lib/internal/source_map/prepare_stack_trace.js +++ b/lib/internal/source_map/prepare_stack_trace.js @@ -25,6 +25,7 @@ const { kIsNodeError, } = require('internal/errors'); const { fileURLToPath } = require('internal/url'); +const { setGetSourceMapErrorSource } = internalBinding('errors'); // Create a prettified stacktrace, inserting context from source maps // if possible. @@ -53,7 +54,6 @@ const prepareStackTrace = (globalThis, error, trace) => { return errorString; } - let errorSource = ''; let lastSourceMap; let lastFileName; const preparedTrace = ArrayPrototypeJoin(ArrayPrototypeMap(trace, (t, i) => { @@ -62,14 +62,12 @@ const prepareStackTrace = (globalThis, error, trace) => { // A stack trace will often have several call sites in a row within the // same file, cache the source map and file content accordingly: let fileName = t.getFileName(); - let generated = false; if (fileName === undefined) { fileName = t.getEvalOrigin(); - generated = true; } const sm = fileName === lastFileName ? lastSourceMap : - findSourceMap(fileName, generated); + findSourceMap(fileName); lastSourceMap = sm; lastFileName = fileName; if (sm) { @@ -83,14 +81,6 @@ const prepareStackTrace = (globalThis, error, trace) => { if (originalSource && originalLine !== undefined && originalColumn !== undefined) { const name = getOriginalSymbolName(sm, trace, i); - if (i === 0) { - errorSource = getErrorSource( - sm, - originalSource, - originalLine, - originalColumn - ); - } // Construct call site name based on: v8.dev/docs/stack-trace-api: const fnName = t.getFunctionName() ?? 
t.getMethodName(); const typeName = t.getTypeName(); @@ -116,7 +106,7 @@ const prepareStackTrace = (globalThis, error, trace) => { } return `${str}${t}`; }), ''); - return `${errorSource}${errorString}\n at ${preparedTrace}`; + return `${errorString}\n at ${preparedTrace}`; }; // Transpilers may have removed the original symbol name used in the stack @@ -155,7 +145,7 @@ function getErrorSource( fileURLToPath(originalSourcePath) : originalSourcePath; const source = getOriginalSource( sourceMap.payload, - originalSourcePathNoScheme + originalSourcePath ); const lines = RegExpPrototypeSymbolSplit(/\r?\n/, source, originalLine + 1); const line = lines[originalLine]; @@ -178,28 +168,46 @@ function getErrorSource( function getOriginalSource(payload, originalSourcePath) { let source; - const originalSourcePathNoScheme = - StringPrototypeStartsWith(originalSourcePath, 'file://') ? - fileURLToPath(originalSourcePath) : originalSourcePath; + // payload.sources has been normalized to be an array of absolute urls. const sourceContentIndex = ArrayPrototypeIndexOf(payload.sources, originalSourcePath); if (payload.sourcesContent?.[sourceContentIndex]) { // First we check if the original source content was provided in the // source map itself: source = payload.sourcesContent[sourceContentIndex]; - } else { + } else if (StringPrototypeStartsWith(originalSourcePath, 'file://')) { // If no sourcesContent was found, attempt to load the original source // from disk: + debug(`read source of ${originalSourcePath} from filesystem`); + const originalSourcePathNoScheme = fileURLToPath(originalSourcePath); try { source = readFileSync(originalSourcePathNoScheme, 'utf8'); } catch (err) { debug(err); source = ''; } + } else { + source = ''; } return source; } +function getSourceMapErrorSource(fileName, lineNumber, columnNumber) { + const sm = findSourceMap(fileName); + if (sm === null) { + return; + } + const { + originalLine, + originalColumn, + originalSource, + } = sm.findEntry(lineNumber - 1, columnNumber); + const errorSource = getErrorSource(sm, originalSource, originalLine, originalColumn); + return errorSource; +} + +setGetSourceMapErrorSource(getSourceMapErrorSource); + module.exports = { prepareStackTrace, }; diff --git a/lib/internal/source_map/source_map_cache.js b/lib/internal/source_map/source_map_cache.js index c7770e7a6c733b..ba79540d837849 100644 --- a/lib/internal/source_map/source_map_cache.js +++ b/lib/internal/source_map/source_map_cache.js @@ -41,6 +41,8 @@ const esmSourceMapCache = new SafeMap(); // The generated sources is not mutable, so we can use a Map without memory concerns: const generatedSourceMapCache = new SafeMap(); const kLeadingProtocol = /^\w+:\/\//; +const kSourceMappingURLMagicComment = /\/[*/]#\s+sourceMappingURL=(?[^\s]+)/; +const kSourceURLMagicComment = /\/[*/]#\s+sourceURL=(?[^\s]+)/; const { fileURLToPath, pathToFileURL, URL } = require('internal/url'); let SourceMap; @@ -77,7 +79,22 @@ function setSourceMapsEnabled(val) { sourceMapsEnabled = val; } -function maybeCacheSourceMap(filename, content, cjsModuleInstance, isGeneratedSource) { +function extractSourceURLMagicComment(content) { + const matchSourceURL = RegExpPrototypeExec( + kSourceURLMagicComment, + content + ); + if (matchSourceURL === null) { + return null; + } + let sourceURL = matchSourceURL.groups.sourceURL; + if (sourceURL != null && RegExpPrototypeExec(kLeadingProtocol, sourceURL) === null) { + sourceURL = pathToFileURL(sourceURL).href; + } + return sourceURL; +} + +function maybeCacheSourceMap(filename, 
content, cjsModuleInstance, isGeneratedSource, sourceURL) { const sourceMapsEnabled = getSourceMapsEnabled(); if (!(process.env.NODE_V8_COVERAGE || sourceMapsEnabled)) return; try { @@ -87,10 +104,10 @@ function maybeCacheSourceMap(filename, content, cjsModuleInstance, isGeneratedSo debug(err); return; } - const match = RegExpPrototypeExec( - /\/[*/]#\s+sourceMappingURL=(?[^\s]+)/, - content, - ); + const match = RegExpPrototypeExec(kSourceMappingURLMagicComment, content); + if (sourceURL === undefined) { + sourceURL = extractSourceURLMagicComment(content); + } if (match) { const data = dataFromUrl(filename, match.groups.sourceMappingURL); const url = data ? null : match.groups.sourceMappingURL; @@ -99,22 +116,33 @@ function maybeCacheSourceMap(filename, content, cjsModuleInstance, isGeneratedSo filename, lineLengths: lineLengths(content), data, - url + url, + sourceURL, }); } else if (isGeneratedSource) { - generatedSourceMapCache.set(filename, { + const entry = { lineLengths: lineLengths(content), data, - url - }); + url, + sourceURL + }; + generatedSourceMapCache.set(filename, entry); + if (sourceURL) { + generatedSourceMapCache.set(sourceURL, entry); + } } else { // If there is no cjsModuleInstance and is not generated source assume we are in a // "modules/esm" context. - esmSourceMapCache.set(filename, { + const entry = { lineLengths: lineLengths(content), data, - url - }); + url, + sourceURL, + }; + esmSourceMapCache.set(filename, entry); + if (sourceURL) { + esmSourceMapCache.set(sourceURL, entry); + } } } } @@ -123,19 +151,12 @@ function maybeCacheGeneratedSourceMap(content) { const sourceMapsEnabled = getSourceMapsEnabled(); if (!(process.env.NODE_V8_COVERAGE || sourceMapsEnabled)) return; - const matchSourceURL = RegExpPrototypeExec( - /\/[*/]#\s+sourceURL=(?[^\s]+)/, - content - ); - if (matchSourceURL == null) { + const sourceURL = extractSourceURLMagicComment(content); + if (sourceURL === null) { return; } - let sourceURL = matchSourceURL.groups.sourceURL; - if (RegExpPrototypeExec(kLeadingProtocol, sourceURL) === null) { - sourceURL = pathToFileURL(sourceURL).href; - } try { - maybeCacheSourceMap(sourceURL, content, null, true); + maybeCacheSourceMap(sourceURL, content, null, true, sourceURL); } catch (err) { // This can happen if the filename is not a valid URL. // If we fail to cache the source map, we should not fail the whole process. @@ -254,33 +275,29 @@ function appendCJSCache(obj) { } } -function findSourceMap(sourceURL, isGenerated) { +function findSourceMap(sourceURL) { if (RegExpPrototypeExec(kLeadingProtocol, sourceURL) === null) { sourceURL = pathToFileURL(sourceURL).href; } if (!SourceMap) { SourceMap = require('internal/source_map/source_map').SourceMap; } - let sourceMap; - if (isGenerated) { - sourceMap = generatedSourceMapCache.get(sourceURL); - } else { - sourceMap = esmSourceMapCache.get(sourceURL); - if (sourceMap === undefined) { - for (const value of cjsSourceMapCache) { - const filename = ObjectGetValueSafe(value, 'filename'); - if (sourceURL === filename) { - sourceMap = { - data: ObjectGetValueSafe(value, 'data') - }; - } + let sourceMap = esmSourceMapCache.get(sourceURL) ?? 
generatedSourceMapCache.get(sourceURL); + if (sourceMap === undefined) { + for (const value of cjsSourceMapCache) { + const filename = ObjectGetValueSafe(value, 'filename'); + const cachedSourceURL = ObjectGetValueSafe(value, 'sourceURL'); + if (sourceURL === filename || sourceURL === cachedSourceURL) { + sourceMap = { + data: ObjectGetValueSafe(value, 'data') + }; } } } if (sourceMap && sourceMap.data) { return new SourceMap(sourceMap.data); } - return undefined; + return null; } module.exports = { diff --git a/src/env.h b/src/env.h index 8ccad29448f01c..1f25fd0854e5d8 100644 --- a/src/env.h +++ b/src/env.h @@ -525,6 +525,7 @@ class NoArrayBufferZeroFillScope { V(enhance_fatal_stack_after_inspector, v8::Function) \ V(enhance_fatal_stack_before_inspector, v8::Function) \ V(fs_use_promises_symbol, v8::Symbol) \ + V(get_source_map_error_source, v8::Function) \ V(host_import_module_dynamically_callback, v8::Function) \ V(host_initialize_import_meta_object_callback, v8::Function) \ V(http2session_on_altsvc_function, v8::Function) \ diff --git a/src/node_errors.cc b/src/node_errors.cc index 7a4c8257253f3c..e49556383b41f7 100644 --- a/src/node_errors.cc +++ b/src/node_errors.cc @@ -49,6 +49,40 @@ namespace per_process { static Mutex tty_mutex; } // namespace per_process +static std::string GetSourceMapErrorSource(Isolate* isolate, + Local context, + Local message, + bool* added_exception_line) { + v8::TryCatch try_catch(isolate); + HandleScope handle_scope(isolate); + Environment* env = Environment::GetCurrent(context); + + // The ScriptResourceName of the message may be different from the one we use + // to compile the script. V8 replaces it when it detects magic comments in + // the source texts. + Local script_resource_name = message->GetScriptResourceName(); + int linenum = message->GetLineNumber(context).FromJust(); + int columnum = message->GetStartColumn(context).FromJust(); + + Local argv[] = {script_resource_name, + v8::Int32::New(isolate, linenum), + v8::Int32::New(isolate, columnum)}; + MaybeLocal maybe_ret = env->get_source_map_error_source()->Call( + context, Undefined(isolate), arraysize(argv), argv); + Local ret; + if (!maybe_ret.ToLocal(&ret)) { + // Ignore the caught exceptions. + DCHECK(try_catch.HasCaught()); + return std::string(); + } + if (!ret->IsString()) { + return std::string(); + } + *added_exception_line = true; + node::Utf8Value error_source_utf8(isolate, ret.As()); + return *error_source_utf8; +} + static std::string GetErrorSource(Isolate* isolate, Local context, Local message, @@ -58,6 +92,10 @@ static std::string GetErrorSource(Isolate* isolate, std::string sourceline(*encoded_source, encoded_source.length()); *added_exception_line = false; + if (sourceline.find("node-do-not-add-exception-line") != std::string::npos) { + return sourceline; + } + // If source maps have been enabled, the exception line will instead be // added in the JavaScript context: Environment* env = Environment::GetCurrent(isolate); @@ -65,11 +103,9 @@ static std::string GetErrorSource(Isolate* isolate, !message->GetScriptOrigin().SourceMapUrl().IsEmpty() && !message->GetScriptOrigin().SourceMapUrl()->IsUndefined(); if (has_source_map_url && env != nullptr && env->source_maps_enabled()) { - return sourceline; - } - - if (sourceline.find("node-do-not-add-exception-line") != std::string::npos) { - return sourceline; + std::string source = GetSourceMapErrorSource( + isolate, context, message, added_exception_line); + return added_exception_line ? 
source : sourceline; } // Because of how node modules work, all scripts are wrapped with a @@ -869,6 +905,13 @@ static void SetSourceMapsEnabled(const FunctionCallbackInfo& args) { env->set_source_maps_enabled(args[0].As()->Value()); } +static void SetGetSourceMapErrorSource( + const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + CHECK(args[0]->IsFunction()); + env->set_get_source_map_error_source(args[0].As()); +} + static void SetMaybeCacheGeneratedSourceMap( const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); @@ -909,6 +952,7 @@ static void TriggerUncaughtException(const FunctionCallbackInfo& args) { void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(SetPrepareStackTraceCallback); + registry->Register(SetGetSourceMapErrorSource); registry->Register(SetSourceMapsEnabled); registry->Register(SetMaybeCacheGeneratedSourceMap); registry->Register(SetEnhanceStackForFatalException); @@ -924,6 +968,10 @@ void Initialize(Local target, target, "setPrepareStackTraceCallback", SetPrepareStackTraceCallback); + SetMethod(context, + target, + "setGetSourceMapErrorSource", + SetGetSourceMapErrorSource); SetMethod(context, target, "setSourceMapsEnabled", SetSourceMapsEnabled); SetMethod(context, target, diff --git a/test/message/source_map_disabled_by_api.js b/test/message/source_map_disabled_by_api.js index 8405e4f3661cff..db55d24e1d5df8 100644 --- a/test/message/source_map_disabled_by_api.js +++ b/test/message/source_map_disabled_by_api.js @@ -17,4 +17,8 @@ delete require.cache[require // Re-enable. process.setSourceMapsEnabled(true); -require('../fixtures/source-map/enclosing-call-site-min.js'); +try { + require('../fixtures/source-map/enclosing-call-site-min.js'); +} catch (e) { + console.log(e); +} diff --git a/test/message/source_map_disabled_by_api.out b/test/message/source_map_disabled_by_api.out index 70979849795e11..f3f93758c1d272 100644 --- a/test/message/source_map_disabled_by_api.out +++ b/test/message/source_map_disabled_by_api.out @@ -9,10 +9,6 @@ Error: an error! at Module.load (node:internal/modules/cjs/loader:*) at Module._load (node:internal/modules/cjs/loader:*) at Module.require (node:internal/modules/cjs/loader:*) -*enclosing-call-site.js:16 - throw new Error('an error!') - ^ - Error: an error! at functionD (*enclosing-call-site.js:16:17) at functionC (*enclosing-call-site.js:10:3) @@ -24,5 +20,3 @@ Error: an error! at Module.load (node:internal/modules/cjs/loader:*) at Module._load (node:internal/modules/cjs/loader:*) at Module.require (node:internal/modules/cjs/loader:*) - -Node.js * diff --git a/test/message/source_map_enabled_by_api.js b/test/message/source_map_enabled_by_api.js index 3133bd26399fd4..29e0f9708903a8 100644 --- a/test/message/source_map_enabled_by_api.js +++ b/test/message/source_map_enabled_by_api.js @@ -14,4 +14,8 @@ delete require.cache[require process.setSourceMapsEnabled(false); -require('../fixtures/source-map/enclosing-call-site-min.js'); +try { + require('../fixtures/source-map/enclosing-call-site-min.js'); +} catch (e) { + console.log(e); +} diff --git a/test/message/source_map_enabled_by_api.out b/test/message/source_map_enabled_by_api.out index 9af1810020cf3f..97547436db091b 100644 --- a/test/message/source_map_enabled_by_api.out +++ b/test/message/source_map_enabled_by_api.out @@ -1,7 +1,3 @@ -*enclosing-call-site.js:16 - throw new Error('an error!') - ^ - Error: an error! 
at functionD (*enclosing-call-site.js:16:17) at functionC (*enclosing-call-site.js:10:3) @@ -13,10 +9,6 @@ Error: an error! at Module.load (node:internal/modules/cjs/loader:*) at Module._load (node:internal/modules/cjs/loader:*) at Module.require (node:internal/modules/cjs/loader:*) -*enclosing-call-site-min.js:1 -var functionA=function(){functionB()};function functionB(){functionC()}var functionC=function(){functionD()},functionD=function(){if(0 Date: Sun, 19 Jun 2022 21:40:49 +0900 Subject: [PATCH 021/177] lib: add missing env vars to --help PR-URL: https://github.com/nodejs/node/pull/43492 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Michael Dawson Reviewed-By: Feng Yu --- lib/internal/main/print_help.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/internal/main/print_help.js b/lib/internal/main/print_help.js index a2cee4b0567649..f8fc5a6075f104 100644 --- a/lib/internal/main/print_help.js +++ b/lib/internal/main/print_help.js @@ -55,10 +55,15 @@ const envVars = new SafeMap(ArrayPrototypeConcat([ 'of stderr' }], ['NODE_REPL_HISTORY', { helpText: 'path to the persistent REPL ' + 'history file' }], + ['NODE_REPL_EXTERNAL_MODULE', { helpText: 'path to a Node.js module ' + + 'which will be loaded in place of the built-in REPL' }], + ['NODE_SKIP_PLATFORM_CHECK', { helpText: 'set to 1 to skip ' + + 'the check for a supported platform during Node.js startup' }], ['NODE_TLS_REJECT_UNAUTHORIZED', { helpText: 'set to 0 to disable TLS ' + 'certificate validation' }], ['NODE_V8_COVERAGE', { helpText: 'directory to output v8 coverage JSON ' + 'to' }], + ['TZ', { helpText: 'specify the timezone configuration' }], ['UV_THREADPOOL_SIZE', { helpText: 'sets the number of threads used in ' + 'libuv\'s threadpool' }], ], hasIntl ? [ From 222ecd6e14b5301fe934a53a350cf2ed14df8686 Mon Sep 17 00:00:00 2001 From: cola119 Date: Sun, 19 Jun 2022 21:40:52 +0900 Subject: [PATCH 022/177] doc: add missing env vars to man page PR-URL: https://github.com/nodejs/node/pull/43492 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Michael Dawson Reviewed-By: Feng Yu --- doc/node.1 | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/node.1 b/doc/node.1 index 97352f4a10629d..2a1e80c96dfc1d 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -648,6 +648,7 @@ instead of printing to stderr. Equivalent to passing .Fl -redirect-warnings Ar file on the command line. +. .It Ev NODE_REPL_HISTORY Ar file Path to the .Ar file @@ -657,6 +658,10 @@ The default path is which is overridden by this variable. Setting the value to an empty string ("" or " ") will disable persistent REPL history. . +.It Ev NODE_REPL_EXTERNAL_MODULE Ar file +Path to a Node.js module which will be loaded in place of the built-in REPL. +Overriding this value to an empty string (`''`) will use the built-in REPL. +. .It Ev NODE_SKIP_PLATFORM_CHECK When set to .Ar 1 , @@ -692,6 +697,9 @@ If .Fl -use-openssl-ca is enabled, this overrides and sets OpenSSL's file containing trusted certificates. . +.It Ev TZ +Specify the timezone configuration. +. .It Ev UV_THREADPOOL_SIZE Ar size Sets the number of threads used in libuv's threadpool to .Ar size . From d23dfa4dcbf4d8eacbd375cfc5b6b84606968320 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Wed, 27 Jul 2022 17:56:12 +0000 Subject: [PATCH 023/177] doc: remove old reference from crypto/README.md The referenced header file does not exist anymore. 
Refs: https://github.com/nodejs/node/pull/43896 PR-URL: https://github.com/nodejs/node/pull/44012 Reviewed-By: Filip Skokan Reviewed-By: Richard Lau Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Luigi Pinca Reviewed-By: Benjamin Gruenbaum Reviewed-By: Feng Yu --- src/crypto/README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/src/crypto/README.md b/src/crypto/README.md index e78db18ea4e881..1f5e5b20448499 100644 --- a/src/crypto/README.md +++ b/src/crypto/README.md @@ -22,7 +22,6 @@ the various other crypto files and other parts of Node.js: * `crypto_util.h` / `crypto_util.cc` (Core crypto definitions) * `crypto_common.h` / `crypto_common.cc` (Shared TLS utility functions) * `crypto_bio.h` / `crypto_bio.cc` (Custom OpenSSL i/o implementation) -* `crypto_groups.h` (modp group definitions) Of these, `crypto_util.h` and `crypto_util.cc` are the most important, as they provide the core declarations and utility functions used most extensively From 9763e2fba98b430637c8cdc1ed16f46a0dcddbd4 Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Wed, 27 Jul 2022 17:54:57 +0200 Subject: [PATCH 024/177] src: fix typo in src/README.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/44009 Reviewed-By: Rafael Gonzaga Reviewed-By: Richard Lau Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Benjamin Gruenbaum Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen --- src/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/README.md b/src/README.md index b48402bacefae2..314cd343418cbf 100644 --- a/src/README.md +++ b/src/README.md @@ -620,7 +620,7 @@ v8::Maybe SumNumbers(v8::Local context, for (uint32_t i = 0; i < array_of_integers->Length(); i++) { v8::Local entry; - if (array_of_integers->Get(context, i).ToLocal(&entry)) { + if (!array_of_integers->Get(context, i).ToLocal(&entry)) { // Oops, we might have hit a getter that throws an exception! // It's better to not continue return an empty (“nothing”) Maybe. 
return v8::Nothing(); From 50c854bbfe4d399d4b9c285dce2af4032f5b4836 Mon Sep 17 00:00:00 2001 From: Moshe Atlow Date: Thu, 28 Jul 2022 12:54:09 +0300 Subject: [PATCH 025/177] test_runner: fix top level `describe` queuing PR-URL: https://github.com/nodejs/node/pull/43998 Reviewed-By: Antoine du Hamel Reviewed-By: Benjamin Gruenbaum --- lib/internal/test_runner/test.js | 17 ++- test/message/test_runner_describe_it.js | 55 +++++++-- test/message/test_runner_describe_it.out | 141 +++++++++++++---------- test/message/test_runner_output.js | 22 ++++ test/message/test_runner_output.out | 23 +++- test/parallel/test-runner-concurrency.js | 39 ++++++- 6 files changed, 208 insertions(+), 89 deletions(-) diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index 89b8c196038549..14011f0c28ad30 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -561,7 +561,12 @@ class Suite extends Test { try { const context = { signal: this.signal }; - this.buildSuite = this.runInAsyncScope(this.fn, context, [context]); + this.buildSuite = PromisePrototypeThen( + PromiseResolve(this.runInAsyncScope(this.fn, context, [context])), + undefined, + (err) => { + this.fail(new ERR_TEST_FAILURE(err, kTestCodeFailure)); + }); } catch (err) { this.fail(new ERR_TEST_FAILURE(err, kTestCodeFailure)); } @@ -569,17 +574,9 @@ class Suite extends Test { this.buildPhaseFinished = true; } - start() { - return this.run(); - } - async run() { - try { - await this.buildSuite; - } catch (err) { - this.fail(new ERR_TEST_FAILURE(err, kTestCodeFailure)); - } this.parent.activeSubtests++; + await this.buildSuite; this.startTime = hrtime(); if (this[kShouldAbort]()) { diff --git a/test/message/test_runner_describe_it.js b/test/message/test_runner_describe_it.js index c272fb38a749f6..24b83041d4ad03 100644 --- a/test/message/test_runner_describe_it.js +++ b/test/message/test_runner_describe_it.js @@ -149,17 +149,6 @@ describe('level 0a', { concurrency: 4 }, () => { return p0a; }); -describe('top level', { concurrency: 2 }, () => { - it('+long running', async () => { - return new Promise((resolve, reject) => { - setTimeout(resolve, 3000).unref(); - }); - }); - - describe('+short running', async () => { - it('++short running', async () => {}); - }); -}); describe('invalid subtest - pass but subtest fails', () => { setImmediate(() => { @@ -339,3 +328,47 @@ describe('timeouts', () => { setTimeout(done, 10); }); }); + +describe('successful thenable', () => { + it('successful thenable', () => { + let thenCalled = false; + return { + get then() { + if (thenCalled) throw new Error(); + thenCalled = true; + return (successHandler) => successHandler(); + }, + }; + }); + + it('rejected thenable', () => { + let thenCalled = false; + return { + get then() { + if (thenCalled) throw new Error(); + thenCalled = true; + return (_, errorHandler) => errorHandler(new Error('custom error')); + }, + }; + }); + + let thenCalled = false; + return { + get then() { + if (thenCalled) throw new Error(); + thenCalled = true; + return (successHandler) => successHandler(); + }, + }; +}); + +describe('rejected thenable', () => { + let thenCalled = false; + return { + get then() { + if (thenCalled) throw new Error(); + thenCalled = true; + return (_, errorHandler) => errorHandler(new Error('custom error')); + }, + }; +}); diff --git a/test/message/test_runner_describe_it.out b/test/message/test_runner_describe_it.out index 7961345b976f73..cef7f8c94a8098 100644 --- 
a/test/message/test_runner_describe_it.out +++ b/test/message/test_runner_describe_it.out @@ -24,6 +24,7 @@ not ok 3 - sync fail todo # TODO * * * + * ... # Subtest: sync fail todo with message not ok 4 - sync fail todo with message # TODO this is a failing todo @@ -74,6 +75,7 @@ not ok 8 - sync throw fail * * * + * ... # Subtest: async skip pass ok 9 - async skip pass # SKIP @@ -100,6 +102,7 @@ not ok 11 - async throw fail * * * + * ... # Subtest: async skip fail not ok 12 - async skip fail @@ -128,6 +131,7 @@ not ok 13 - async assertion fail * * * + * ... # Subtest: resolve pass ok 14 - resolve pass @@ -149,6 +153,7 @@ not ok 15 - reject fail * * * + * ... # Subtest: unhandled rejection - passes but warns ok 16 - unhandled rejection - passes but warns @@ -237,45 +242,23 @@ ok 23 - level 0a --- duration_ms: * ... -# Subtest: top level - # Subtest: +long running - ok 1 - +long running - --- - duration_ms: * - ... - # Subtest: +short running - # Subtest: ++short running - ok 1 - ++short running - --- - duration_ms: * - ... - 1..1 - ok 2 - +short running - --- - duration_ms: * - ... - 1..2 -ok 24 - top level - --- - duration_ms: * - ... # Subtest: invalid subtest - pass but subtest fails -ok 25 - invalid subtest - pass but subtest fails +ok 24 - invalid subtest - pass but subtest fails --- duration_ms: * ... # Subtest: sync skip option -ok 26 - sync skip option # SKIP +ok 25 - sync skip option # SKIP --- duration_ms: * ... # Subtest: sync skip option with message -ok 27 - sync skip option with message # SKIP this is skipped +ok 26 - sync skip option with message # SKIP this is skipped --- duration_ms: * ... # Subtest: sync skip option is false fail -not ok 28 - sync skip option is false fail +not ok 27 - sync skip option is false fail --- duration_ms: * failureType: 'testCodeFailure' @@ -291,67 +274,67 @@ not ok 28 - sync skip option is false fail * ... # Subtest: -ok 29 - +ok 28 - --- duration_ms: * ... # Subtest: functionOnly -ok 30 - functionOnly +ok 29 - functionOnly --- duration_ms: * ... # Subtest: -ok 31 - +ok 30 - --- duration_ms: * ... # Subtest: test with only a name provided -ok 32 - test with only a name provided +ok 31 - test with only a name provided --- duration_ms: * ... # Subtest: -ok 33 - +ok 32 - --- duration_ms: * ... # Subtest: -ok 34 - # SKIP +ok 33 - # SKIP --- duration_ms: * ... # Subtest: test with a name and options provided -ok 35 - test with a name and options provided # SKIP +ok 34 - test with a name and options provided # SKIP --- duration_ms: * ... # Subtest: functionAndOptions -ok 36 - functionAndOptions # SKIP +ok 35 - functionAndOptions # SKIP --- duration_ms: * ... # Subtest: escaped description \\ \# \\\#\\ -ok 37 - escaped description \\ \# \\\#\\ +ok 36 - escaped description \\ \# \\\#\\ --- duration_ms: * ... # Subtest: escaped skip message -ok 38 - escaped skip message # SKIP \#skip +ok 37 - escaped skip message # SKIP \#skip --- duration_ms: * ... # Subtest: escaped todo message -ok 39 - escaped todo message # TODO \#todo +ok 38 - escaped todo message # TODO \#todo --- duration_ms: * ... # Subtest: callback pass -ok 40 - callback pass +ok 39 - callback pass --- duration_ms: * ... # Subtest: callback fail -not ok 41 - callback fail +not ok 40 - callback fail --- duration_ms: * failureType: 'testCodeFailure' @@ -362,22 +345,22 @@ not ok 41 - callback fail * ... # Subtest: sync t is this in test -ok 42 - sync t is this in test +ok 41 - sync t is this in test --- duration_ms: * ... 
# Subtest: async t is this in test -ok 43 - async t is this in test +ok 42 - async t is this in test --- duration_ms: * ... # Subtest: callback t is this in test -ok 44 - callback t is this in test +ok 43 - callback t is this in test --- duration_ms: * ... # Subtest: callback also returns a Promise -not ok 45 - callback also returns a Promise +not ok 44 - callback also returns a Promise --- duration_ms: * failureType: 'callbackAndPromisePresent' @@ -385,7 +368,7 @@ not ok 45 - callback also returns a Promise code: 'ERR_TEST_FAILURE' ... # Subtest: callback throw -not ok 46 - callback throw +not ok 45 - callback throw --- duration_ms: * failureType: 'testCodeFailure' @@ -401,7 +384,7 @@ not ok 46 - callback throw * ... # Subtest: callback called twice -not ok 47 - callback called twice +not ok 46 - callback called twice --- duration_ms: * failureType: 'multipleCallbackInvocations' @@ -412,12 +395,12 @@ not ok 47 - callback called twice * ... # Subtest: callback called twice in different ticks -ok 48 - callback called twice in different ticks +ok 47 - callback called twice in different ticks --- duration_ms: * ... # Subtest: callback called twice in future tick -not ok 49 - callback called twice in future tick +not ok 48 - callback called twice in future tick --- duration_ms: * failureType: 'uncaughtException' @@ -427,7 +410,7 @@ not ok 49 - callback called twice in future tick * ... # Subtest: callback async throw -not ok 50 - callback async throw +not ok 49 - callback async throw --- duration_ms: * failureType: 'uncaughtException' @@ -437,12 +420,12 @@ not ok 50 - callback async throw * ... # Subtest: callback async throw after done -ok 51 - callback async throw after done +ok 50 - callback async throw after done --- duration_ms: * ... # Subtest: custom inspect symbol fail -not ok 52 - custom inspect symbol fail +not ok 51 - custom inspect symbol fail --- duration_ms: * failureType: 'testCodeFailure' @@ -450,7 +433,7 @@ not ok 52 - custom inspect symbol fail code: 'ERR_TEST_FAILURE' ... # Subtest: custom inspect symbol that throws fail -not ok 53 - custom inspect symbol that throws fail +not ok 52 - custom inspect symbol that throws fail --- duration_ms: * failureType: 'testCodeFailure' @@ -501,7 +484,7 @@ not ok 53 - custom inspect symbol that throws fail * ... 1..2 -not ok 54 - subtest sync throw fails +not ok 53 - subtest sync throw fails --- duration_ms: * failureType: 'subtestsFailed' @@ -518,7 +501,7 @@ not ok 54 - subtest sync throw fails code: 'ERR_TEST_FAILURE' ... 1..1 -not ok 55 - describe sync throw fails +not ok 54 - describe sync throw fails --- duration_ms: * failureType: 'testCodeFailure' @@ -546,7 +529,7 @@ not ok 55 - describe sync throw fails code: 'ERR_TEST_FAILURE' ... 1..1 -not ok 56 - describe async throw fails +not ok 55 - describe async throw fails --- duration_ms: * failureType: 'testCodeFailure' @@ -573,7 +556,7 @@ not ok 56 - describe async throw fails error: 'test timed out after 5ms' code: 'ERR_TEST_FAILURE' stack: |- - * + async Promise.all (index 0) ... # Subtest: timed out callback test not ok 2 - timed out callback test @@ -594,15 +577,51 @@ not ok 56 - describe async throw fails duration_ms: * ... 1..4 -not ok 57 - timeouts +not ok 56 - timeouts --- duration_ms: * failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' ... +# Subtest: successful thenable + # Subtest: successful thenable + ok 1 - successful thenable + --- + duration_ms: * + ... 
+ # Subtest: rejected thenable + not ok 2 - rejected thenable + --- + duration_ms: * + failureType: 'testCodeFailure' + error: 'custom error' + code: 'ERR_TEST_FAILURE' + stack: |- + * + * + ... + 1..2 +not ok 57 - successful thenable + --- + duration_ms: * + failureType: 'subtestsFailed' + error: '1 subtest failed' + code: 'ERR_TEST_FAILURE' + ... +# Subtest: rejected thenable +not ok 58 - rejected thenable + --- + duration_ms: * + failureType: 'testCodeFailure' + error: 'custom error' + code: 'ERR_TEST_FAILURE' + stack: |- + * + * + ... # Subtest: invalid subtest fail -not ok 58 - invalid subtest fail +not ok 59 - invalid subtest fail --- duration_ms: * failureType: 'parentAlreadyFinished' @@ -611,16 +630,16 @@ not ok 58 - invalid subtest fail stack: |- * ... -1..58 +1..59 # Warning: Test "unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "async unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "immediate throw - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from immediate throw fail" and would have caused the test to fail, but instead triggered an uncaughtException event. # Warning: Test "immediate reject - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "callback called twice in different ticks" generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. # Warning: Test "callback async throw after done" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. 
-# tests 58 -# pass 23 -# fail 21 +# tests 59 +# pass 22 +# fail 23 # cancelled 0 # skipped 9 # todo 5 diff --git a/test/message/test_runner_output.js b/test/message/test_runner_output.js index 33e17e6a082ca7..8fce194f56d2b7 100644 --- a/test/message/test_runner_output.js +++ b/test/message/test_runner_output.js @@ -349,3 +349,25 @@ test('large timeout async test is ok', { timeout: 30_000_000 }, async (t) => { test('large timeout callback test is ok', { timeout: 30_000_000 }, (t, done) => { setTimeout(done, 10); }); + +test('successful thenable', () => { + let thenCalled = false; + return { + get then() { + if (thenCalled) throw new Error(); + thenCalled = true; + return (successHandler) => successHandler(); + }, + }; +}); + +test('rejected thenable', () => { + let thenCalled = false; + return { + get then() { + if (thenCalled) throw new Error(); + thenCalled = true; + return (_, errorHandler) => errorHandler('custom error'); + }, + }; +}); diff --git a/test/message/test_runner_output.out b/test/message/test_runner_output.out index 4e987f8e9f4b94..49a19fe302bbb0 100644 --- a/test/message/test_runner_output.out +++ b/test/message/test_runner_output.out @@ -588,8 +588,21 @@ ok 60 - large timeout callback test is ok --- duration_ms: * ... +# Subtest: successful thenable +ok 61 - successful thenable + --- + duration_ms: * + ... +# Subtest: rejected thenable +not ok 62 - rejected thenable + --- + duration_ms: * + failureType: 'testCodeFailure' + error: 'custom error' + code: 'ERR_TEST_FAILURE' + ... # Subtest: invalid subtest fail -not ok 61 - invalid subtest fail +not ok 63 - invalid subtest fail --- duration_ms: * failureType: 'parentAlreadyFinished' @@ -598,16 +611,16 @@ not ok 61 - invalid subtest fail stack: |- * ... -1..61 +1..63 # Warning: Test "unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "async unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "immediate throw - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from immediate throw fail" and would have caused the test to fail, but instead triggered an uncaughtException event. # Warning: Test "immediate reject - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "callback called twice in different ticks" generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. # Warning: Test "callback async throw after done" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. 
-# tests 61 -# pass 26 -# fail 18 +# tests 63 +# pass 27 +# fail 19 # cancelled 2 # skipped 10 # todo 5 diff --git a/test/parallel/test-runner-concurrency.js b/test/parallel/test-runner-concurrency.js index 802cff3e9be375..8d756971d68551 100644 --- a/test/parallel/test-runner-concurrency.js +++ b/test/parallel/test-runner-concurrency.js @@ -1,6 +1,6 @@ 'use strict'; -require('../common'); -const { describe, it } = require('node:test'); +const common = require('../common'); +const { describe, it, test } = require('node:test'); const assert = require('assert'); describe('Concurrency option (boolean) = true ', { concurrency: true }, () => { @@ -27,3 +27,38 @@ describe( }); } ); + +{ + // Make sure tests run in order when root concurrency is 1 (default) + const tree = []; + const expectedTestTree = common.mustCall(() => { + assert.deepStrictEqual(tree, [ + 'suite 1', 'nested', 'suite 2', + '1', '2', 'nested 1', 'nested 2', + 'test', 'test 1', 'test 2', + ]); + }); + + describe('suite 1', () => { + tree.push('suite 1'); + it('1', () => tree.push('1')); + it('2', () => tree.push('2')); + + describe('nested', () => { + tree.push('nested'); + it('nested 1', () => tree.push('nested 1')); + it('nested 2', () => tree.push('nested 2')); + }); + }); + + test('test', async (t) => { + tree.push('test'); + await t.test('test1', () => tree.push('test 1')); + await t.test('test 2', () => tree.push('test 2')); + }); + + describe('suite 2', () => { + tree.push('suite 2'); + it('should run after other suites', expectedTestTree); + }); +} From b7aaf3d4ca788afe955a872bb7aee06df2241b16 Mon Sep 17 00:00:00 2001 From: npm CLI robot Date: Thu, 28 Jul 2022 11:03:27 -0700 Subject: [PATCH 026/177] deps: upgrade npm to 8.15.1 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/44013 Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Tobias Nießen Reviewed-By: Filip Skokan Reviewed-By: Luigi Pinca --- deps/npm/docs/content/commands/npm-ci.md | 253 ++++++++++++++++- deps/npm/docs/content/commands/npm-init.md | 2 + .../content/commands/npm-install-ci-test.md | 253 ++++++++++++++++- deps/npm/docs/content/commands/npm-install.md | 2 +- deps/npm/docs/content/commands/npm-ls.md | 4 +- .../content/configuring-npm/package-json.md | 2 +- deps/npm/docs/content/using-npm/workspaces.md | 10 +- deps/npm/docs/output/commands/npm-ci.html | 211 +++++++++++++- deps/npm/docs/output/commands/npm-init.html | 2 + .../output/commands/npm-install-ci-test.html | 211 +++++++++++++- .../npm/docs/output/commands/npm-install.html | 2 +- deps/npm/docs/output/commands/npm-ls.html | 6 +- deps/npm/docs/output/commands/npm.html | 2 +- .../output/configuring-npm/package-json.html | 2 +- .../npm/docs/output/using-npm/workspaces.html | 9 +- deps/npm/lib/commands/ci.js | 9 +- deps/npm/lib/commands/diff.js | 14 +- deps/npm/lib/commands/init.js | 7 +- deps/npm/lib/commands/link.js | 4 +- deps/npm/lib/npm.js | 10 +- deps/npm/lib/utils/log-file.js | 6 +- deps/npm/man/man1/npm-ci.1 | 266 +++++++++++++++++- deps/npm/man/man1/npm-init.1 | 4 + deps/npm/man/man1/npm-install-ci-test.1 | 266 +++++++++++++++++- deps/npm/man/man1/npm-install.1 | 2 +- deps/npm/man/man1/npm-ls.1 | 6 +- deps/npm/man/man1/npm.1 | 2 +- deps/npm/man/man5/package-json.5 | 2 +- deps/npm/man/man7/workspaces.7 | 12 +- .../arborist/lib/arborist/build-ideal-tree.js | 4 +- .../arborist/lib/arborist/load-actual.js | 2 +- .../arborist/lib/arborist/load-virtual.js | 2 +- 
.../@npmcli/arborist/lib/arborist/reify.js | 2 +- .../arborist/lib/consistent-resolve.js | 4 +- .../node_modules/@npmcli/arborist/lib/link.js | 2 +- .../node_modules/@npmcli/arborist/lib/node.js | 2 +- .../@npmcli/arborist/lib/shrinkwrap.js | 6 +- .../@npmcli/arborist/package.json | 2 +- deps/npm/package.json | 2 +- .../test/lib/commands/link.js.test.cjs | 5 + .../test/lib/load-all-commands.js.test.cjs | 18 +- .../tap-snapshots/test/lib/npm.js.test.cjs | 18 +- deps/npm/test/fixtures/mock-npm.js | 36 +-- deps/npm/test/lib/commands/init.js | 38 +++ deps/npm/test/lib/commands/link.js | 36 +++ deps/npm/test/lib/npm.js | 46 ++- 46 files changed, 1651 insertions(+), 155 deletions(-) diff --git a/deps/npm/docs/content/commands/npm-ci.md b/deps/npm/docs/content/commands/npm-ci.md index 9b8238d05a3b91..30a03365ade814 100644 --- a/deps/npm/docs/content/commands/npm-ci.md +++ b/deps/npm/docs/content/commands/npm-ci.md @@ -84,15 +84,129 @@ cache: -#### `audit` +#### `save` + +* Default: `true` unless when using `npm update` where it defaults to `false` +* Type: Boolean + +Save installed packages to a `package.json` file as dependencies. + +When used with the `npm rm` command, removes the dependency from +`package.json`. + +Will also prevent writing to `package-lock.json` if set to `false`. + + + + +#### `save-exact` + +* Default: false +* Type: Boolean + +Dependencies saved to package.json will be configured with an exact version +rather than using npm's default semver range operator. + + + + +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + + + + +#### `global-style` + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local `node_modules` folder with +the same layout it uses with the global `node_modules` folder. Only your +direct dependencies will show in `node_modules` and everything they depend +on will be flattened in their `node_modules` folders. This obviously will +eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` +will be preferred. + + + + +#### `legacy-bundling` + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with `global-style` this option +will be preferred. + + + + +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. 
+ + + + +#### `strict-peer-deps` + +* Default: false +* Type: Boolean + +If set to `true`, and `--legacy-peer-deps` is not set, then _any_ +conflicting `peerDependencies` will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships. + +By default, conflicting `peerDependencies` deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's `peerDependencies` object. + +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved. If `--strict-peer-deps` is set, then +this warning is treated as a failure. + + + + +#### `package-lock` * Default: true * Type: Boolean -When "true" submit audit reports alongside the current npm command to the -default registry and all registries configured for scopes. See the -documentation for [`npm audit`](/commands/npm-audit) for details on what is -submitted. +If set to false, then ignore `package-lock.json` files when installing. This +will also prevent _writing_ `package-lock.json` if `save` is true. + +This configuration does not affect `npm ci`. @@ -127,13 +241,132 @@ will *not* run any pre- or post-scripts. -#### `script-shell` +#### `audit` -* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows -* Type: null or String +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. + + + + +#### `bin-links` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or `.cmd` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. + + + + +#### `fund` + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each `npm install` +acknowledging the number of dependencies looking for funding. See [`npm +fund`](/commands/npm-fund) for details. + + + + +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + + + + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result in selecting all + workspaces within that folder) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. 
+ + + + +#### `workspaces` + +* Default: null +* Type: null or Boolean + +Set to true to run the command in the context of **all** configured +workspaces. + +Explicitly setting this to false will cause commands like `install` to +ignore workspaces altogether. When not set explicitly: + +- Commands that operate on the `node_modules` tree (install, update, etc.) +will link workspaces into the `node_modules` folder. - Commands that do +other things (test, exec, publish, etc.) will operate on the root project, +_unless_ one or more workspaces are specified in the `workspace` config. + +This value is not exported to the environment for child processes. + + + + +#### `include-workspace-root` + +* Default: false +* Type: Boolean + +Include the workspace root when workspaces are enabled for a command. + +When false, specifying individual workspaces via the `workspace` config, or +all workspaces via the `workspaces` flag, will cause npm to operate only on +the specified workspaces, and not on the root project. + +This value is not exported to the environment for child processes. + + + + +#### `install-links` + +* Default: false +* Type: Boolean -The shell to use for scripts run with the `npm exec`, `npm run` and `npm -init ` commands. +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. diff --git a/deps/npm/docs/content/commands/npm-init.md b/deps/npm/docs/content/commands/npm-init.md index cd0be4643e0ead..f3124a7768dfce 100644 --- a/deps/npm/docs/content/commands/npm-init.md +++ b/deps/npm/docs/content/commands/npm-init.md @@ -38,6 +38,8 @@ follows: * `npm init foo` -> `npm exec create-foo` * `npm init @usr/foo` -> `npm exec @usr/create-foo` * `npm init @usr` -> `npm exec @usr/create` +* `npm init @usr@2.0.0` -> `npm exec @usr/create@2.0.0` +* `npm init @usr/foo@2.0.0` -> `npm exec @usr/create-foo@2.0.0` If the initializer is omitted (by just calling `npm init`), init will fall back to legacy init behavior. It will ask you a bunch of questions, and diff --git a/deps/npm/docs/content/commands/npm-install-ci-test.md b/deps/npm/docs/content/commands/npm-install-ci-test.md index 74ed4667e81bfe..b886f8ab9599ad 100644 --- a/deps/npm/docs/content/commands/npm-install-ci-test.md +++ b/deps/npm/docs/content/commands/npm-install-ci-test.md @@ -30,15 +30,129 @@ This command runs `npm ci` followed immediately by `npm test`. -#### `audit` +#### `save` + +* Default: `true` unless when using `npm update` where it defaults to `false` +* Type: Boolean + +Save installed packages to a `package.json` file as dependencies. + +When used with the `npm rm` command, removes the dependency from +`package.json`. + +Will also prevent writing to `package-lock.json` if set to `false`. + + + + +#### `save-exact` + +* Default: false +* Type: Boolean + +Dependencies saved to package.json will be configured with an exact version +rather than using npm's default semver range operator. + + + + +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. 
+* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + + + + +#### `global-style` + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local `node_modules` folder with +the same layout it uses with the global `node_modules` folder. Only your +direct dependencies will show in `node_modules` and everything they depend +on will be flattened in their `node_modules` folders. This obviously will +eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` +will be preferred. + + + + +#### `legacy-bundling` + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with `global-style` this option +will be preferred. + + + + +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + + + + +#### `strict-peer-deps` + +* Default: false +* Type: Boolean + +If set to `true`, and `--legacy-peer-deps` is not set, then _any_ +conflicting `peerDependencies` will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships. + +By default, conflicting `peerDependencies` deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's `peerDependencies` object. + +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved. If `--strict-peer-deps` is set, then +this warning is treated as a failure. + + + + +#### `package-lock` * Default: true * Type: Boolean -When "true" submit audit reports alongside the current npm command to the -default registry and all registries configured for scopes. See the -documentation for [`npm audit`](/commands/npm-audit) for details on what is -submitted. +If set to false, then ignore `package-lock.json` files when installing. This +will also prevent _writing_ `package-lock.json` if `save` is true. + +This configuration does not affect `npm ci`. @@ -73,13 +187,132 @@ will *not* run any pre- or post-scripts. -#### `script-shell` +#### `audit` -* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows -* Type: null or String +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. + + + + +#### `bin-links` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or `.cmd` shims on Windows) for package +executables. + +Set to false to have it not do this. 
This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. + + + + +#### `fund` + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each `npm install` +acknowledging the number of dependencies looking for funding. See [`npm +fund`](/commands/npm-fund) for details. + + + + +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + + + + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result in selecting all + workspaces within that folder) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + + + + +#### `workspaces` + +* Default: null +* Type: null or Boolean + +Set to true to run the command in the context of **all** configured +workspaces. + +Explicitly setting this to false will cause commands like `install` to +ignore workspaces altogether. When not set explicitly: + +- Commands that operate on the `node_modules` tree (install, update, etc.) +will link workspaces into the `node_modules` folder. - Commands that do +other things (test, exec, publish, etc.) will operate on the root project, +_unless_ one or more workspaces are specified in the `workspace` config. + +This value is not exported to the environment for child processes. + + + + +#### `include-workspace-root` + +* Default: false +* Type: Boolean + +Include the workspace root when workspaces are enabled for a command. + +When false, specifying individual workspaces via the `workspace` config, or +all workspaces via the `workspaces` flag, will cause npm to operate only on +the specified workspaces, and not on the root project. + +This value is not exported to the environment for child processes. + + + + +#### `install-links` + +* Default: false +* Type: Boolean -The shell to use for scripts run with the `npm exec`, `npm run` and `npm -init ` commands. +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. diff --git a/deps/npm/docs/content/commands/npm-install.md b/deps/npm/docs/content/commands/npm-install.md index 7e5544f85e3dda..35e0df22777336 100644 --- a/deps/npm/docs/content/commands/npm-install.md +++ b/deps/npm/docs/content/commands/npm-install.md @@ -311,7 +311,7 @@ into a tarball (b). can be any valid semver range or exact version, and npm will look for any tags or refs matching that range in the remote repository, much as it would for a registry dependency. If neither `#` or - `#semver:` is specified, then `master` is used. 
+ `#semver:` is specified, then the default branch is used. As with regular git dependencies, `dependencies` and `devDependencies` will be installed if the package has a `prepare` script before the diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md index a97c5168e6e0b4..a7936fafc72a2c 100644 --- a/deps/npm/docs/content/commands/npm-ls.md +++ b/deps/npm/docs/content/commands/npm-ls.md @@ -44,7 +44,7 @@ npm@@VERSION@ /path/to/npm It will print out extraneous, missing, and invalid packages. If a project specifies git urls for dependencies these are shown -in parentheses after the name@version to make it easier for users to +in parentheses after the `name@version` to make it easier for users to recognize potential forks of a project. The tree shown is the logical dependency tree, based on package @@ -62,7 +62,7 @@ disk would be roughly identical. With the advent of automatic install-time deduplication of dependencies in npm v3, the `ls` output was modified to display the logical dependency graph as a tree structure, since this was more useful to most users. -However, without using `npm ls -l`, it became impossible show _where_ a +However, without using `npm ls -l`, it became impossible to show _where_ a package was actually installed much of the time! With the advent of automatic installation of `peerDependencies` in npm v7, diff --git a/deps/npm/docs/content/configuring-npm/package-json.md b/deps/npm/docs/content/configuring-npm/package-json.md index f0315d60efef48..b0231662f69309 100644 --- a/deps/npm/docs/content/configuring-npm/package-json.md +++ b/deps/npm/docs/content/configuring-npm/package-json.md @@ -632,7 +632,7 @@ commit. If the commit-ish has the format `#semver:`, `` can be any valid semver range or exact version, and npm will look for any tags or refs matching that range in the remote repository, much as it would for a registry dependency. If neither `#` or `#semver:` is -specified, then `master` is used. +specified, then the default branch is used. Examples: diff --git a/deps/npm/docs/content/using-npm/workspaces.md b/deps/npm/docs/content/using-npm/workspaces.md index 82491cd74af823..5b68ef8ce9d3d9 100644 --- a/deps/npm/docs/content/using-npm/workspaces.md +++ b/deps/npm/docs/content/using-npm/workspaces.md @@ -137,6 +137,8 @@ nested workspaces to be consumed elsewhere. You can use the `workspace` configuration option to run commands in the context of a configured workspace. +Additionally, if your current directory is in a workspace, the `workspace` +configuration is implicitly set, and `prefix` is set to the root workspace. Following is a quick example on how to use the `npm run` command in the context of nested workspaces. For a project containing multiple workspaces, e.g: @@ -158,7 +160,13 @@ given command in the context of that specific workspace. e.g: npm run test --workspace=a ``` -This will run the `test` script defined within the +You could also run the command within the workspace. + +``` +cd packages/a && npm run test +``` + +Either will run the `test` script defined within the `./packages/a/package.json` file. Please note that you can also specify this argument multiple times in the diff --git a/deps/npm/docs/output/commands/npm-ci.html b/deps/npm/docs/output/commands/npm-ci.html index abc8ce6deb0e31..51efa6ea98bc07 100644 --- a/deps/npm/docs/output/commands/npm-ci.html +++ b/deps/npm/docs/output/commands/npm-ci.html @@ -142,7 +142,7 @@

npm-ci

Table of contents

- +

Synopsis

@@ -205,15 +205,108 @@

Configuration

-

audit

+

save

+
    +
  • Default: true unless when using npm update where it defaults to false
  • +
  • Type: Boolean
  • +
+

Save installed packages to a package.json file as dependencies.

+

When used with the npm rm command, removes the dependency from +package.json.

+

Will also prevent writing to package-lock.json if set to false.

+ + +

save-exact

+
    +
  • Default: false
  • +
  • Type: Boolean
  • +
+

Dependencies saved to package.json will be configured with an exact version +rather than using npm's default semver range operator.

+ + +

global

+
    +
  • Default: false
  • +
  • Type: Boolean
  • +
+

Operates in "global" mode, so that packages are installed into the prefix +folder instead of the current working directory. See +folders for more on the differences in behavior.

+
    +
  • packages are installed into the {prefix}/lib/node_modules folder, instead +of the current working directory.
  • +
  • bin files are linked to {prefix}/bin
  • +
  • man pages are linked to {prefix}/share/man
  • +
+ + +

global-style

+
    +
  • Default: false
  • +
  • Type: Boolean
  • +
+

Causes npm to install the package into your local node_modules folder with +the same layout it uses with the global node_modules folder. Only your +direct dependencies will show in node_modules and everything they depend +on will be flattened in their node_modules folders. This obviously will +eliminate some deduping. If used with legacy-bundling, legacy-bundling +will be preferred.

+ + +

legacy-bundling

+
    +
  • Default: false
  • +
  • Type: Boolean
  • +
+

Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with global-style this option +will be preferred.

+ + +

omit

+
    +
  • Default: 'dev' if the NODE_ENV environment variable is set to +'production', otherwise empty.
  • +
  • Type: "dev", "optional", or "peer" (can be set multiple times)
  • +
+

Dependency types to omit from the installation tree on disk.

+

Note that these dependencies are still resolved and added to the +package-lock.json or npm-shrinkwrap.json file. They are just not +physically installed on disk.

+

If a package type appears in both the --include and --omit lists, then +it will be included.

+

If the resulting omit list includes 'dev', then the NODE_ENV environment +variable will be set to 'production' for all lifecycle scripts.
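For example, a clean install that leaves development dependencies out of node_modules might be run as (illustrative command):
npm ci --omit=dev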

+ + +

strict-peer-deps

+
    +
  • Default: false
  • +
  • Type: Boolean
  • +
+

If set to true, and --legacy-peer-deps is not set, then any +conflicting peerDependencies will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships.

+

By default, conflicting peerDependencies deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's peerDependencies object.

+

When such an override is performed, a warning is printed, explaining the +conflict and the packages involved. If --strict-peer-deps is set, then +this warning is treated as a failure.
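For instance, to have such peer-dependency conflicts fail the install outright, one might run (illustrative command):
npm ci --strict-peer-deps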

+ + +

package-lock

  • Default: true
  • Type: Boolean
-

When "true" submit audit reports alongside the current npm command to the -default registry and all registries configured for scopes. See the -documentation for npm audit for details on what is -submitted.

+

If set to false, then ignore package-lock.json files when installing. This +will also prevent writing package-lock.json if save is true.

+

This configuration does not affect npm ci.
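For example, an install that neither reads nor writes package-lock.json might be invoked as (illustrative command):
npm install --no-package-lock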

foreground-scripts

@@ -240,12 +333,110 @@

ignore-scripts

will not run any pre- or post-scripts.

-

script-shell

+

audit

    -
  • Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows
  • -
  • Type: null or String
  • +
  • Default: true
  • +
  • Type: Boolean
  • +
+

When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for npm audit for details on what is +submitted.

+ + + +
    +
  • Default: true
  • +
  • Type: Boolean
  • +
+

Tells npm to create symlinks (or .cmd shims on Windows) for package +executables.

+

Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems.

+ + +

fund

+
    +
  • Default: true
  • +
  • Type: Boolean
  • +
+

When "true" displays the message at the end of each npm install +acknowledging the number of dependencies looking for funding. See npm fund for details.

+ + +

dry-run

+
    +
  • Default: false
  • +
  • Type: Boolean
  • +
+

Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, install, update, +dedupe, uninstall, as well as pack and publish.

+

Note: This is NOT honored by other network related commands, eg dist-tags, +owner, etc.
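For example, to preview the changes an install would make without modifying anything, one might run (illustrative command):
npm install --dry-run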

+ + +

workspace

+
    +
  • Default:
  • +
  • Type: String (can be set multiple times)
  • +
+

Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option.

+

Valid values for the workspace config are either:

+
    +
  • Workspace names
  • +
  • Path to a workspace directory
  • +
  • Path to a parent workspace directory (will result in selecting all +workspaces within that folder)
  • +
+

When set for the npm init command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project.

+

This value is not exported to the environment for child processes.

+ + +

workspaces

+
    +
  • Default: null
  • +
  • Type: null or Boolean
  • +
+

Set to true to run the command in the context of all configured +workspaces.

+

Explicitly setting this to false will cause commands like install to +ignore workspaces altogether. When not set explicitly:

+
    +
  • Commands that operate on the node_modules tree (install, update, etc.) +will link workspaces into the node_modules folder. - Commands that do +other things (test, exec, publish, etc.) will operate on the root project, +unless one or more workspaces are specified in the workspace config.
  • +
+

This value is not exported to the environment for child processes.

+ + +

include-workspace-root

+
    +
  • Default: false
  • +
  • Type: Boolean
  • +
+

Include the workspace root when workspaces are enabled for a command.

+

When false, specifying individual workspaces via the workspace config, or +all workspaces via the workspaces flag, will cause npm to operate only on +the specified workspaces, and not on the root project.

+

This value is not exported to the environment for child processes.

+ + + +
    +
  • Default: false
  • +
  • Type: Boolean
-

The shell to use for scripts run with the npm exec, npm run and npm init <package-spec> commands.

+

When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

diff --git a/deps/npm/docs/output/commands/npm-init.html b/deps/npm/docs/output/commands/npm-init.html index c7c00b3531becf..3897be30c76a9f 100644 --- a/deps/npm/docs/output/commands/npm-init.html +++ b/deps/npm/docs/output/commands/npm-init.html @@ -170,6 +170,8 @@

Description

  • npm init foo -> npm exec create-foo
  • npm init @usr/foo -> npm exec @usr/create-foo
  • npm init @usr -> npm exec @usr/create
  • +
  • npm init @usr@2.0.0 -> npm exec @usr/create@2.0.0
  • +
  • npm init @usr/foo@2.0.0 -> npm exec @usr/create-foo@2.0.0
  • If the initializer is omitted (by just calling npm init), init will fall back to legacy init behavior. It will ask you a bunch of questions, and diff --git a/deps/npm/docs/output/commands/npm-install-ci-test.html b/deps/npm/docs/output/commands/npm-install-ci-test.html index 8d3d9541421fe0..f7d6e04649c48b 100644 --- a/deps/npm/docs/output/commands/npm-install-ci-test.html +++ b/deps/npm/docs/output/commands/npm-install-ci-test.html @@ -142,7 +142,7 @@

    npm-install-ci-test

    Table of contents

    - +

    Synopsis

    @@ -162,15 +162,108 @@

    Configuration

    -

    audit

    +

    save

    +
      +
    • Default: true unless when using npm update where it defaults to false
    • +
    • Type: Boolean
    • +
    +

    Save installed packages to a package.json file as dependencies.

    +

    When used with the npm rm command, removes the dependency from +package.json.

    +

    Will also prevent writing to package-lock.json if set to false.

    + + +

    save-exact

    +
      +
    • Default: false
    • +
    • Type: Boolean
    • +
    +

    Dependencies saved to package.json will be configured with an exact version +rather than using npm's default semver range operator.

    + + +

    global

    +
      +
    • Default: false
    • +
    • Type: Boolean
    • +
    +

    Operates in "global" mode, so that packages are installed into the prefix +folder instead of the current working directory. See +folders for more on the differences in behavior.

    +
      +
    • packages are installed into the {prefix}/lib/node_modules folder, instead +of the current working directory.
    • +
    • bin files are linked to {prefix}/bin
    • +
    • man pages are linked to {prefix}/share/man
    • +
    + + +

    global-style

    +
      +
    • Default: false
    • +
    • Type: Boolean
    • +
    +

    Causes npm to install the package into your local node_modules folder with +the same layout it uses with the global node_modules folder. Only your +direct dependencies will show in node_modules and everything they depend +on will be flattened in their node_modules folders. This obviously will +eliminate some deduping. If used with legacy-bundling, legacy-bundling +will be preferred.

    + + +

    legacy-bundling

    +
      +
    • Default: false
    • +
    • Type: Boolean
    • +
    +

    Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with global-style this option +will be preferred.

    + + +

    omit

    +
      +
    • Default: 'dev' if the NODE_ENV environment variable is set to +'production', otherwise empty.
    • +
    • Type: "dev", "optional", or "peer" (can be set multiple times)
    • +
    +

    Dependency types to omit from the installation tree on disk.

    +

    Note that these dependencies are still resolved and added to the +package-lock.json or npm-shrinkwrap.json file. They are just not +physically installed on disk.

    +

    If a package type appears in both the --include and --omit lists, then +it will be included.

    +

    If the resulting omit list includes 'dev', then the NODE_ENV environment +variable will be set to 'production' for all lifecycle scripts.

    + + +

    strict-peer-deps

    +
      +
    • Default: false
    • +
    • Type: Boolean
    • +
    +

    If set to true, and --legacy-peer-deps is not set, then any +conflicting peerDependencies will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships.

    +

    By default, conflicting peerDependencies deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's peerDependencies object.

    +

When such an override is performed, a warning is printed, explaining the +conflict and the packages involved. If --strict-peer-deps is set, then +this warning is treated as a failure.

    + + +

    package-lock

    • Default: true
    • Type: Boolean
    -

    When "true" submit audit reports alongside the current npm command to the -default registry and all registries configured for scopes. See the -documentation for npm audit for details on what is -submitted.

    +

    If set to false, then ignore package-lock.json files when installing. This +will also prevent writing package-lock.json if save is true.

    +

    This configuration does not affect npm ci.

    foreground-scripts

    @@ -197,12 +290,110 @@

    ignore-scripts

    will not run any pre- or post-scripts.

    -

    script-shell

    +

    audit

      -
    • Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows
    • -
    • Type: null or String
    • +
    • Default: true
    • +
    • Type: Boolean
    • +
    +

    When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for npm audit for details on what is +submitted.

    + + + +
      +
    • Default: true
    • +
    • Type: Boolean
    • +
    +

    Tells npm to create symlinks (or .cmd shims on Windows) for package +executables.

    +

    Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems.

    + + +

    fund

    +
      +
    • Default: true
    • +
    • Type: Boolean
    • +
    +

    When "true" displays the message at the end of each npm install +acknowledging the number of dependencies looking for funding. See npm fund for details.

    + + +

    dry-run

    +
      +
    • Default: false
    • +
    • Type: Boolean
    • +
    +

    Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, install, update, +dedupe, uninstall, as well as pack and publish.

    +

    Note: This is NOT honored by other network related commands, eg dist-tags, +owner, etc.

    + + +

    workspace

    +
      +
    • Default:
    • +
    • Type: String (can be set multiple times)
    • +
    +

    Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option.

    +

    Valid values for the workspace config are either:

    +
      +
    • Workspace names
    • +
    • Path to a workspace directory
    • +
    • Path to a parent workspace directory (will result in selecting all +workspaces within that folder)
    • +
    +

    When set for the npm init command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project.

    +

    This value is not exported to the environment for child processes.

    + + +

    workspaces

    +
      +
    • Default: null
    • +
    • Type: null or Boolean
    • +
    +

    Set to true to run the command in the context of all configured +workspaces.

    +

    Explicitly setting this to false will cause commands like install to +ignore workspaces altogether. When not set explicitly:

    +
      +
    • Commands that operate on the node_modules tree (install, update, etc.) +will link workspaces into the node_modules folder. - Commands that do +other things (test, exec, publish, etc.) will operate on the root project, +unless one or more workspaces are specified in the workspace config.
    • +
    +

    This value is not exported to the environment for child processes.

    + + +

    include-workspace-root

    +
      +
    • Default: false
    • +
    • Type: Boolean
    • +
    +

    Include the workspace root when workspaces are enabled for a command.

    +

    When false, specifying individual workspaces via the workspace config, or +all workspaces via the workspaces flag, will cause npm to operate only on +the specified workspaces, and not on the root project.

    +

    This value is not exported to the environment for child processes.

    + + + +
      +
    • Default: false
    • +
    • Type: Boolean
    -

    The shell to use for scripts run with the npm exec, npm run and npm init <package-spec> commands.

    +

    When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

    diff --git a/deps/npm/docs/output/commands/npm-install.html b/deps/npm/docs/output/commands/npm-install.html index 48f62f9e4822c7..7702ab5c167750 100644 --- a/deps/npm/docs/output/commands/npm-install.html +++ b/deps/npm/docs/output/commands/npm-install.html @@ -408,7 +408,7 @@

    Description

    can be any valid semver range or exact version, and npm will look for any tags or refs matching that range in the remote repository, much as it would for a registry dependency. If neither #<commit-ish> or -#semver:<semver> is specified, then master is used.

    +#semver:<semver> is specified, then the default branch is used.
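For example, a package.json dependency tracking a semver range of git tags might look like (illustrative entry, mirroring the package-json docs):
"cli": "git+ssh://git@github.com:npm/cli#semver:^5.0"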

    As with regular git dependencies, dependencies and devDependencies will be installed if the package has a prepare script before the package is done installing.

    diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index 47b3bbc085e164..ce8eda1b71176d 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -166,13 +166,13 @@

    Description

    the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:

    -
    npm@8.15.0 /path/to/npm
    +
    npm@8.15.1 /path/to/npm
     └─┬ init-package-json@0.0.4
       └── promzard@0.1.5
     

    It will print out extraneous, missing, and invalid packages.

    If a project specifies git urls for dependencies these are shown -in parentheses after the name@version to make it easier for users to +in parentheses after the name@version to make it easier for users to recognize potential forks of a project.

    The tree shown is the logical dependency tree, based on package dependencies, not the physical layout of your node_modules folder.

    @@ -185,7 +185,7 @@

    Note: Design Changes Pending

    With the advent of automatic install-time deduplication of dependencies in npm v3, the ls output was modified to display the logical dependency graph as a tree structure, since this was more useful to most users. -However, without using npm ls -l, it became impossible show where a +However, without using npm ls -l, it became impossible to show where a package was actually installed much of the time!

    With the advent of automatic installation of peerDependencies in npm v7, this gets even more curious, as peerDependencies are logically diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html index 514017fd875943..b254b72c370861 100644 --- a/deps/npm/docs/output/commands/npm.html +++ b/deps/npm/docs/output/commands/npm.html @@ -149,7 +149,7 @@

    Table of contents

    Version

    -

    8.15.0

    +

    8.15.1

    Description

    npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency diff --git a/deps/npm/docs/output/configuring-npm/package-json.html b/deps/npm/docs/output/configuring-npm/package-json.html index 354069b1a2c738..2b1240ebee9124 100644 --- a/deps/npm/docs/output/configuring-npm/package-json.html +++ b/deps/npm/docs/output/configuring-npm/package-json.html @@ -622,7 +622,7 @@

    Git URLs as Dependencies

    be any valid semver range or exact version, and npm will look for any tags or refs matching that range in the remote repository, much as it would for a registry dependency. If neither #<commit-ish> or #semver:<semver> is -specified, then master is used.

    +specified, then the default branch is used.

    Examples:

    git+ssh://git@github.com:npm/cli.git#v1.0.27
     git+ssh://git@github.com:npm/cli#semver:^5.0
    diff --git a/deps/npm/docs/output/using-npm/workspaces.html b/deps/npm/docs/output/using-npm/workspaces.html
    index 06928563acc1fd..2a3dd880c2c697 100644
    --- a/deps/npm/docs/output/using-npm/workspaces.html
    +++ b/deps/npm/docs/output/using-npm/workspaces.html
    @@ -240,7 +240,9 @@ 

    Using workspaces

    nested workspaces to be consumed elsewhere.

    Running commands in the context of workspaces

    You can use the workspace configuration option to run commands in the context -of a configured workspace.

    +of a configured workspace. +Additionally, if your current directory is in a workspace, the workspace +configuration is implicitly set, and prefix is set to the root workspace.

    Following is a quick example on how to use the npm run command in the context of nested workspaces. For a project containing multiple workspaces, e.g:

    .
    @@ -255,7 +257,10 @@ 

    Running commands in the c given command in the context of that specific workspace. e.g:

    npm run test --workspace=a
     
    -

    This will run the test script defined within the +

    You could also run the command within the workspace.

    +
    cd packages/a && npm run test
    +
    +

    Either will run the test script defined within the ./packages/a/package.json file.

    Please note that you can also specify this argument multiple times in the command-line in order to target multiple workspaces, e.g:

    diff --git a/deps/npm/lib/commands/ci.js b/deps/npm/lib/commands/ci.js index 2a6125d564e912..0adf203a9856e6 100644 --- a/deps/npm/lib/commands/ci.js +++ b/deps/npm/lib/commands/ci.js @@ -9,16 +9,13 @@ const log = require('../utils/log-shim.js') const validateLockfile = require('../utils/validate-lockfile.js') const ArboristWorkspaceCmd = require('../arborist-cmd.js') +const Install = require('./install.js') class CI extends ArboristWorkspaceCmd { static description = 'Clean install a project' static name = 'ci' - static params = [ - 'audit', - 'foreground-scripts', - 'ignore-scripts', - 'script-shell', - ] + + static params = Install.params async exec () { if (this.npm.global) { diff --git a/deps/npm/lib/commands/diff.js b/deps/npm/lib/commands/diff.js index b8a64bd98a0394..bbd6fae6680ca9 100644 --- a/deps/npm/lib/commands/diff.js +++ b/deps/npm/lib/commands/diff.js @@ -106,7 +106,7 @@ class Diff extends BaseCommand { const pkgName = await this.packageName(this.prefix) return [ `${pkgName}@${this.npm.config.get('tag')}`, - `file:${this.prefix}`, + `file:${this.prefix.replace(/#/g, '%23')}`, ] } @@ -134,7 +134,7 @@ class Diff extends BaseCommand { } return [ `${pkgName}@${a}`, - `file:${this.prefix}`, + `file:${this.prefix.replace(/#/g, '%23')}`, ] } @@ -165,7 +165,7 @@ class Diff extends BaseCommand { } return [ `${spec.name}@${spec.fetchSpec}`, - `file:${this.prefix}`, + `file:${this.prefix.replace(/#/g, '%23')}`, ] } @@ -178,7 +178,7 @@ class Diff extends BaseCommand { } } - const aSpec = `file:${node.realpath}` + const aSpec = `file:${node.realpath.replace(/#/g, '%23')}` // finds what version of the package to compare against, if a exact // version or tag was passed than it should use that, otherwise @@ -211,8 +211,8 @@ class Diff extends BaseCommand { ] } else if (spec.type === 'directory') { return [ - `file:${spec.fetchSpec}`, - `file:${this.prefix}`, + `file:${spec.fetchSpec.replace(/#/g, '%23')}`, + `file:${this.prefix.replace(/#/g, '%23')}`, ] } else { throw this.usageError(`Spec type ${spec.type} not supported.`) @@ -279,7 +279,7 @@ class Diff extends BaseCommand { const res = !node || !node.package || !node.package.version ? 
spec.fetchSpec - : `file:${node.realpath}` + : `file:${node.realpath.replace(/#/g, '%23')}` return `${spec.name}@${res}` }) diff --git a/deps/npm/lib/commands/init.js b/deps/npm/lib/commands/init.js index b8b6bd5d53e088..cff8340dcd7074 100644 --- a/deps/npm/lib/commands/init.js +++ b/deps/npm/lib/commands/init.js @@ -85,8 +85,13 @@ class Init extends BaseCommand { const [initerName, ...otherArgs] = args let packageName = initerName + // Only a scope, possibly with a version if (/^@[^/]+$/.test(initerName)) { - packageName = initerName + '/create' + const [, scope, version] = initerName.split('@') + packageName = `@${scope}/create` + if (version) { + packageName = `${packageName}@${version}` + } } else { const req = npa(initerName) if (req.type === 'git' && req.hosted) { diff --git a/deps/npm/lib/commands/link.js b/deps/npm/lib/commands/link.js index b0b889ea787fd5..7bce73ed2bb6f5 100644 --- a/deps/npm/lib/commands/link.js +++ b/deps/npm/lib/commands/link.js @@ -122,7 +122,7 @@ class Link extends ArboristWorkspaceCmd { ...this.npm.flatOptions, prune: false, path: this.npm.prefix, - add: names.map(l => `file:${resolve(globalTop, 'node_modules', l)}`), + add: names.map(l => `file:${resolve(globalTop, 'node_modules', l).replace(/#/g, '%23')}`), save, workspaces: this.workspaceNames, }) @@ -133,7 +133,7 @@ class Link extends ArboristWorkspaceCmd { async linkPkg () { const wsp = this.workspacePaths const paths = wsp && wsp.length ? wsp : [this.npm.prefix] - const add = paths.map(path => `file:${path}`) + const add = paths.map(path => `file:${path.replace(/#/g, '%23')}`) const globalTop = resolve(this.npm.globalDir, '..') const arb = new Arborist({ ...this.npm.flatOptions, diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js index 2197f11a52c4a9..66111cab89a844 100644 --- a/deps/npm/lib/npm.js +++ b/deps/npm/lib/npm.js @@ -241,16 +241,18 @@ class Npm extends EventEmitter { await this.time('npm:load:configload', () => this.config.load()) // mkdir this separately since the logs dir can be set to - // a different location. an error here should be surfaced - // right away since it will error in cacache later + // a different location. if this fails, then we don't have + // a cache dir, but we don't want to fail immediately since + // the command might not need a cache dir (like `npm --version`) await this.time('npm:load:mkdirpcache', () => - fs.mkdir(this.cache, { recursive: true, owner: 'inherit' })) + fs.mkdir(this.cache, { recursive: true, owner: 'inherit' }) + .catch((e) => log.verbose('cache', `could not create cache: ${e}`))) // its ok if this fails. user might have specified an invalid dir // which we will tell them about at the end await this.time('npm:load:mkdirplogs', () => fs.mkdir(this.logsDir, { recursive: true, owner: 'inherit' }) - .catch((e) => log.warn('logfile', `could not create logs-dir: ${e}`))) + .catch((e) => log.verbose('logfile', `could not create logs-dir: ${e}`))) // note: this MUST be shorter than the actual argv length, because it // uses the same memory, so node will truncate it if it's too long. 
diff --git a/deps/npm/lib/utils/log-file.js b/deps/npm/lib/utils/log-file.js index 9cf6513bedf484..d62329c8551e21 100644 --- a/deps/npm/lib/utils/log-file.js +++ b/deps/npm/lib/utils/log-file.js @@ -204,7 +204,9 @@ class LogFiles { this.#files.push(logStream.path) return logStream } catch (e) { - log.warn('logfile', `could not be created: ${e}`) + // If the user has a readonly logdir then we don't want to + // warn this on every command so it should be verbose + log.verbose('logfile', `could not be created: ${e}`) } } @@ -226,7 +228,7 @@ class LogFiles { ) // Always ignore the currently written files - const files = await glob(globify(logGlob), { ignore: this.#files.map(globify) }) + const files = await glob(globify(logGlob), { ignore: this.#files.map(globify), silent: true }) const toDelete = files.length - this.#logsMax if (toDelete <= 0) { diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1 index 316da3a61189ce..6a83ab881caec5 100644 --- a/deps/npm/man/man1/npm-ci.1 +++ b/deps/npm/man/man1/npm-ci.1 @@ -80,7 +80,126 @@ cache: .fi .RE .SS Configuration -.SS \fBaudit\fP +.SS \fBsave\fP +.RS 0 +.IP \(bu 2 +Default: \fBtrue\fP unless when using \fBnpm update\fP where it defaults to \fBfalse\fP +.IP \(bu 2 +Type: Boolean + +.RE +.P +Save installed packages to a \fBpackage\.json\fP file as dependencies\. +.P +When used with the \fBnpm rm\fP command, removes the dependency from +\fBpackage\.json\fP\|\. +.P +Will also prevent writing to \fBpackage\-lock\.json\fP if set to \fBfalse\fP\|\. +.SS \fBsave\-exact\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Dependencies saved to package\.json will be configured with an exact version +rather than using npm's default semver range operator\. +.SS \fBglobal\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Operates in "global" mode, so that packages are installed into the \fBprefix\fP +folder instead of the current working directory\. See +npm help folders for more on the differences in behavior\. +.RS 0 +.IP \(bu 2 +packages are installed into the \fB{prefix}/lib/node_modules\fP folder, instead +of the current working directory\. +.IP \(bu 2 +bin files are linked to \fB{prefix}/bin\fP +.IP \(bu 2 +man pages are linked to \fB{prefix}/share/man\fP + +.RE +.SS \fBglobal\-style\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Causes npm to install the package into your local \fBnode_modules\fP folder with +the same layout it uses with the global \fBnode_modules\fP folder\. Only your +direct dependencies will show in \fBnode_modules\fP and everything they depend +on will be flattened in their \fBnode_modules\fP folders\. This obviously will +eliminate some deduping\. If used with \fBlegacy\-bundling\fP, \fBlegacy\-bundling\fP +will be preferred\. +.SS \fBlegacy\-bundling\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Causes npm to install the package such that versions of npm prior to 1\.4, +such as the one included with node 0\.8, can install the package\. This +eliminates all automatic deduping\. If used with \fBglobal\-style\fP this option +will be preferred\. +.SS \fBomit\fP +.RS 0 +.IP \(bu 2 +Default: 'dev' if the \fBNODE_ENV\fP environment variable is set to +\|'production', otherwise empty\. +.IP \(bu 2 +Type: "dev", "optional", or "peer" (can be set multiple times) + +.RE +.P +Dependency types to omit from the installation tree on disk\. 
+.P +Note that these dependencies \fIare\fR still resolved and added to the +\fBpackage\-lock\.json\fP or \fBnpm\-shrinkwrap\.json\fP file\. They are just not +physically installed on disk\. +.P +If a package type appears in both the \fB\-\-include\fP and \fB\-\-omit\fP lists, then +it will be included\. +.P +If the resulting omit list includes \fB\|'dev'\fP, then the \fBNODE_ENV\fP environment +variable will be set to \fB\|'production'\fP for all lifecycle scripts\. +.SS \fBstrict\-peer\-deps\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +If set to \fBtrue\fP, and \fB\-\-legacy\-peer\-deps\fP is not set, then \fIany\fR +conflicting \fBpeerDependencies\fP will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non\-peer +dependency relationships\. +.P +By default, conflicting \fBpeerDependencies\fP deep in the dependency graph will +be resolved using the nearest non\-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's \fBpeerDependencies\fP object\. +.P +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved\. If \fB\-\-strict\-peer\-deps\fP is set, then +this warning is treated as a failure\. +.SS \fBpackage\-lock\fP .RS 0 .IP \(bu 2 Default: true @@ -89,10 +208,10 @@ Type: Boolean .RE .P -When "true" submit audit reports alongside the current npm command to the -default registry and all registries configured for scopes\. See the -documentation for npm help \fBaudit\fP for details on what is -submitted\. +If set to false, then ignore \fBpackage\-lock\.json\fP files when installing\. This +will also prevent \fIwriting\fR \fBpackage\-lock\.json\fP if \fBsave\fP is true\. +.P +This configuration does not affect \fBnpm ci\fP\|\. .SS \fBforeground\-scripts\fP .RS 0 .IP \(bu 2 @@ -123,17 +242,144 @@ Note that commands explicitly intended to run a particular script, such as \fBnpm start\fP, \fBnpm stop\fP, \fBnpm restart\fP, \fBnpm test\fP, and \fBnpm run\-script\fP will still run their intended script if \fBignore\-scripts\fP is set, but they will \fInot\fR run any pre\- or post\-scripts\. -.SS \fBscript\-shell\fP +.SS \fBaudit\fP +.RS 0 +.IP \(bu 2 +Default: true +.IP \(bu 2 +Type: Boolean + +.RE +.P +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes\. See the +documentation for npm help \fBaudit\fP for details on what is +submitted\. +.SS \fBbin\-links\fP +.RS 0 +.IP \(bu 2 +Default: true +.IP \(bu 2 +Type: Boolean + +.RE +.P +Tells npm to create symlinks (or \fB\|\.cmd\fP shims on Windows) for package +executables\. +.P +Set to false to have it not do this\. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems\. +.SS \fBfund\fP .RS 0 .IP \(bu 2 -Default: '/bin/sh' on POSIX systems, 'cmd\.exe' on Windows +Default: true +.IP \(bu 2 +Type: Boolean + +.RE +.P +When "true" displays the message at the end of each \fBnpm install\fP +acknowledging the number of dependencies looking for funding\. See npm help \fBnpm +fund\fP for details\. +.SS \fBdry\-run\fP +.RS 0 .IP \(bu 2 -Type: null or String +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Indicates that you don't want npm to make any changes and that it should +only report what it would have done\. 
This can be passed into any of the +commands that modify your local installation, eg, \fBinstall\fP, \fBupdate\fP, +\fBdedupe\fP, \fBuninstall\fP, as well as \fBpack\fP and \fBpublish\fP\|\. +.P +Note: This is NOT honored by other network related commands, eg \fBdist\-tags\fP, +\fBowner\fP, etc\. +.SS \fBworkspace\fP +.RS 0 +.IP \(bu 2 +Default: +.IP \(bu 2 +Type: String (can be set multiple times) + +.RE +.P +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option\. +.P +Valid values for the \fBworkspace\fP config are either: +.RS 0 +.IP \(bu 2 +Workspace names +.IP \(bu 2 +Path to a workspace directory +.IP \(bu 2 +Path to a parent workspace directory (will result in selecting all +workspaces within that folder) + +.RE +.P +When set for the \fBnpm init\fP command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project\. +.P +This value is not exported to the environment for child processes\. +.SS \fBworkspaces\fP +.RS 0 +.IP \(bu 2 +Default: null +.IP \(bu 2 +Type: null or Boolean + +.RE +.P +Set to true to run the command in the context of \fBall\fR configured +workspaces\. +.P +Explicitly setting this to false will cause commands like \fBinstall\fP to +ignore workspaces altogether\. When not set explicitly: +.RS 0 +.IP \(bu 2 +Commands that operate on the \fBnode_modules\fP tree (install, update, etc\.) +will link workspaces into the \fBnode_modules\fP folder\. \- Commands that do +other things (test, exec, publish, etc\.) will operate on the root project, +\fIunless\fR one or more workspaces are specified in the \fBworkspace\fP config\. + +.RE +.P +This value is not exported to the environment for child processes\. +.SS \fBinclude\-workspace\-root\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Include the workspace root when workspaces are enabled for a command\. +.P +When false, specifying individual workspaces via the \fBworkspace\fP config, or +all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on +the specified workspaces, and not on the root project\. +.P +This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean .RE .P -The shell to use for scripts run with the \fBnpm exec\fP, \fBnpm run\fP and \fBnpm -init \fP commands\. +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. 
.SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1 index 50334564f99085..b21a794af47cc2 100644 --- a/deps/npm/man/man1/npm-init.1 +++ b/deps/npm/man/man1/npm-init.1 @@ -30,6 +30,10 @@ follows: \fBnpm init @usr/foo\fP \-> \fBnpm exec @usr/create\-foo\fP .IP \(bu 2 \fBnpm init @usr\fP \-> \fBnpm exec @usr/create\fP +.IP \(bu 2 +\fBnpm init @usr@2\.0\.0\fP \-> \fBnpm exec @usr/create@2\.0\.0\fP +.IP \(bu 2 +\fBnpm init @usr/foo@2\.0\.0\fP \-> \fBnpm exec @usr/create\-foo@2\.0\.0\fP .RE .P diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1 index 59fb33b10bdafc..10f785698528d9 100644 --- a/deps/npm/man/man1/npm-install-ci-test.1 +++ b/deps/npm/man/man1/npm-install-ci-test.1 @@ -14,7 +14,126 @@ alias: cit .P This command runs \fBnpm ci\fP followed immediately by \fBnpm test\fP\|\. .SS Configuration -.SS \fBaudit\fP +.SS \fBsave\fP +.RS 0 +.IP \(bu 2 +Default: \fBtrue\fP unless when using \fBnpm update\fP where it defaults to \fBfalse\fP +.IP \(bu 2 +Type: Boolean + +.RE +.P +Save installed packages to a \fBpackage\.json\fP file as dependencies\. +.P +When used with the \fBnpm rm\fP command, removes the dependency from +\fBpackage\.json\fP\|\. +.P +Will also prevent writing to \fBpackage\-lock\.json\fP if set to \fBfalse\fP\|\. +.SS \fBsave\-exact\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Dependencies saved to package\.json will be configured with an exact version +rather than using npm's default semver range operator\. +.SS \fBglobal\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Operates in "global" mode, so that packages are installed into the \fBprefix\fP +folder instead of the current working directory\. See +npm help folders for more on the differences in behavior\. +.RS 0 +.IP \(bu 2 +packages are installed into the \fB{prefix}/lib/node_modules\fP folder, instead +of the current working directory\. +.IP \(bu 2 +bin files are linked to \fB{prefix}/bin\fP +.IP \(bu 2 +man pages are linked to \fB{prefix}/share/man\fP + +.RE +.SS \fBglobal\-style\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Causes npm to install the package into your local \fBnode_modules\fP folder with +the same layout it uses with the global \fBnode_modules\fP folder\. Only your +direct dependencies will show in \fBnode_modules\fP and everything they depend +on will be flattened in their \fBnode_modules\fP folders\. This obviously will +eliminate some deduping\. If used with \fBlegacy\-bundling\fP, \fBlegacy\-bundling\fP +will be preferred\. +.SS \fBlegacy\-bundling\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Causes npm to install the package such that versions of npm prior to 1\.4, +such as the one included with node 0\.8, can install the package\. This +eliminates all automatic deduping\. If used with \fBglobal\-style\fP this option +will be preferred\. +.SS \fBomit\fP +.RS 0 +.IP \(bu 2 +Default: 'dev' if the \fBNODE_ENV\fP environment variable is set to +\|'production', otherwise empty\. +.IP \(bu 2 +Type: "dev", "optional", or "peer" (can be set multiple times) + +.RE +.P +Dependency types to omit from the installation tree on disk\. +.P +Note that these dependencies \fIare\fR still resolved and added to the +\fBpackage\-lock\.json\fP or \fBnpm\-shrinkwrap\.json\fP file\. They are just not +physically installed on disk\. 
+.P +If a package type appears in both the \fB\-\-include\fP and \fB\-\-omit\fP lists, then +it will be included\. +.P +If the resulting omit list includes \fB\|'dev'\fP, then the \fBNODE_ENV\fP environment +variable will be set to \fB\|'production'\fP for all lifecycle scripts\. +.SS \fBstrict\-peer\-deps\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +If set to \fBtrue\fP, and \fB\-\-legacy\-peer\-deps\fP is not set, then \fIany\fR +conflicting \fBpeerDependencies\fP will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non\-peer +dependency relationships\. +.P +By default, conflicting \fBpeerDependencies\fP deep in the dependency graph will +be resolved using the nearest non\-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's \fBpeerDependencies\fP object\. +.P +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved\. If \fB\-\-strict\-peer\-deps\fP is set, then +this warning is treated as a failure\. +.SS \fBpackage\-lock\fP .RS 0 .IP \(bu 2 Default: true @@ -23,10 +142,10 @@ Type: Boolean .RE .P -When "true" submit audit reports alongside the current npm command to the -default registry and all registries configured for scopes\. See the -documentation for npm help \fBaudit\fP for details on what is -submitted\. +If set to false, then ignore \fBpackage\-lock\.json\fP files when installing\. This +will also prevent \fIwriting\fR \fBpackage\-lock\.json\fP if \fBsave\fP is true\. +.P +This configuration does not affect \fBnpm ci\fP\|\. .SS \fBforeground\-scripts\fP .RS 0 .IP \(bu 2 @@ -57,17 +176,144 @@ Note that commands explicitly intended to run a particular script, such as \fBnpm start\fP, \fBnpm stop\fP, \fBnpm restart\fP, \fBnpm test\fP, and \fBnpm run\-script\fP will still run their intended script if \fBignore\-scripts\fP is set, but they will \fInot\fR run any pre\- or post\-scripts\. -.SS \fBscript\-shell\fP +.SS \fBaudit\fP +.RS 0 +.IP \(bu 2 +Default: true +.IP \(bu 2 +Type: Boolean + +.RE +.P +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes\. See the +documentation for npm help \fBaudit\fP for details on what is +submitted\. +.SS \fBbin\-links\fP +.RS 0 +.IP \(bu 2 +Default: true +.IP \(bu 2 +Type: Boolean + +.RE +.P +Tells npm to create symlinks (or \fB\|\.cmd\fP shims on Windows) for package +executables\. +.P +Set to false to have it not do this\. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems\. +.SS \fBfund\fP .RS 0 .IP \(bu 2 -Default: '/bin/sh' on POSIX systems, 'cmd\.exe' on Windows +Default: true +.IP \(bu 2 +Type: Boolean + +.RE +.P +When "true" displays the message at the end of each \fBnpm install\fP +acknowledging the number of dependencies looking for funding\. See npm help \fBnpm +fund\fP for details\. +.SS \fBdry\-run\fP +.RS 0 .IP \(bu 2 -Type: null or String +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Indicates that you don't want npm to make any changes and that it should +only report what it would have done\. This can be passed into any of the +commands that modify your local installation, eg, \fBinstall\fP, \fBupdate\fP, +\fBdedupe\fP, \fBuninstall\fP, as well as \fBpack\fP and \fBpublish\fP\|\. 
+.P +Note: This is NOT honored by other network related commands, eg \fBdist\-tags\fP, +\fBowner\fP, etc\. +.SS \fBworkspace\fP +.RS 0 +.IP \(bu 2 +Default: +.IP \(bu 2 +Type: String (can be set multiple times) + +.RE +.P +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option\. +.P +Valid values for the \fBworkspace\fP config are either: +.RS 0 +.IP \(bu 2 +Workspace names +.IP \(bu 2 +Path to a workspace directory +.IP \(bu 2 +Path to a parent workspace directory (will result in selecting all +workspaces within that folder) + +.RE +.P +When set for the \fBnpm init\fP command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project\. +.P +This value is not exported to the environment for child processes\. +.SS \fBworkspaces\fP +.RS 0 +.IP \(bu 2 +Default: null +.IP \(bu 2 +Type: null or Boolean + +.RE +.P +Set to true to run the command in the context of \fBall\fR configured +workspaces\. +.P +Explicitly setting this to false will cause commands like \fBinstall\fP to +ignore workspaces altogether\. When not set explicitly: +.RS 0 +.IP \(bu 2 +Commands that operate on the \fBnode_modules\fP tree (install, update, etc\.) +will link workspaces into the \fBnode_modules\fP folder\. \- Commands that do +other things (test, exec, publish, etc\.) will operate on the root project, +\fIunless\fR one or more workspaces are specified in the \fBworkspace\fP config\. + +.RE +.P +This value is not exported to the environment for child processes\. +.SS \fBinclude\-workspace\-root\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +Include the workspace root when workspaces are enabled for a command\. +.P +When false, specifying individual workspaces via the \fBworkspace\fP config, or +all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on +the specified workspaces, and not on the root project\. +.P +This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean .RE .P -The shell to use for scripts run with the \fBnpm exec\fP, \fBnpm run\fP and \fBnpm -init \fP commands\. +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1 index 43adfbe9e4b5f0..f54f1e202cd067 100644 --- a/deps/npm/man/man1/npm-install.1 +++ b/deps/npm/man/man1/npm-install.1 @@ -320,7 +320,7 @@ GIT_SSH_COMMAND='ssh \-i ~/\.ssh/custom_ident' npm install git+ssh://git@github\ can be any valid semver range or exact version, and npm will look for any tags or refs matching that range in the remote repository, much as it would for a registry dependency\. If neither \fB#\fP or - \fB#semver:\fP is specified, then \fBmaster\fP is used\. + \fB#semver:\fP is specified, then the default branch is used\. As with regular git dependencies, \fBdependencies\fP and \fBdevDependencies\fP will be installed if the package has a \fBprepare\fP script before the package is done installing\. 
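A minimal sketch of the git specifier forms the npm-install.1 hunk above documents, using hypothetical package and repository names: with a `#<commit-ish>` or `#semver:<semver>` suffix npm resolves that ref or a matching tag, and with no suffix it now falls back to the remote's default branch rather than assuming `master`.

    {
      "dependencies": {
        "example-pkg": "git+ssh://git@github.com/example-user/example-pkg.git#semver:^2.1.0",
        "pinned-pkg": "git+https://github.com/example-user/pinned-pkg.git#v1.0.3",
        "default-branch-pkg": "git+https://github.com/example-user/default-branch-pkg.git"
      }
    }
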
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 511f481a6ea9fd..cc465caebe5a1e 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -26,7 +26,7 @@ example, running \fBnpm ls promzard\fP in npm's source tree will show: .P .RS 2 .nf -npm@8\.15\.0 /path/to/npm +npm@8\.15\.1 /path/to/npm └─┬ init\-package\-json@0\.0\.4 └── promzard@0\.1\.5 .fi @@ -35,7 +35,7 @@ npm@8\.15\.0 /path/to/npm It will print out extraneous, missing, and invalid packages\. .P If a project specifies git urls for dependencies these are shown -in parentheses after the name@version to make it easier for users to +in parentheses after the \fBname@version\fP to make it easier for users to recognize potential forks of a project\. .P The tree shown is the logical dependency tree, based on package @@ -52,7 +52,7 @@ disk would be roughly identical\. With the advent of automatic install\-time deduplication of dependencies in npm v3, the \fBls\fP output was modified to display the logical dependency graph as a tree structure, since this was more useful to most users\. -However, without using \fBnpm ls \-l\fP, it became impossible show \fIwhere\fR a +However, without using \fBnpm ls \-l\fP, it became impossible to show \fIwhere\fR a package was actually installed much of the time! .P With the advent of automatic installation of \fBpeerDependencies\fP in npm v7, diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index 984dbc49192dd7..b3bc8bb417198d 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -4,7 +4,7 @@ .SS Synopsis .SS Version .P -8\.15\.0 +8\.15\.1 .SS Description .P npm is the package manager for the Node JavaScript platform\. It puts diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5 index 0fd5174f6aa7b6..ae522f1de8294a 100644 --- a/deps/npm/man/man5/package-json.5 +++ b/deps/npm/man/man5/package-json.5 @@ -709,7 +709,7 @@ commit\. If the commit\-ish has the format \fB#semver:\fP, \fB\f be any valid semver range or exact version, and npm will look for any tags or refs matching that range in the remote repository, much as it would for a registry dependency\. If neither \fB#\fP or \fB#semver:\fP is -specified, then \fBmaster\fP is used\. +specified, then the default branch is used\. .P Examples: .P diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7 index 0bc4488b44e5f2..6575130ec7f759 100644 --- a/deps/npm/man/man7/workspaces.7 +++ b/deps/npm/man/man7/workspaces.7 @@ -143,6 +143,8 @@ nested workspaces to be consumed elsewhere\. .P You can use the \fBworkspace\fP configuration option to run commands in the context of a configured workspace\. +Additionally, if your current directory is in a workspace, the \fBworkspace\fP +configuration is implicitly set, and \fBprefix\fP is set to the root workspace\. .P Following is a quick example on how to use the \fBnpm run\fP command in the context of nested workspaces\. For a project containing multiple workspaces, e\.g: @@ -168,7 +170,15 @@ npm run test \-\-workspace=a .fi .RE .P -This will run the \fBtest\fP script defined within the +You could also run the command within the workspace\. +.P +.RS 2 +.nf +cd packages/a && npm run test +.fi +.RE +.P +Either will run the \fBtest\fP script defined within the \fB\|\./packages/a/package\.json\fP file\. 
.P Please note that you can also specify this argument multiple times in the diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js index da2652c449a1cd..0e98ed6fc533c1 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js @@ -484,7 +484,7 @@ Try using the package name instead, e.g: .catch(/* istanbul ignore next */ er => null) if (st && st.isSymbolicLink()) { const target = await readlink(dir) - const real = resolve(dirname(dir), target) + const real = resolve(dirname(dir), target).replace(/#/g, '%23') tree.package.dependencies[name] = `file:${real}` } else { tree.package.dependencies[name] = '*' @@ -603,7 +603,7 @@ Try using the package name instead, e.g: if (filepath) { const { name } = spec const tree = this.idealTree.target - spec = npa(`file:${relpath(tree.path, filepath)}`, tree.path) + spec = npa(`file:${relpath(tree.path, filepath).replace(/#/g, '%23')}`, tree.path) spec.name = name } return spec diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js index 43351b69034af8..d4eabe8c0fdfd7 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js @@ -196,7 +196,7 @@ module.exports = cls => class ActualLoader extends cls { const actualRoot = tree.isLink ? tree.target : tree const { dependencies = {} } = actualRoot.package for (const [name, kid] of actualRoot.children.entries()) { - const def = kid.isLink ? `file:${kid.realpath}` : '*' + const def = kid.isLink ? `file:${kid.realpath.replace(/#/g, '%23')}` : '*' dependencies[name] = dependencies[name] || def } actualRoot.package = { ...actualRoot.package, dependencies } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js index fb3f334747fc8a..947659f177eefe 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js @@ -162,7 +162,7 @@ module.exports = cls => class VirtualLoader extends cls { lockfile: s.data, }) for (const [name, path] of workspaces.entries()) { - lockWS.push(['workspace', name, `file:${path}`]) + lockWS.push(['workspace', name, `file:${path.replace(/#/g, '%23')}`]) } const lockEdges = [ diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js index faf016c7040109..4f1061e4abe500 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js @@ -1241,7 +1241,7 @@ module.exports = cls => class Reifier extends cls { // path initially, in which case we can end up with the wrong // thing, so just get the ultimate fetchSpec and relativize it. 
const p = req.fetchSpec.replace(/^file:/, '') - const rel = relpath(addTree.realpath, p) + const rel = relpath(addTree.realpath, p).replace(/#/g, '%23') newSpec = `file:${rel}` } } else { diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js b/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js index e34e40a46d0024..5308dc7e2f95ea 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js @@ -20,8 +20,8 @@ const consistentResolve = (resolved, fromPath, toPath, relPaths = false) => { raw, } = npa(resolved, fromPath) const isPath = type === 'file' || type === 'directory' - return isPath && !relPaths ? `file:${fetchSpec}` - : isPath ? 'file:' + (toPath ? relpath(toPath, fetchSpec) : fetchSpec) + return isPath && !relPaths ? `file:${fetchSpec.replace(/#/g, '%23')}` + : isPath ? 'file:' + (toPath ? relpath(toPath, fetchSpec.replace(/#/g, '%23')) : fetchSpec.replace(/#/g, '%23')) : hosted ? `git+${ hosted.auth ? hosted.https(hostedOpt) : hosted.sshurl(hostedOpt) }` diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/link.js b/deps/npm/node_modules/@npmcli/arborist/lib/link.js index dcce8c0d3dfa55..6fed063772b6a8 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/link.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/link.js @@ -118,7 +118,7 @@ class Link extends Node { // the path/realpath guard is there for the benefit of setting // these things in the "wrong" order return this.path && this.realpath - ? `file:${relpath(dirname(this.path), this.realpath)}` + ? `file:${relpath(dirname(this.path), this.realpath).replace(/#/g, '%23')}` : null } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/node.js b/deps/npm/node_modules/@npmcli/arborist/lib/node.js index d731e5f617908a..66d46d746abf3e 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/node.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/node.js @@ -824,7 +824,7 @@ class Node { } for (const [name, path] of this[_workspaces].entries()) { - new Edge({ from: this, name, spec: `file:${path}`, type: 'workspace' }) + new Edge({ from: this, name, spec: `file:${path.replace(/#/g, '%23')}`, type: 'workspace' }) } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js index 3305bac4914be5..e2180fd4c8076e 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js @@ -815,7 +815,7 @@ class Shrinkwrap { const pathFixed = !resolved ? null : !/^file:/.test(resolved) ? resolved // resolve onto the metadata path - : `file:${resolve(this.path, resolved.slice(5))}` + : `file:${resolve(this.path, resolved.slice(5)).replace(/#/g, '%23')}` // if we have one, only set the other if it matches // otherwise it could be for a completely different thing. @@ -996,7 +996,7 @@ class Shrinkwrap { : npa.resolve(node.name, edge.spec, edge.from.realpath) if (node.isLink) { - lock.version = `file:${relpath(this.path, node.realpath)}` + lock.version = `file:${relpath(this.path, node.realpath).replace(/#/g, '%23')}` } else if (spec && (spec.type === 'file' || spec.type === 'remote')) { lock.version = spec.saveSpec } else if (spec && spec.type === 'git' || rSpec.type === 'git') { @@ -1074,7 +1074,7 @@ class Shrinkwrap { // this especially shows up with workspace edges when the root // node is also a workspace in the set. 
const p = resolve(node.realpath, spec.slice('file:'.length)) - set[k] = `file:${relpath(node.realpath, p)}` + set[k] = `file:${relpath(node.realpath, p).replace(/#/g, '%23')}` } else { set[k] = spec } diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json index 328cdf0146c141..48814eda69166f 100644 --- a/deps/npm/node_modules/@npmcli/arborist/package.json +++ b/deps/npm/node_modules/@npmcli/arborist/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/arborist", - "version": "5.3.0", + "version": "5.3.1", "description": "Manage node_modules trees", "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", diff --git a/deps/npm/package.json b/deps/npm/package.json index 969e8e160c28c5..7526c4754b0ff5 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "8.15.0", + "version": "8.15.1", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ diff --git a/deps/npm/tap-snapshots/test/lib/commands/link.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/link.js.test.cjs index a9a10b20a2f83d..e01409e4ce1964 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/link.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/link.js.test.cjs @@ -5,6 +5,11 @@ * Make sure to inspect the output below. Do not ignore changes! */ 'use strict' +exports[`test/lib/commands/link.js TAP hash character in working directory path > should create a global link to current pkg, even within path with hash 1`] = ` +{CWD}/test/lib/commands/tap-testdir-link-hash-character-in-working-directory-path/global-prefix/lib/node_modules/test-pkg-link -> {CWD}/test/lib/commands/tap-testdir-link-hash-character-in-working-directory-path/i_like_#_in_my_paths/test-pkg-link + +` + exports[`test/lib/commands/link.js TAP link global linked pkg to local nm when using args > should create a local symlink to global pkg 1`] = ` {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/@myscope/bar -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/global-prefix/lib/node_modules/@myscope/bar {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/scoped-linked diff --git a/deps/npm/tap-snapshots/test/lib/load-all-commands.js.test.cjs b/deps/npm/tap-snapshots/test/lib/load-all-commands.js.test.cjs index b697dfbb796c68..88b1d49556fd89 100644 --- a/deps/npm/tap-snapshots/test/lib/load-all-commands.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/load-all-commands.js.test.cjs @@ -116,8 +116,13 @@ Usage: npm ci Options: -[--no-audit] [--foreground-scripts] [--ignore-scripts] -[--script-shell ] +[-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer|--save-bundle] +[-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] +[--omit [--omit ...]] +[--strict-peer-deps] [--no-package-lock] [--foreground-scripts] +[--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] +[-w|--workspace [-w|--workspace ...]] +[-ws|--workspaces] [--include-workspace-root] [--install-links] aliases: clean-install, ic, install-clean, isntall-clean @@ -425,8 +430,13 @@ Usage: npm install-ci-test Options: -[--no-audit] [--foreground-scripts] [--ignore-scripts] -[--script-shell ] 
+[-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer|--save-bundle] +[-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] +[--omit [--omit ...]] +[--strict-peer-deps] [--no-package-lock] [--foreground-scripts] +[--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] +[-w|--workspace [-w|--workspace ...]] +[-ws|--workspaces] [--include-workspace-root] [--install-links] alias: cit diff --git a/deps/npm/tap-snapshots/test/lib/npm.js.test.cjs b/deps/npm/tap-snapshots/test/lib/npm.js.test.cjs index b2ba45b2d615cb..8d5f188b07924c 100644 --- a/deps/npm/tap-snapshots/test/lib/npm.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/npm.js.test.cjs @@ -254,8 +254,13 @@ All commands: npm ci Options: - [--no-audit] [--foreground-scripts] [--ignore-scripts] - [--script-shell ] + [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer|--save-bundle] + [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] + [--omit [--omit ...]] + [--strict-peer-deps] [--no-package-lock] [--foreground-scripts] + [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] + [-w|--workspace [-w|--workspace ...]] + [-ws|--workspaces] [--include-workspace-root] [--install-links] aliases: clean-install, ic, install-clean, isntall-clean @@ -511,8 +516,13 @@ All commands: npm install-ci-test Options: - [--no-audit] [--foreground-scripts] [--ignore-scripts] - [--script-shell ] + [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer|--save-bundle] + [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] + [--omit [--omit ...]] + [--strict-peer-deps] [--no-package-lock] [--foreground-scripts] + [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] + [-w|--workspace [-w|--workspace ...]] + [-ws|--workspaces] [--include-workspace-root] [--install-links] alias: cit diff --git a/deps/npm/test/fixtures/mock-npm.js b/deps/npm/test/fixtures/mock-npm.js index a79812fb71a299..90bf7da4c10bc7 100644 --- a/deps/npm/test/fixtures/mock-npm.js +++ b/deps/npm/test/fixtures/mock-npm.js @@ -108,17 +108,20 @@ const LoadMockNpm = async (t, { cache: cacheDir, global: globalPrefixDir, }) - const prefix = path.join(dir, 'prefix') - const cache = path.join(dir, 'cache') - const globalPrefix = path.join(dir, 'global') - const home = path.join(dir, 'home') + const dirs = { + testdir: dir, + prefix: path.join(dir, 'prefix'), + cache: path.join(dir, 'cache'), + globalPrefix: path.join(dir, 'global'), + home: path.join(dir, 'home'), + } // Set cache to testdir via env var so it is available when load is run // XXX: remove this for a solution where cache argv is passed in mockGlobals(t, { - 'process.env.HOME': home, - 'process.env.npm_config_cache': cache, - ...(globals ? result(globals, { prefix, cache, home }) : {}), + 'process.env.HOME': dirs.home, + 'process.env.npm_config_cache': dirs.cache, + ...(globals ? result(globals, { ...dirs }) : {}), // Some configs don't work because they can't be set via npm.config.set until // config is loaded. But some config items are needed before that. So this is // an explicit set of configs that must be loaded as env vars. 
@@ -126,7 +129,8 @@ const LoadMockNpm = async (t, { ...Object.entries(config) .filter(([k]) => envConfigKeys.includes(k)) .reduce((acc, [k, v]) => { - acc[`process.env.npm_config_${k.replace(/-/g, '_')}`] = v.toString() + acc[`process.env.npm_config_${k.replace(/-/g, '_')}`] = + result(v, { ...dirs }).toString() return acc }, {}), }) @@ -138,7 +142,7 @@ const LoadMockNpm = async (t, { if (load) { await npm.load() - for (const [k, v] of Object.entries(result(config, { npm, prefix, cache }))) { + for (const [k, v] of Object.entries(result(config, { npm, ...dirs }))) { if (typeof v === 'object' && v.value && v.where) { npm.config.set(k, v.value, v.where) } else { @@ -148,20 +152,16 @@ const LoadMockNpm = async (t, { // Set global loglevel *again* since it possibly got reset during load // XXX: remove with npmlog setLoglevel(t, config.loglevel, false) - npm.prefix = prefix - npm.cache = cache - npm.globalPrefix = globalPrefix + npm.prefix = dirs.prefix + npm.cache = dirs.cache + npm.globalPrefix = dirs.globalPrefix } return { ...rest, + ...dirs, Npm, npm, - home, - prefix, - globalPrefix, - testdir: dir, - cache, debugFile: async () => { const readFiles = npm.logFiles.map(f => fs.readFile(f)) const logFiles = await Promise.all(readFiles) @@ -171,7 +171,7 @@ const LoadMockNpm = async (t, { .join('\n') }, timingFile: async () => { - const data = await fs.readFile(path.resolve(cache, '_timing.json'), 'utf8') + const data = await fs.readFile(path.resolve(dirs.cache, '_timing.json'), 'utf8') return JSON.parse(data) // XXX: this fails if multiple timings are written }, } diff --git a/deps/npm/test/lib/commands/init.js b/deps/npm/test/lib/commands/init.js index 32816adbc272ee..e7b2739341437c 100644 --- a/deps/npm/test/lib/commands/init.js +++ b/deps/npm/test/lib/commands/init.js @@ -136,6 +136,44 @@ t.test('npm init @scope/name', async t => { await init.exec(['@npmcli/something']) }) +t.test('npm init @scope@spec', async t => { + t.plan(1) + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../../lib/commands/init.js', { + libnpmexec: ({ args }) => { + t.same( + args, + ['@npmcli/create@foo'], + 'should npx with scoped packages' + ) + }, + }) + const init = new Init(npm) + + process.chdir(npm.localPrefix) + await init.exec(['@npmcli@foo']) +}) + +t.test('npm init @scope/name@spec', async t => { + t.plan(1) + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../../lib/commands/init.js', { + libnpmexec: ({ args }) => { + t.same( + args, + ['@npmcli/create-something@foo'], + 'should npx with scoped packages' + ) + }, + }) + const init = new Init(npm) + + process.chdir(npm.localPrefix) + await init.exec(['@npmcli/something@foo']) +}) + t.test('npm init git spec', async t => { t.plan(1) npm.localPrefix = t.testdir({}) diff --git a/deps/npm/test/lib/commands/link.js b/deps/npm/test/lib/commands/link.js index a01de0b2479909..5bd7a3f1480ae1 100644 --- a/deps/npm/test/lib/commands/link.js +++ b/deps/npm/test/lib/commands/link.js @@ -514,3 +514,39 @@ t.test('--global option', async t => { 'should throw an useful error' ) }) + +t.test('hash character in working directory path', async t => { + const testdir = t.testdir({ + 'global-prefix': { + lib: { + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + }, + }, + }, + 'i_like_#_in_my_paths': { + 'test-pkg-link': { + 'package.json': JSON.stringify({ + name: 'test-pkg-link', + version: '1.0.0', + }), + }, + }, + }) + npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') + 
npm.prefix = resolve(testdir, 'i_like_#_in_my_paths', 'test-pkg-link') + + link.workspacePaths = null + await link.exec([]) + const links = await printLinks({ + path: resolve(npm.globalDir, '..'), + global: true, + }) + + t.matchSnapshot(links, 'should create a global link to current pkg, even within path with hash') +}) diff --git a/deps/npm/test/lib/npm.js b/deps/npm/test/lib/npm.js index cd692a93f50772..62e48ce6050db1 100644 --- a/deps/npm/test/lib/npm.js +++ b/deps/npm/test/lib/npm.js @@ -3,6 +3,7 @@ const { resolve, dirname, join } = require('path') const { load: loadMockNpm } = require('../fixtures/mock-npm.js') const mockGlobals = require('../fixtures/mock-globals') +const fs = require('@npmcli/fs') // delete this so that we don't have configs from the fact that it // is being run by 'npm test' @@ -435,23 +436,42 @@ t.test('debug log', async t => { t.match(debug, log2.join(' '), 'after load log appears') }) - t.test('with bad dir', async t => { - const { npm } = await loadMockNpm(t, { + t.test('can load with bad dir', async t => { + const { npm, testdir } = await loadMockNpm(t, { + load: false, config: { - 'logs-dir': 'LOGS_DIR', - }, - mocks: { - '@npmcli/fs': { - mkdir: async (dir) => { - if (dir.includes('LOGS_DIR')) { - throw new Error('err') - } - }, - }, + 'logs-dir': (c) => join(c.testdir, 'my_logs_dir'), }, }) + const logsDir = join(testdir, 'my_logs_dir') + + // make logs dir a file before load so it files + await fs.writeFile(logsDir, 'A_TEXT_FILE') + await t.resolves(npm.load(), 'loads with invalid logs dir') + + t.equal(npm.logFiles.length, 0, 'no log files array') + t.strictSame(fs.readFileSync(logsDir, 'utf-8'), 'A_TEXT_FILE') + }) +}) + +t.test('cache dir', async t => { + t.test('creates a cache dir', async t => { + const { npm } = await loadMockNpm(t) + + t.ok(fs.existsSync(npm.cache), 'cache dir exists') + }) + + t.test('can load with a bad cache dir', async t => { + const { npm, cache } = await loadMockNpm(t, { + load: false, + // The easiest way to make mkdir(cache) fail is to make it a file. + // This will have the same effect as if its read only or inaccessible. + cacheDir: 'A_TEXT_FILE', + }) + + await t.resolves(npm.load(), 'loads with cache dir as a file') - t.equal(npm.logFiles.length, 0, 'no log file') + t.equal(fs.readFileSync(cache, 'utf-8'), 'A_TEXT_FILE') }) }) From cc6e0fc8ffd08c63662f97fde8f0b248880a7826 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Fri, 29 Jul 2022 00:00:40 +0200 Subject: [PATCH 027/177] src: improve SPKAC::ExportChallenge() Declare buf as an unsigned char to get rid of the reinterpret_cast and do not ignore the return value of ASN1_STRING_TO_UTF8. This also removes the need to call strlen() on the result. PR-URL: https://github.com/nodejs/node/pull/44002 Reviewed-By: Darshan Sen Reviewed-By: Luigi Pinca Reviewed-By: Anna Henningsen --- src/crypto/crypto_spkac.cc | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/crypto/crypto_spkac.cc b/src/crypto/crypto_spkac.cc index c4f43b8ced5ed7..92484847afb1dc 100644 --- a/src/crypto/crypto_spkac.cc +++ b/src/crypto/crypto_spkac.cc @@ -100,12 +100,9 @@ ByteSource ExportChallenge(const ArrayBufferOrViewContents& input) { if (!sp) return ByteSource(); - char* buf = nullptr; - ASN1_STRING_to_UTF8( - reinterpret_cast(&buf), - sp->spkac->challenge); - - return ByteSource::Allocated(buf, strlen(buf)); + unsigned char* buf = nullptr; + int buf_size = ASN1_STRING_to_UTF8(&buf, sp->spkac->challenge); + return (buf_size >= 0) ? 
ByteSource::Allocated(buf, buf_size) : ByteSource(); } void ExportChallenge(const FunctionCallbackInfo& args) { From 728e18e0252eb9d93cae096acd4f03c610f5931c Mon Sep 17 00:00:00 2001 From: Kohei Ueno Date: Fri, 29 Jul 2022 14:47:56 +0900 Subject: [PATCH 028/177] src: fix to use replacement character PR-URL: https://github.com/nodejs/node/pull/43999 Fixes: https://github.com/nodejs/node/issues/43962 Reviewed-By: Antoine du Hamel Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Darshan Sen Reviewed-By: LiviaMedeiros Reviewed-By: Feng Yu --- src/node_i18n.cc | 6 +++++- test/parallel/test-whatwg-encoding-custom-textdecoder.js | 7 +++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/node_i18n.cc b/src/node_i18n.cc index 7a489686d30330..581d52a7d05738 100644 --- a/src/node_i18n.cc +++ b/src/node_i18n.cc @@ -425,7 +425,11 @@ void ConverterObject::Create(const FunctionCallbackInfo& args) { nullptr, nullptr, nullptr, &status); } - new ConverterObject(env, obj, conv, flags); + auto converter = new ConverterObject(env, obj, conv, flags); + size_t sublen = ucnv_getMinCharSize(conv); + std::string sub(sublen, '?'); + converter->set_subst_chars(sub.c_str()); + args.GetReturnValue().Set(obj); } diff --git a/test/parallel/test-whatwg-encoding-custom-textdecoder.js b/test/parallel/test-whatwg-encoding-custom-textdecoder.js index 1fa65164c70678..fe08edc597d3f4 100644 --- a/test/parallel/test-whatwg-encoding-custom-textdecoder.js +++ b/test/parallel/test-whatwg-encoding-custom-textdecoder.js @@ -199,3 +199,10 @@ if (common.hasIntl) { const str = decoder.decode(chunk); assert.strictEqual(str, 'foo\ufffd'); } + +if (common.hasIntl) { + const decoder = new TextDecoder('Shift_JIS'); + const chunk = new Uint8Array([-1]); + const str = decoder.decode(chunk); + assert.strictEqual(str, '\ufffd'); +} From b19564b9d2eae50468d2d714676eb58df25c93b4 Mon Sep 17 00:00:00 2001 From: Jacob Smith <3012099+JakobJingleheimer@users.noreply.github.com> Date: Fri, 29 Jul 2022 10:42:55 +0200 Subject: [PATCH 029/177] test: refactor ESM tests to improve performance PR-URL: https://github.com/nodejs/node/pull/43784 Reviewed-By: Geoffrey Booth Reviewed-By: Antoine du Hamel --- lib/internal/modules/esm/loader.js | 2 +- test/common/index.js | 33 +- test/common/index.mjs | 8 +- test/es-module/test-cjs-esm-warn.js | 102 ++- test/es-module/test-esm-cjs-builtins.js | 29 +- test/es-module/test-esm-cjs-exports.js | 52 +- .../test-esm-cjs-load-error-note.mjs | 243 ++--- test/es-module/test-esm-cjs-main.js | 31 +- .../es-module/test-esm-encoded-path-native.js | 25 +- .../test-esm-experimental-warnings.mjs | 83 +- test/es-module/test-esm-export-not-found.mjs | 67 +- .../test-esm-import-json-named-export.mjs | 40 +- test/es-module/test-esm-initialization.mjs | 39 +- test/es-module/test-esm-invalid-extension.js | 13 - test/es-module/test-esm-invalid-pjson.js | 45 +- test/es-module/test-esm-json.mjs | 40 +- test/es-module/test-esm-loader-chaining.mjs | 842 +++++++++--------- .../test-esm-loader-http-imports.mjs | 59 +- test/es-module/test-esm-loader-not-found.mjs | 43 +- .../test-esm-loader-obsolete-hooks.mjs | 50 +- test/es-module/test-esm-loader-thenable.mjs | 76 +- .../test-esm-loader-with-syntax-error.mjs | 36 +- ...est-esm-module-not-found-commonjs-hint.mjs | 59 +- test/es-module/test-esm-non-js.mjs | 35 +- test/es-module/test-esm-nowarn-exports.mjs | 36 +- .../test-esm-preserve-symlinks-main.js | 18 +- test/es-module/test-esm-repl-imports.js | 35 +- 
test/es-module/test-esm-specifiers.mjs | 137 +-- test/es-module/test-esm-syntax-error.mjs | 27 +- test/es-module/test-esm-tla-unfinished.mjs | 213 +++-- .../test-esm-unknown-or-no-extension.js | 60 +- test/es-module/test-esm-wasm.mjs | 68 +- test/es-module/test-http-imports-cli.mjs | 82 +- .../assertionless-json-import.mjs | 6 +- .../builtin-named-exports-loader.mjs | 2 +- .../es-module-loaders/example-loader.mjs | 4 +- .../es-module-loaders/hook-resolve-type.mjs | 2 +- .../es-module-loaders/hooks-custom.mjs | 8 +- .../es-module-loaders/hooks-obsolete.mjs | 18 +- .../es-module-loaders/http-loader.mjs | 4 +- .../es-module-loaders/loader-get-format.mjs | 10 - .../loader-invalid-format.mjs | 6 +- .../es-module-loaders/loader-invalid-url.mjs | 4 +- .../loader-load-bad-next-url.mjs | 2 +- .../loader-load-impersonating-next-url.mjs | 2 +- .../loader-load-next-modified.mjs | 2 +- .../loader-load-passthru.mjs | 2 +- .../loader-resolve-bad-next-specifier.mjs | 2 +- .../es-module-loaders/loader-resolve-foo.mjs | 2 +- .../loader-resolve-multiple-next-calls.mjs | 4 +- .../loader-resolve-passthru.mjs | 2 +- .../es-module-loaders/loader-shared-dep.mjs | 4 +- .../loader-unknown-builtin-module.mjs | 2 +- .../missing-dynamic-instantiate-hook.mjs | 4 +- .../not-found-assert-loader.mjs | 8 +- .../es-module-loaders/string-sources.mjs | 4 +- 56 files changed, 1390 insertions(+), 1442 deletions(-) delete mode 100644 test/es-module/test-esm-invalid-extension.js delete mode 100644 test/fixtures/es-module-loaders/loader-get-format.mjs diff --git a/lib/internal/modules/esm/loader.js b/lib/internal/modules/esm/loader.js index 223fd68a79fd00..027077cbb6e781 100644 --- a/lib/internal/modules/esm/loader.js +++ b/lib/internal/modules/esm/loader.js @@ -336,7 +336,7 @@ class ESMLoader { * A list of exports from user-defined loaders (as returned by * ESMLoader.import()). */ - async addCustomLoaders( + addCustomLoaders( customLoaders = [], ) { for (let i = 0; i < customLoaders.length; i++) { diff --git a/test/common/index.js b/test/common/index.js index e14fbe59d1946b..9ba2fd9cd9edb5 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -24,7 +24,7 @@ const process = global.process; // Some tests tamper with the process global. const assert = require('assert'); -const { exec, execSync, spawnSync } = require('child_process'); +const { exec, execSync, spawn, spawnSync } = require('child_process'); const fs = require('fs'); // Do not require 'os' until needed so that test-os-checked-function can // monkey patch it. If 'os' is required here, that test will fail. 
@@ -842,6 +842,36 @@ function requireNoPackageJSONAbove(dir = __dirname) { } } +function spawnPromisified(...args) { + let stderr = ''; + let stdout = ''; + + const child = spawn(...args); + child.stderr.setEncoding('utf8'); + child.stderr.on('data', (data) => { stderr += data; }); + child.stdout.setEncoding('utf8'); + child.stdout.on('data', (data) => { stdout += data; }); + + return new Promise((resolve, reject) => { + child.on('close', (code, signal) => { + resolve({ + code, + signal, + stderr, + stdout, + }); + }); + child.on('error', (code, signal) => { + reject({ + code, + signal, + stderr, + stdout, + }); + }); + }); +} + const common = { allowGlobals, buildType, @@ -891,6 +921,7 @@ const common = { skipIfEslintMissing, skipIfInspectorDisabled, skipIfWorker, + spawnPromisified, get enoughTestMem() { return require('os').totalmem() > 0x70000000; /* 1.75 Gb */ diff --git a/test/common/index.mjs b/test/common/index.mjs index a3a34ae7f04435..2b30f499343cc4 100644 --- a/test/common/index.mjs +++ b/test/common/index.mjs @@ -23,6 +23,7 @@ const { hasCrypto, hasIPv6, childShouldThrowAndAbort, + checkoutEOL, createZeroFilledFile, platformTimeout, allowGlobals, @@ -47,7 +48,8 @@ const { getArrayBufferViews, getBufferSources, getTTYfd, - runWithInvalidFD + runWithInvalidFD, + spawnPromisified, } = common; export { @@ -70,6 +72,7 @@ export { hasCrypto, hasIPv6, childShouldThrowAndAbort, + checkoutEOL, createZeroFilledFile, platformTimeout, allowGlobals, @@ -95,5 +98,6 @@ export { getBufferSources, getTTYfd, runWithInvalidFD, - createRequire + createRequire, + spawnPromisified, }; diff --git a/test/es-module/test-cjs-esm-warn.js b/test/es-module/test-cjs-esm-warn.js index d7eeb65b152a4a..c1d60a209502bb 100644 --- a/test/es-module/test-cjs-esm-warn.js +++ b/test/es-module/test-cjs-esm-warn.js @@ -1,10 +1,12 @@ 'use strict'; -const common = require('../common'); -const fixtures = require('../common/fixtures'); -const { spawn } = require('child_process'); -const assert = require('assert'); -const path = require('path'); +const { spawnPromisified } = require('../common'); +const fixtures = require('../common/fixtures.js'); +const assert = require('node:assert'); +const path = require('node:path'); +const { execPath } = require('node:process'); +const { describe, it } = require('node:test'); + const requiringCjsAsEsm = path.resolve(fixtures.path('/es-modules/cjs-esm.js')); const requiringEsm = path.resolve(fixtures.path('/es-modules/cjs-esm-esm.js')); @@ -12,53 +14,55 @@ const pjson = path.resolve( fixtures.path('/es-modules/package-type-module/package.json') ); -{ - const required = path.resolve( - fixtures.path('/es-modules/package-type-module/cjs.js') - ); - const basename = 'cjs.js'; - const child = spawn(process.execPath, [requiringCjsAsEsm]); - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { - stderr += data; - }); - child.on('close', common.mustCall((code, signal) => { + +describe('CJS ↔︎ ESM interop warnings', { concurrency: true }, () => { + + it(async () => { + const required = path.resolve( + fixtures.path('/es-modules/package-type-module/cjs.js') + ); + const basename = 'cjs.js'; + const { code, signal, stderr } = await spawnPromisified(execPath, [requiringCjsAsEsm]); + + assert.ok( + stderr.replaceAll('\r', '').includes( + `Error [ERR_REQUIRE_ESM]: require() of ES Module ${required} from ${requiringCjsAsEsm} not supported.\n` + ) + ); + assert.ok( + stderr.replaceAll('\r', '').includes( + `Instead rename ${basename} to end in .cjs, change the 
requiring ` + + 'code to use dynamic import() which is available in all CommonJS ' + + `modules, or change "type": "module" to "type": "commonjs" in ${pjson} to ` + + 'treat all .js files as CommonJS (using .mjs for all ES modules ' + + 'instead).\n' + ) + ); + assert.strictEqual(code, 1); assert.strictEqual(signal, null); + }); - assert.ok(stderr.replaceAll('\r', '').includes( - `Error [ERR_REQUIRE_ESM]: require() of ES Module ${required} from ${ - requiringCjsAsEsm} not supported.\n`)); - assert.ok(stderr.replaceAll('\r', '').includes( - `Instead rename ${basename} to end in .cjs, change the requiring ` + - 'code to use dynamic import() which is available in all CommonJS ' + - `modules, or change "type": "module" to "type": "commonjs" in ${pjson} to ` + - 'treat all .js files as CommonJS (using .mjs for all ES modules ' + - 'instead).\n')); - })); -} + it(async () => { + const required = path.resolve( + fixtures.path('/es-modules/package-type-module/esm.js') + ); + const basename = 'esm.js'; + const { code, signal, stderr } = await spawnPromisified(execPath, [requiringEsm]); + + assert.ok( + stderr.replace(/\r/g, '').includes( + `Error [ERR_REQUIRE_ESM]: require() of ES Module ${required} from ${requiringEsm} not supported.\n` + ) + ); + assert.ok( + stderr.replace(/\r/g, '').includes( + `Instead change the require of ${basename} in ${requiringEsm} to` + + ' a dynamic import() which is available in all CommonJS modules.\n' + ) + ); -{ - const required = path.resolve( - fixtures.path('/es-modules/package-type-module/esm.js') - ); - const basename = 'esm.js'; - const child = spawn(process.execPath, [requiringEsm]); - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { - stderr += data; - }); - child.on('close', common.mustCall((code, signal) => { assert.strictEqual(code, 1); assert.strictEqual(signal, null); - - assert.ok(stderr.replace(/\r/g, '').includes( - `Error [ERR_REQUIRE_ESM]: require() of ES Module ${required} from ${ - requiringEsm} not supported.\n`)); - assert.ok(stderr.replace(/\r/g, '').includes( - `Instead change the require of ${basename} in ${requiringEsm} to` + - ' a dynamic import() which is available in all CommonJS modules.\n')); - })); -} + }); +}); diff --git a/test/es-module/test-esm-cjs-builtins.js b/test/es-module/test-esm-cjs-builtins.js index 63aae732904137..c49abfd8c93222 100644 --- a/test/es-module/test-esm-cjs-builtins.js +++ b/test/es-module/test-esm-cjs-builtins.js @@ -1,21 +1,20 @@ 'use strict'; -const common = require('../common'); -const fixtures = require('../common/fixtures'); -const { spawn } = require('child_process'); -const assert = require('assert'); +const { spawnPromisified } = require('../common'); +const fixtures = require('../common/fixtures.js'); +const assert = require('node:assert'); +const { execPath } = require('node:process'); +const { describe, it } = require('node:test'); + const entry = fixtures.path('/es-modules/builtin-imports-case.mjs'); -const child = spawn(process.execPath, [entry]); -child.stderr.setEncoding('utf8'); -let stdout = ''; -child.stdout.setEncoding('utf8'); -child.stdout.on('data', (data) => { - stdout += data; +describe('ESM: importing builtins & CJS', () => { + it('should work', async () => { + const { code, signal, stdout } = await spawnPromisified(execPath, [entry]); + + assert.strictEqual(code, 0); + assert.strictEqual(signal, null); + assert.strictEqual(stdout, 'ok\n'); + }); }); -child.on('close', common.mustCall((code, signal) => { - assert.strictEqual(code, 0); - 
assert.strictEqual(signal, null); - assert.strictEqual(stdout, 'ok\n'); -})); diff --git a/test/es-module/test-esm-cjs-exports.js b/test/es-module/test-esm-cjs-exports.js index 7db2c6fdb5971b..4a5a9ffceecc2a 100644 --- a/test/es-module/test-esm-cjs-exports.js +++ b/test/es-module/test-esm-cjs-exports.js @@ -1,35 +1,29 @@ 'use strict'; -const common = require('../common'); -const fixtures = require('../common/fixtures'); -const { spawn } = require('child_process'); -const assert = require('assert'); +const { spawnPromisified } = require('../common'); +const fixtures = require('../common/fixtures.js'); +const assert = require('node:assert'); +const { execPath } = require('node:process'); +const { describe, it } = require('node:test'); -const entry = fixtures.path('/es-modules/cjs-exports.mjs'); -let child = spawn(process.execPath, [entry]); -child.stderr.setEncoding('utf8'); -let stdout = ''; -child.stdout.setEncoding('utf8'); -child.stdout.on('data', (data) => { - stdout += data; -}); -child.on('close', common.mustCall((code, signal) => { - assert.strictEqual(code, 0); - assert.strictEqual(signal, null); - assert.strictEqual(stdout, 'ok\n'); -})); +describe('ESM: importing CJS', { concurrency: true }, () => { + it('should support valid CJS exports', async () => { + const validEntry = fixtures.path('/es-modules/cjs-exports.mjs'); + const { code, signal, stdout } = await spawnPromisified(execPath, [validEntry]); + + assert.strictEqual(code, 0); + assert.strictEqual(signal, null); + assert.strictEqual(stdout, 'ok\n'); + }); + + it('should eror on invalid CJS exports', async () => { + const invalidEntry = fixtures.path('/es-modules/cjs-exports-invalid.mjs'); + const { code, signal, stderr } = await spawnPromisified(execPath, [invalidEntry]); -const entryInvalid = fixtures.path('/es-modules/cjs-exports-invalid.mjs'); -child = spawn(process.execPath, [entryInvalid]); -let stderr = ''; -child.stderr.setEncoding('utf8'); -child.stderr.on('data', (data) => { - stderr += data; + assert.strictEqual(code, 1); + assert.strictEqual(signal, null); + assert.ok(stderr.includes('Warning: To load an ES module')); + assert.ok(stderr.includes('Unexpected token \'export\'')); + }); }); -child.on('close', common.mustCall((code, signal) => { - assert.strictEqual(code, 1); - assert.strictEqual(signal, null); - assert.ok(stderr.includes('Warning: To load an ES module')); - assert.ok(stderr.includes('Unexpected token \'export\'')); -})); diff --git a/test/es-module/test-esm-cjs-load-error-note.mjs b/test/es-module/test-esm-cjs-load-error-note.mjs index 0298432441391c..4df9e903eb627a 100644 --- a/test/es-module/test-esm-cjs-load-error-note.mjs +++ b/test/es-module/test-esm-cjs-load-error-note.mjs @@ -1,163 +1,96 @@ -import { mustCall } from '../common/index.mjs'; -import assert from 'assert'; -import fixtures from '../common/fixtures.js'; -import { spawn } from 'child_process'; +import { spawnPromisified } from '../common/index.mjs'; +import * as fixtures from '../common/fixtures.mjs'; +import assert from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -const Export1 = fixtures.path('/es-modules/es-note-unexpected-export-1.cjs'); -const Export2 = fixtures.path('/es-modules/es-note-unexpected-export-2.cjs'); -const Import1 = fixtures.path('/es-modules/es-note-unexpected-import-1.cjs'); -const Import2 = fixtures.path('/es-modules/es-note-promiserej-import-2.cjs'); -const Import3 = fixtures.path('/es-modules/es-note-unexpected-import-3.cjs'); -const Import4 = 
fixtures.path('/es-modules/es-note-unexpected-import-4.cjs'); -const Import5 = fixtures.path('/es-modules/es-note-unexpected-import-5.cjs'); -const Error1 = fixtures.path('/es-modules/es-note-error-1.mjs'); -const Error2 = fixtures.path('/es-modules/es-note-error-2.mjs'); -const Error3 = fixtures.path('/es-modules/es-note-error-3.mjs'); -const Error4 = fixtures.path('/es-modules/es-note-error-4.mjs'); // Expect note to be included in the error output const expectedNote = 'To load an ES module, ' + 'set "type": "module" in the package.json ' + 'or use the .mjs extension.'; -const expectedCode = 1; - -const pExport1 = spawn(process.execPath, [Export1]); -let pExport1Stderr = ''; -pExport1.stderr.setEncoding('utf8'); -pExport1.stderr.on('data', (data) => { - pExport1Stderr += data; -}); -pExport1.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(pExport1Stderr.includes(expectedNote), - `${expectedNote} not found in ${pExport1Stderr}`); -})); - - -const pExport2 = spawn(process.execPath, [Export2]); -let pExport2Stderr = ''; -pExport2.stderr.setEncoding('utf8'); -pExport2.stderr.on('data', (data) => { - pExport2Stderr += data; -}); -pExport2.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(pExport2Stderr.includes(expectedNote), - `${expectedNote} not found in ${pExport2Stderr}`); -})); - -const pImport1 = spawn(process.execPath, [Import1]); -let pImport1Stderr = ''; -pImport1.stderr.setEncoding('utf8'); -pImport1.stderr.on('data', (data) => { - pImport1Stderr += data; -}); -pImport1.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(pImport1Stderr.includes(expectedNote), - `${expectedNote} not found in ${pExport1Stderr}`); -})); - -// Note this test shouldn't include the note -const pImport2 = spawn(process.execPath, [Import2]); -let pImport2Stderr = ''; -pImport2.stderr.setEncoding('utf8'); -pImport2.stderr.on('data', (data) => { - pImport2Stderr += data; -}); -pImport2.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(!pImport2Stderr.includes(expectedNote), - `${expectedNote} must not be included in ${pImport2Stderr}`); -})); - -const pImport3 = spawn(process.execPath, [Import3]); -let pImport3Stderr = ''; -pImport3.stderr.setEncoding('utf8'); -pImport3.stderr.on('data', (data) => { - pImport3Stderr += data; -}); -pImport3.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(pImport3Stderr.includes(expectedNote), - `${expectedNote} not found in ${pImport3Stderr}`); -})); - - -const pImport4 = spawn(process.execPath, [Import4]); -let pImport4Stderr = ''; -pImport4.stderr.setEncoding('utf8'); -pImport4.stderr.on('data', (data) => { - pImport4Stderr += data; -}); -pImport4.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(pImport4Stderr.includes(expectedNote), - `${expectedNote} not found in ${pImport4Stderr}`); -})); - -// Must exit non-zero and show note -const pImport5 = spawn(process.execPath, [Import5]); -let pImport5Stderr = ''; -pImport5.stderr.setEncoding('utf8'); -pImport5.stderr.on('data', (data) => { - pImport5Stderr += data; -}); -pImport5.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(!pImport5Stderr.includes(expectedNote), - `${expectedNote} must not be included in ${pImport5Stderr}`); -})); - -const pError1 = spawn(process.execPath, [Error1]); -let pError1Stderr = ''; -pError1.stderr.setEncoding('utf8'); 
-pError1.stderr.on('data', (data) => { - pError1Stderr += data; -}); -pError1.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(pError1Stderr.includes('Error: some error')); - assert.ok(!pError1Stderr.includes(expectedNote), - `${expectedNote} must not be included in ${pError1Stderr}`); -})); - -const pError2 = spawn(process.execPath, [Error2]); -let pError2Stderr = ''; -pError2.stderr.setEncoding('utf8'); -pError2.stderr.on('data', (data) => { - pError2Stderr += data; -}); -pError2.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(pError2Stderr.includes('string')); - assert.ok(!pError2Stderr.includes(expectedNote), - `${expectedNote} must not be included in ${pError2Stderr}`); -})); - -const pError3 = spawn(process.execPath, [Error3]); -let pError3Stderr = ''; -pError3.stderr.setEncoding('utf8'); -pError3.stderr.on('data', (data) => { - pError3Stderr += data; -}); -pError3.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(pError3Stderr.includes('null')); - assert.ok(!pError3Stderr.includes(expectedNote), - `${expectedNote} must not be included in ${pError3Stderr}`); -})); - -const pError4 = spawn(process.execPath, [Error4]); -let pError4Stderr = ''; -pError4.stderr.setEncoding('utf8'); -pError4.stderr.on('data', (data) => { - pError4Stderr += data; +const mustIncludeMessage = { + getMessage: () => (stderr) => `${expectedNote} not found in ${stderr}`, + includeNote: true, +}; +const mustNotIncludeMessage = { + getMessage: () => (stderr) => `${expectedNote} must not be included in ${stderr}`, + includeNote: false, +}; + +describe('ESM: Errors for unexpected exports', { concurrency: true }, () => { + for ( + const { errorNeedle, filePath, getMessage, includeNote } + of [ + { + // name: '', + filePath: fixtures.path('/es-modules/es-note-unexpected-export-1.cjs'), + ...mustIncludeMessage, + }, + { + // name: '', + filePath: fixtures.path('/es-modules/es-note-unexpected-import-1.cjs'), + ...mustIncludeMessage, + }, + { + // name: '', + filePath: fixtures.path('/es-modules/es-note-promiserej-import-2.cjs'), + ...mustNotIncludeMessage, + }, + { + // name: '', + filePath: fixtures.path('/es-modules/es-note-unexpected-import-3.cjs'), + ...mustIncludeMessage, + }, + { + // name: '', + filePath: fixtures.path('/es-modules/es-note-unexpected-import-4.cjs'), + ...mustIncludeMessage, + }, + { + // name: '', + filePath: fixtures.path('/es-modules/es-note-unexpected-import-5.cjs'), + ...mustNotIncludeMessage, + }, + { + // name: '', + filePath: fixtures.path('/es-modules/es-note-error-1.mjs'), + ...mustNotIncludeMessage, + errorNeedle: /Error: some error/, + }, + { + // name: '', + filePath: fixtures.path('/es-modules/es-note-error-2.mjs'), + ...mustNotIncludeMessage, + errorNeedle: /string/, + }, + { + // name: '', + filePath: fixtures.path('/es-modules/es-note-error-3.mjs'), + ...mustNotIncludeMessage, + errorNeedle: /null/, + }, + { + // name: '', + filePath: fixtures.path('/es-modules/es-note-error-4.mjs'), + ...mustNotIncludeMessage, + errorNeedle: /undefined/, + }, + ] + ) { + it(`should ${includeNote ? '' : 'NOT'} include note`, async () => { + const { code, stderr } = await spawnPromisified(execPath, [filePath]); + + assert.strictEqual(code, 1); + + if (errorNeedle != null) assert.match(stderr, errorNeedle); + + const shouldIncludeNote = stderr.includes(expectedNote); + assert.ok( + includeNote ? 
shouldIncludeNote : !shouldIncludeNote, + `${filePath} ${getMessage(stderr)}`, + ); + }); + } }); -pError4.on('close', mustCall((code) => { - assert.strictEqual(code, expectedCode); - assert.ok(pError4Stderr.includes('undefined')); - assert.ok(!pError4Stderr.includes(expectedNote), - `${expectedNote} must not be included in ${pError4Stderr}`); -})); diff --git a/test/es-module/test-esm-cjs-main.js b/test/es-module/test-esm-cjs-main.js index 92f4124ccaab8a..bb209648356d10 100644 --- a/test/es-module/test-esm-cjs-main.js +++ b/test/es-module/test-esm-cjs-main.js @@ -1,21 +1,20 @@ 'use strict'; -const common = require('../common'); -const fixtures = require('../common/fixtures'); -const { spawn } = require('child_process'); -const assert = require('assert'); +const { spawnPromisified } = require('../common'); +const fixtures = require('../common/fixtures.js'); +const assert = require('node:assert'); +const { execPath } = require('node:process'); +const { describe, it } = require('node:test'); -const entry = fixtures.path('/es-modules/cjs.js'); -const child = spawn(process.execPath, [entry]); -child.stderr.setEncoding('utf8'); -let stdout = ''; -child.stdout.setEncoding('utf8'); -child.stdout.on('data', (data) => { - stdout += data; +describe('ESM: importing CJS', () => { + it('should work', async () => { + const { code, signal, stdout } = await spawnPromisified(execPath, [ + fixtures.path('/es-modules/cjs.js'), + ]); + + assert.strictEqual(code, 0); + assert.strictEqual(signal, null); + assert.strictEqual(stdout, 'executed\n'); + }); }); -child.on('close', common.mustCall((code, signal) => { - assert.strictEqual(code, 0); - assert.strictEqual(signal, null); - assert.strictEqual(stdout, 'executed\n'); -})); diff --git a/test/es-module/test-esm-encoded-path-native.js b/test/es-module/test-esm-encoded-path-native.js index b8f5719b6089ee..7574e55278bd1f 100644 --- a/test/es-module/test-esm-encoded-path-native.js +++ b/test/es-module/test-esm-encoded-path-native.js @@ -1,11 +1,18 @@ 'use strict'; -require('../common'); -const fixtures = require('../common/fixtures'); -const assert = require('assert'); -const { spawn } = require('child_process'); - -const native = fixtures.path('es-module-url/native.mjs'); -const child = spawn(process.execPath, [native]); -child.on('exit', (code) => { - assert.strictEqual(code, 1); + +const { spawnPromisified } = require('../common'); +const fixtures = require('../common/fixtures.js'); +const assert = require('node:assert'); +const { execPath } = require('node:process'); +const { describe, it } = require('node:test'); + + +describe('ESM: importing an encoded path', () => { + it('should throw', async () => { + const { code } = await spawnPromisified(execPath, [ + fixtures.path('es-module-url/native.mjs'), + ]); + + assert.strictEqual(code, 1); + }); }); diff --git a/test/es-module/test-esm-experimental-warnings.mjs b/test/es-module/test-esm-experimental-warnings.mjs index b6ef757a88302e..fc167c63584b87 100644 --- a/test/es-module/test-esm-experimental-warnings.mjs +++ b/test/es-module/test-esm-experimental-warnings.mjs @@ -1,55 +1,48 @@ -import { mustCall } from '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import { fileURL } from '../common/fixtures.mjs'; -import { doesNotMatch, match, strictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; +import { doesNotMatch, match, strictEqual } from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 
'node:test'; -// Verify no warnings are printed when no experimental features are enabled or used -{ - const input = `import ${JSON.stringify(fileURL('es-module-loaders', 'module-named-exports.mjs'))}`; - const child = spawn(execPath, [ - '--input-type=module', - '--eval', - input, - ]); - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { stderr += data; }); - child.on('close', mustCall((code, signal) => { - strictEqual(code, 0); - strictEqual(signal, null); +describe('ESM: warn for obsolete hooks provided', { concurrency: true }, () => { + it('should not print warnings when no experimental features are enabled or used', async () => { + const { code, signal, stderr } = await spawnPromisified(execPath, [ + '--input-type=module', + '--eval', + `import ${JSON.stringify(fileURL('es-module-loaders', 'module-named-exports.mjs'))}`, + ]); + doesNotMatch( stderr, /ExperimentalWarning/, new Error('No experimental warning(s) should be emitted when no experimental feature is enabled') ); - })); -} - -// Verify experimental warning is printed when experimental feature is enabled -for ( - const [experiment, arg] of [ - [/Custom ESM Loaders/, `--experimental-loader=${fileURL('es-module-loaders', 'hooks-custom.mjs')}`], - [/Network Imports/, '--experimental-network-imports'], - [/specifier resolution/, '--experimental-specifier-resolution=node'], - ] -) { - const input = `import ${JSON.stringify(fileURL('es-module-loaders', 'module-named-exports.mjs'))}`; - const child = spawn(execPath, [ - arg, - '--input-type=module', - '--eval', - input, - ]); - - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { stderr += data; }); - child.on('close', mustCall((code, signal) => { strictEqual(code, 0); strictEqual(signal, null); - match(stderr, /ExperimentalWarning/); - match(stderr, experiment); - })); -} + }); + + describe('experimental warnings for enabled experimental feature', () => { + for ( + const [experiment, arg] of [ + [/Custom ESM Loaders/, `--experimental-loader=${fileURL('es-module-loaders', 'hooks-custom.mjs')}`], + [/Network Imports/, '--experimental-network-imports'], + [/specifier resolution/, '--experimental-specifier-resolution=node'], + ] + ) { + it(`should print for ${experiment.toString().replaceAll('/', '')}`, async () => { + const { code, signal, stderr } = await spawnPromisified(execPath, [ + arg, + '--input-type=module', + '--eval', + `import ${JSON.stringify(fileURL('es-module-loaders', 'module-named-exports.mjs'))}`, + ]); + + match(stderr, /ExperimentalWarning/); + match(stderr, experiment); + strictEqual(code, 0); + strictEqual(signal, null); + }); + } + }); +}); diff --git a/test/es-module/test-esm-export-not-found.mjs b/test/es-module/test-esm-export-not-found.mjs index cdfe6df0fcde31..48ccd0f910227f 100644 --- a/test/es-module/test-esm-export-not-found.mjs +++ b/test/es-module/test-esm-export-not-found.mjs @@ -1,39 +1,48 @@ -import { mustCall } from '../common/index.mjs'; -import { path } from '../common/fixtures.mjs'; -import { match, notStrictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; +import { spawnPromisified } from '../common/index.mjs'; +import * as fixtures from '../common/fixtures.mjs'; +import assert from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -const importStatement = - 'import { foo, notfound } from \'./module-named-exports.mjs\';'; + +const importStatement = 'import { foo, 
notfound } from \'./module-named-exports.mjs\';'; const importStatementMultiline = `import { foo, notfound } from './module-named-exports.mjs'; `; -[importStatement, importStatementMultiline].forEach((input) => { - const child = spawn(execPath, [ - '--input-type=module', - '--eval', - input, - ], { - cwd: path('es-module-loaders'), - }); +describe('ESM: nonexistent exports', { concurrency: true }, () => { + for ( + const { name, input } + of [ + { + input: importStatement, + name: 'single-line import', + }, + { + input: importStatementMultiline, + name: 'multi-line import', + }, + ] + ) { + it(`should throw for nonexistent exports via ${name}`, async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + '--input-type=module', + '--eval', + input, + ], { + cwd: fixtures.path('es-module-loaders'), + }); - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { - stderr += data; - }); - child.on('close', mustCall((code, _signal) => { - notStrictEqual(code, 0); + assert.notStrictEqual(code, 0); - // SyntaxError: The requested module './module-named-exports.mjs' - // does not provide an export named 'notfound' - match(stderr, /SyntaxError:/); - // The quotes ensure that the path starts with ./ and not ../ - match(stderr, /'\.\/module-named-exports\.mjs'/); - match(stderr, /notfound/); - })); + // SyntaxError: The requested module './module-named-exports.mjs' + // does not provide an export named 'notfound' + assert.match(stderr, /SyntaxError:/); + // The quotes ensure that the path starts with ./ and not ../ + assert.match(stderr, /'\.\/module-named-exports\.mjs'/); + assert.match(stderr, /notfound/); + }); + } }); diff --git a/test/es-module/test-esm-import-json-named-export.mjs b/test/es-module/test-esm-import-json-named-export.mjs index 3c0f3af662c7cc..c8a4ad8dce3e5e 100644 --- a/test/es-module/test-esm-import-json-named-export.mjs +++ b/test/es-module/test-esm-import-json-named-export.mjs @@ -1,24 +1,22 @@ -import { mustCall } from '../common/index.mjs'; -import { path } from '../common/fixtures.mjs'; -import { match, notStrictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; +import { spawnPromisified } from '../common/index.mjs'; +import * as fixtures from '../common/fixtures.mjs'; +import assert from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -const child = spawn(execPath, [ - path('es-modules', 'import-json-named-export.mjs'), -]); -let stderr = ''; -child.stderr.setEncoding('utf8'); -child.stderr.on('data', (data) => { - stderr += data; -}); -child.on('close', mustCall((code, _signal) => { - notStrictEqual(code, 0); +describe('ESM: named JSON exports', { concurrency: true }, () => { + it('should throw, citing named import', async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + fixtures.path('es-modules', 'import-json-named-export.mjs'), + ]); + + // SyntaxError: The requested module '../experimental.json' + // does not provide an export named 'ofLife' + assert.match(stderr, /SyntaxError:/); + assert.match(stderr, /'\.\.\/experimental\.json'/); + assert.match(stderr, /'ofLife'/); - // SyntaxError: The requested module '../experimental.json' - // does not provide an export named 'ofLife' - match(stderr, /SyntaxError:/); - match(stderr, /'\.\.\/experimental\.json'/); - match(stderr, /'ofLife'/); -})); + assert.notStrictEqual(code, 0); + }); +}); diff --git a/test/es-module/test-esm-initialization.mjs 
b/test/es-module/test-esm-initialization.mjs index ab756c7a3619e1..2bfd16135a0189 100644 --- a/test/es-module/test-esm-initialization.mjs +++ b/test/es-module/test-esm-initialization.mjs @@ -1,30 +1,29 @@ -import '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import * as fixtures from '../common/fixtures.mjs'; import assert from 'node:assert'; -import { spawnSync } from 'node:child_process'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -{ // Verify unadulterated source is loaded when there are no loaders - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ +describe('ESM: ensure initialisation happens only once', { concurrency: true }, () => { + it(async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--loader', fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), '--no-warnings', fixtures.path('es-modules', 'runmain.mjs'), - ], - { encoding: 'utf8' }, - ); + ]); - // Length minus 1 because the first match is the needle. - const resolveHookRunCount = (stdout.match(/resolve passthru/g)?.length ?? 0) - 1; + // Length minus 1 because the first match is the needle. + const resolveHookRunCount = (stdout.match(/resolve passthru/g)?.length ?? 0) - 1; - assert.strictEqual(stderr, ''); - /** - * resolveHookRunCount = 2: - * 1. fixtures/…/runmain.mjs - * 2. node:module (imported by fixtures/…/runmain.mjs) - */ - assert.strictEqual(resolveHookRunCount, 2); - assert.strictEqual(status, 0); -} + assert.strictEqual(stderr, ''); + /** + * resolveHookRunCount = 2: + * 1. fixtures/…/runmain.mjs + * 2. node:module (imported by fixtures/…/runmain.mjs) + */ + assert.strictEqual(resolveHookRunCount, 2); + assert.strictEqual(code, 0); + }); +}); diff --git a/test/es-module/test-esm-invalid-extension.js b/test/es-module/test-esm-invalid-extension.js deleted file mode 100644 index cca7704f7994f0..00000000000000 --- a/test/es-module/test-esm-invalid-extension.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict'; -require('../common'); -const fixtures = require('../common/fixtures'); -const assert = require('assert'); -const { spawnSync } = require('child_process'); -const fixture = fixtures.path('/es-modules/import-invalid-ext.mjs'); -const child = spawnSync(process.execPath, [fixture]); -const errMsg = 'TypeError [ERR_UNKNOWN_FILE_EXTENSION]: Unknown file extension'; - -assert.strictEqual(child.status, 1); -assert.strictEqual(child.signal, null); -assert.strictEqual(child.stdout.toString().trim(), ''); -assert.ok(child.stderr.toString().includes(errMsg)); diff --git a/test/es-module/test-esm-invalid-pjson.js b/test/es-module/test-esm-invalid-pjson.js index cdbebb17b4bb34..f3a38018637aa3 100644 --- a/test/es-module/test-esm-invalid-pjson.js +++ b/test/es-module/test-esm-invalid-pjson.js @@ -1,27 +1,28 @@ 'use strict'; -const { mustCall, checkoutEOL } = require('../common'); -const fixtures = require('../common/fixtures'); -const { spawn } = require('child_process'); -const { strictEqual, ok } = require('assert'); +const { checkoutEOL, spawnPromisified } = require('../common'); +const fixtures = require('../common/fixtures.js'); +const assert = require('node:assert'); +const { execPath } = require('node:process'); +const { describe, it } = require('node:test'); -const entry = fixtures.path('/es-modules/import-invalid-pjson.mjs'); -const invalidJson = fixtures.path('/node_modules/invalid-pjson/package.json'); -const child = spawn(process.execPath, [entry]); 
-child.stderr.setEncoding('utf8'); -let stderr = ''; -child.stderr.on('data', (data) => { - stderr += data; +describe('ESM: Package.json', { concurrency: true }, () => { + it('should throw on invalid pjson', async () => { + const entry = fixtures.path('/es-modules/import-invalid-pjson.mjs'); + const invalidJson = fixtures.path('/node_modules/invalid-pjson/package.json'); + + const { code, signal, stderr } = await spawnPromisified(execPath, [entry]); + + assert.ok( + stderr.includes( + `[ERR_INVALID_PACKAGE_CONFIG]: Invalid package config ${invalidJson} ` + + `while importing "invalid-pjson" from ${entry}. ` + + `Unexpected token } in JSON at position ${12 + checkoutEOL.length * 2}` + ), + stderr + ); + assert.strictEqual(code, 1); + assert.strictEqual(signal, null); + }); }); -child.on('close', mustCall((code, signal) => { - strictEqual(code, 1); - strictEqual(signal, null); - ok( - stderr.includes( - `[ERR_INVALID_PACKAGE_CONFIG]: Invalid package config ${invalidJson} ` + - `while importing "invalid-pjson" from ${entry}. ` + - `Unexpected token } in JSON at position ${12 + checkoutEOL.length * 2}` - ), - stderr); -})); diff --git a/test/es-module/test-esm-json.mjs b/test/es-module/test-esm-json.mjs index 6d55419eedc857..14c86bac80af98 100644 --- a/test/es-module/test-esm-json.mjs +++ b/test/es-module/test-esm-json.mjs @@ -1,27 +1,25 @@ -import '../common/index.mjs'; -import { path } from '../common/fixtures.mjs'; -import { strictEqual, ok } from 'assert'; -import { spawn } from 'child_process'; +import { spawnPromisified } from '../common/index.mjs'; +import * as fixtures from '../common/fixtures.mjs'; +import assert from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; import secret from '../fixtures/experimental.json' assert { type: 'json' }; -strictEqual(secret.ofLife, 42); -// Test warning message -const child = spawn(process.execPath, [ - path('/es-modules/json-modules.mjs'), -]); +describe('ESM: importing JSON', () => { + it('should load JSON', () => { + assert.strictEqual(secret.ofLife, 42); + }); -let stderr = ''; -child.stderr.setEncoding('utf8'); -child.stderr.on('data', (data) => { - stderr += data; -}); -child.on('close', (code, signal) => { - strictEqual(code, 0); - strictEqual(signal, null); - ok(stderr.toString().includes( - 'ExperimentalWarning: Importing JSON modules is an experimental feature.
' + - 'This feature could change at any time' - )); + it('should print an experimental warning', async () => { + const { code, signal, stderr } = await spawnPromisified(execPath, [ + fixtures.path('/es-modules/json-modules.mjs'), + ]); + + assert.match(stderr, /ExperimentalWarning/); + assert.match(stderr, /JSON modules/); + assert.strictEqual(code, 0); + assert.strictEqual(signal, null); + }); }); diff --git a/test/es-module/test-esm-loader-chaining.mjs b/test/es-module/test-esm-loader-chaining.mjs index f1ea13495ca5c4..14303cb5c42665 100644 --- a/test/es-module/test-esm-loader-chaining.mjs +++ b/test/es-module/test-esm-loader-chaining.mjs @@ -1,7 +1,9 @@ -import '../common/index.mjs'; -import fixtures from '../common/fixtures.js'; +import { spawnPromisified } from '../common/index.mjs'; +import * as fixtures from '../common/fixtures.mjs'; import assert from 'node:assert'; -import { spawnSync } from 'node:child_process'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; + const setupArgs = [ '--no-warnings', @@ -14,420 +16,420 @@ const commonArgs = [ commonInput, ]; -{ // Verify unadulterated source is loaded when there are no loaders - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - ...setupArgs, - 'import fs from "node:fs"; console.log(typeof fs?.constants?.F_OK )', - ], - { encoding: 'utf8' }, - ); - - assert.strictEqual(stderr, ''); - assert.match(stdout, /number/); // node:fs is an object - assert.strictEqual(status, 0); -} - -{ // Verify loaded source is properly different when only load changes something - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.strictEqual(stderr, ''); - assert.match(stdout, /load passthru/); - assert.match(stdout, /resolve passthru/); - assert.match(stdout, /foo/); - assert.strictEqual(status, 0); -} - -{ // Verify multiple changes from hooks result in proper output - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-foo.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-42.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.strictEqual(stderr, ''); - assert.match(stdout, /resolve 42/); // It did go thru resolve-42 - assert.match(stdout, /foo/); // LIFO, so resolve-foo won - assert.strictEqual(status, 0); -} - -{ // Verify modifying context within resolve chain is respected - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-42.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-receiving-modified-context.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-passing-modified-context.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - 
assert.strictEqual(stderr, ''); - assert.match(stdout, /bar/); - assert.strictEqual(status, 0); -} - -{ // Verify multiple changes from hooks result in proper output - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-42.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-foo.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.strictEqual(stderr, ''); - assert.match(stdout, /resolve foo/); // It did go thru resolve-foo - assert.match(stdout, /42/); // LIFO, so resolve-42 won - assert.strictEqual(status, 0); -} - -{ // Verify multiple calls to next within same loader receive correct "next" fn - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-foo.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-multiple-next-calls.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - const countFoos = stdout.match(/resolve foo/g)?.length; - - assert.strictEqual(stderr, ''); - assert.strictEqual(countFoos, 2); - assert.strictEqual(status, 0); -} - -{ // Verify next function's `name` is correct - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-42.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.strictEqual(stderr, ''); - assert.match(stdout, /next: nextResolve/); - assert.strictEqual(status, 0); -} - -{ // Verify error thrown for incomplete resolve chain, citing errant loader & hook - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-incomplete.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.match(stdout, /resolve passthru/); - assert.match(stderr, /ERR_LOADER_CHAIN_INCOMPLETE/); - assert.match(stderr, /loader-resolve-incomplete\.mjs/); - assert.match(stderr, /'resolve'/); - assert.strictEqual(status, 1); -} - -{ // Verify error NOT thrown when nested resolve hook signaled a short circuit - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-next-modified.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.strictEqual(stderr, ''); - assert.strictEqual(stdout.trim(), 'foo'); - assert.strictEqual(status, 0); -} - -{ // Verify error NOT thrown when nested load hook signaled a short circuit - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - 
fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-42.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-next-modified.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.strictEqual(stderr, ''); - assert.match(stdout, /421/); - assert.strictEqual(status, 0); -} - -{ // Verify resolve chain does break and throws appropriately - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-incomplete.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.doesNotMatch(stdout, /resolve passthru/); - assert.match(stderr, /ERR_LOADER_CHAIN_INCOMPLETE/); - assert.match(stderr, /loader-resolve-incomplete\.mjs/); - assert.match(stderr, /'resolve'/); - assert.strictEqual(status, 1); -} - -{ // Verify error thrown for incomplete load chain, citing errant loader & hook - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-incomplete.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.match(stdout, /load passthru/); - assert.match(stderr, /ERR_LOADER_CHAIN_INCOMPLETE/); - assert.match(stderr, /loader-load-incomplete\.mjs/); - assert.match(stderr, /'load'/); - assert.strictEqual(status, 1); -} - -{ // Verify load chain does break and throws appropriately - const { status, stderr, stdout } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-incomplete.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.doesNotMatch(stdout, /load passthru/); - assert.match(stderr, /ERR_LOADER_CHAIN_INCOMPLETE/); - assert.match(stderr, /loader-load-incomplete\.mjs/); - assert.match(stderr, /'load'/); - assert.strictEqual(status, 1); -} - -{ // Verify error thrown when invalid `specifier` argument passed to `nextResolve` - const { status, stderr } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-bad-next-specifier.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.strictEqual(status, 1); - assert.match(stderr, /ERR_INVALID_ARG_TYPE/); - assert.match(stderr, /loader-resolve-bad-next-specifier\.mjs/); - assert.match(stderr, /'resolve' hook's nextResolve\(\) specifier/); -} - -{ // Verify error thrown when resolve hook is invalid - const { status, stderr } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-null-return.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.strictEqual(status, 1); - assert.match(stderr, 
/ERR_INVALID_RETURN_VALUE/); - assert.match(stderr, /loader-resolve-null-return\.mjs/); - assert.match(stderr, /'resolve' hook's nextResolve\(\)/); - assert.match(stderr, /an object/); - assert.match(stderr, /got null/); -} - -{ // Verify error thrown when invalid `context` argument passed to `nextResolve` - const { status, stderr } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-resolve-bad-next-context.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.match(stderr, /ERR_INVALID_ARG_TYPE/); - assert.match(stderr, /loader-resolve-bad-next-context\.mjs/); - assert.match(stderr, /'resolve' hook's nextResolve\(\) context/); - assert.strictEqual(status, 1); -} - -{ // Verify error thrown when load hook is invalid - const { status, stderr } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-null-return.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.strictEqual(status, 1); - assert.match(stderr, /ERR_INVALID_RETURN_VALUE/); - assert.match(stderr, /loader-load-null-return\.mjs/); - assert.match(stderr, /'load' hook's nextLoad\(\)/); - assert.match(stderr, /an object/); - assert.match(stderr, /got null/); -} - -{ // Verify error thrown when invalid `url` argument passed to `nextLoad` - const { status, stderr } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-bad-next-url.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.match(stderr, /ERR_INVALID_ARG_TYPE/); - assert.match(stderr, /loader-load-bad-next-url\.mjs/); - assert.match(stderr, /'load' hook's nextLoad\(\) url/); - assert.strictEqual(status, 1); -} - -{ // Verify error thrown when invalid `url` argument passed to `nextLoad` - const { status, stderr } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-impersonating-next-url.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.match(stderr, /ERR_INVALID_ARG_VALUE/); - assert.match(stderr, /loader-load-impersonating-next-url\.mjs/); - assert.match(stderr, /'load' hook's nextLoad\(\) url/); - assert.strictEqual(status, 1); -} - -{ // Verify error thrown when invalid `context` argument passed to `nextLoad` - const { status, stderr } = spawnSync( - process.execPath, - [ - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), - '--loader', - fixtures.fileURL('es-module-loaders', 'loader-load-bad-next-context.mjs'), - ...commonArgs, - ], - { encoding: 'utf8' }, - ); - - assert.match(stderr, /ERR_INVALID_ARG_TYPE/); - assert.match(stderr, /loader-load-bad-next-context\.mjs/); - assert.match(stderr, /'load' hook's nextLoad\(\) context/); - assert.strictEqual(status, 1); -} +describe('ESM: loader chaining', { concurrency: true }, () => { + it('should load unadulterated source when there are no loaders', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + ...setupArgs, + 'import fs from "node:fs"; console.log(typeof fs?.constants?.F_OK )', + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(stderr, ''); + 
assert.match(stdout, /number/); // node:fs is an object + assert.strictEqual(code, 0); + }); + + it('should load properly different source when only load changes something', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(stderr, ''); + assert.match(stdout, /load passthru/); + assert.match(stdout, /resolve passthru/); + assert.match(stdout, /foo/); + assert.strictEqual(code, 0); + }); + + it('should result in proper output from multiple changes in resolve hooks', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-foo.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-42.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(stderr, ''); + assert.match(stdout, /resolve 42/); // It did go thru resolve-42 + assert.match(stdout, /foo/); // LIFO, so resolve-foo won + assert.strictEqual(code, 0); + }); + + it('should respect modified context within resolve chain', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-42.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-receiving-modified-context.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-passing-modified-context.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(stderr, ''); + assert.match(stdout, /bar/); + assert.strictEqual(code, 0); + }); + + it('should result in proper output from multiple changes in resolve hooks', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-42.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-foo.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(stderr, ''); + assert.match(stdout, /resolve foo/); // It did go thru resolve-foo + assert.match(stdout, /42/); // LIFO, so resolve-42 won + assert.strictEqual(code, 0); + }); + + it('should provide the correct "next" fn when multiple calls to next within same loader', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-foo.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-multiple-next-calls.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 
'loader-load-foo-or-42.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + const countFoos = stdout.match(/resolve foo/g)?.length; + + assert.strictEqual(stderr, ''); + assert.strictEqual(countFoos, 2); + assert.strictEqual(code, 0); + }); + + it('should use the correct `name` for next\'s function', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-42.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(stderr, ''); + assert.match(stdout, /next: nextResolve/); + assert.strictEqual(code, 0); + }); + + it('should throw for incomplete resolve chain, citing errant loader & hook', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-incomplete.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + assert.match(stdout, /resolve passthru/); + assert.match(stderr, /ERR_LOADER_CHAIN_INCOMPLETE/); + assert.match(stderr, /loader-resolve-incomplete\.mjs/); + assert.match(stderr, /'resolve'/); + assert.strictEqual(code, 1); + }); + + it('should NOT throw when nested resolve hook signaled a short circuit', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-next-modified.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(stderr, ''); + assert.strictEqual(stdout.trim(), 'foo'); + assert.strictEqual(code, 0); + }); + + it('should NOT throw when nested load hook signaled a short circuit', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-shortcircuit.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-42.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-next-modified.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(stderr, ''); + assert.match(stdout, /421/); + assert.strictEqual(code, 0); + }); + + it('should throw when the resolve chain is broken', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-incomplete.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-foo-or-42.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.doesNotMatch(stdout, /resolve passthru/); + assert.match(stderr, /ERR_LOADER_CHAIN_INCOMPLETE/); + assert.match(stderr, /loader-resolve-incomplete\.mjs/); + assert.match(stderr, /'resolve'/); + assert.strictEqual(code, 1); + }); + + it('should throw for incomplete load chain, citing errant loader 
& hook', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-incomplete.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.match(stdout, /load passthru/); + assert.match(stderr, /ERR_LOADER_CHAIN_INCOMPLETE/); + assert.match(stderr, /loader-load-incomplete\.mjs/); + assert.match(stderr, /'load'/); + assert.strictEqual(code, 1); + }); + + it('should throw when the load chain is broken', async () => { + const { code, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-incomplete.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.doesNotMatch(stdout, /load passthru/); + assert.match(stderr, /ERR_LOADER_CHAIN_INCOMPLETE/); + assert.match(stderr, /loader-load-incomplete\.mjs/); + assert.match(stderr, /'load'/); + assert.strictEqual(code, 1); + }); + + it('should throw when invalid `specifier` argument passed to `nextResolve`', async () => { + const { code, stderr } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-bad-next-specifier.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(code, 1); + assert.match(stderr, /ERR_INVALID_ARG_TYPE/); + assert.match(stderr, /loader-resolve-bad-next-specifier\.mjs/); + assert.match(stderr, /'resolve' hook's nextResolve\(\) specifier/); + }); + + it('should throw when resolve hook is invalid', async () => { + const { code, stderr } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-null-return.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(code, 1); + assert.match(stderr, /ERR_INVALID_RETURN_VALUE/); + assert.match(stderr, /loader-resolve-null-return\.mjs/); + assert.match(stderr, /'resolve' hook's nextResolve\(\)/); + assert.match(stderr, /an object/); + assert.match(stderr, /got null/); + }); + + it('should throw when invalid `context` argument passed to `nextResolve`', async () => { + const { code, stderr } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-resolve-bad-next-context.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.match(stderr, /ERR_INVALID_ARG_TYPE/); + assert.match(stderr, /loader-resolve-bad-next-context\.mjs/); + assert.match(stderr, /'resolve' hook's nextResolve\(\) context/); + assert.strictEqual(code, 1); + }); + + it('should throw when load hook is invalid', async () => { + const { code, stderr } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-null-return.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.strictEqual(code, 
1); + assert.match(stderr, /ERR_INVALID_RETURN_VALUE/); + assert.match(stderr, /loader-load-null-return\.mjs/); + assert.match(stderr, /'load' hook's nextLoad\(\)/); + assert.match(stderr, /an object/); + assert.match(stderr, /got null/); + }); + + it('should throw when invalid `url` argument passed to `nextLoad`', async () => { + const { code, stderr } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-bad-next-url.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.match(stderr, /ERR_INVALID_ARG_TYPE/); + assert.match(stderr, /loader-load-bad-next-url\.mjs/); + assert.match(stderr, /'load' hook's nextLoad\(\) url/); + assert.strictEqual(code, 1); + }); + + it('should throw when invalid `url` argument passed to `nextLoad`', async () => { + const { code, stderr } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-impersonating-next-url.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.match(stderr, /ERR_INVALID_ARG_VALUE/); + assert.match(stderr, /loader-load-impersonating-next-url\.mjs/); + assert.match(stderr, /'load' hook's nextLoad\(\) url/); + assert.strictEqual(code, 1); + }); + + it('should throw when invalid `context` argument passed to `nextLoad`', async () => { + const { code, stderr } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-passthru.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-load-bad-next-context.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + assert.match(stderr, /ERR_INVALID_ARG_TYPE/); + assert.match(stderr, /loader-load-bad-next-context\.mjs/); + assert.match(stderr, /'load' hook's nextLoad\(\) context/); + assert.strictEqual(code, 1); + }); +}); diff --git a/test/es-module/test-esm-loader-http-imports.mjs b/test/es-module/test-esm-loader-http-imports.mjs index 5f6cc47f388271..ad42829f5d1cee 100644 --- a/test/es-module/test-esm-loader-http-imports.mjs +++ b/test/es-module/test-esm-loader-http-imports.mjs @@ -1,10 +1,11 @@ -import { mustCall } from '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import fixtures from '../common/fixtures.js'; -import { strictEqual } from 'node:assert'; -import { spawn } from 'node:child_process'; +import assert from 'node:assert'; import http from 'node:http'; import path from 'node:path'; +import { execPath } from 'node:process'; import { promisify } from 'node:util'; +import { describe, it } from 'node:test'; const files = { @@ -40,33 +41,31 @@ const { port, } = server.address(); -{ // Verify nested HTTP imports work - const child = spawn( // ! `spawn` MUST be used (vs `spawnSync`) to avoid blocking the event loop - process.execPath, - [ - '--no-warnings', - '--loader', - fixtures.fileURL('es-module-loaders', 'http-loader.mjs'), - '--input-type=module', - '--eval', - `import * as main from 'http://${host}:${port}/main.mjs'; console.log(main)`, - ] - ); +/** + * ! If more cases are added to this test, they cannot (yet) be concurrent because there is no + * ! `afterAll` teardown in which to close the server. + */ - let stderr = ''; - let stdout = ''; +describe('ESM: http import via loader', { concurrency: false }, () => { + it('should work', async () => { + // ! 
MUST NOT use spawnSync to avoid blocking the event loop + const { code, signal, stderr, stdout } = await spawnPromisified( + execPath, + [ + '--no-warnings', + '--loader', + fixtures.fileURL('es-module-loaders', 'http-loader.mjs'), + '--input-type=module', + '--eval', + `import * as main from 'http://${host}:${port}/main.mjs'; console.log(main)`, + ] + ); - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => stderr += data); - child.stdout.setEncoding('utf8'); - child.stdout.on('data', (data) => stdout += data); + assert.strictEqual(stderr, ''); + assert.strictEqual(stdout, '[Module: null prototype] { sum: [Function: sum] }\n'); + assert.strictEqual(code, 0); + assert.strictEqual(signal, null); - child.on('close', mustCall((code, signal) => { - strictEqual(stderr, ''); - strictEqual(stdout, '[Module: null prototype] { sum: [Function: sum] }\n'); - strictEqual(code, 0); - strictEqual(signal, null); - - server.close(); - })); -} + server.close(); // ! This MUST come after the final test, but inside the async `it` function + }); +}); diff --git a/test/es-module/test-esm-loader-not-found.mjs b/test/es-module/test-esm-loader-not-found.mjs index 275f0b0f1e8515..2abaf3078d113d 100644 --- a/test/es-module/test-esm-loader-not-found.mjs +++ b/test/es-module/test-esm-loader-not-found.mjs @@ -1,27 +1,24 @@ -import { mustCall } from '../common/index.mjs'; -import { path } from '../common/fixtures.mjs'; -import { match, ok, notStrictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; +import { spawnPromisified } from '../common/index.mjs'; +import * as fixtures from '../common/fixtures.mjs'; +import assert from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -const child = spawn(execPath, [ - '--experimental-loader', - 'i-dont-exist', - path('print-error-message.js'), -]); -let stderr = ''; -child.stderr.setEncoding('utf8'); -child.stderr.on('data', (data) => { - stderr += data; -}); -child.on('close', mustCall((code, _signal) => { - notStrictEqual(code, 0); +describe('ESM: nonexistent loader', () => { + it('should throw', async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + '--experimental-loader', + 'i-dont-exist', + fixtures.path('print-error-message.js'), + ]); + + assert.notStrictEqual(code, 0); - // Error [ERR_MODULE_NOT_FOUND]: Cannot find package 'i-dont-exist' - // imported from - match(stderr, /ERR_MODULE_NOT_FOUND/); - match(stderr, /'i-dont-exist'/); + // Error [ERR_MODULE_NOT_FOUND]: Cannot find package 'i-dont-exist' imported from + assert.match(stderr, /ERR_MODULE_NOT_FOUND/); + assert.match(stderr, /'i-dont-exist'/); - ok(!stderr.includes('Bad command or file name')); -})); + assert.ok(!stderr.includes('Bad command or file name')); + }); +}); diff --git a/test/es-module/test-esm-loader-obsolete-hooks.mjs b/test/es-module/test-esm-loader-obsolete-hooks.mjs index eff4104fc265ae..fa0baef8a216b7 100644 --- a/test/es-module/test-esm-loader-obsolete-hooks.mjs +++ b/test/es-module/test-esm-loader-obsolete-hooks.mjs @@ -1,30 +1,28 @@ -import { mustCall } from '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import { fileURL, path } from '../common/fixtures.mjs'; -import { match, notStrictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; +import { match, notStrictEqual } from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; 
-const child = spawn(execPath, [ - '--no-warnings', - '--throw-deprecation', - '--experimental-loader', - fileURL('es-module-loaders', 'hooks-obsolete.mjs').href, - path('print-error-message.js'), -]); -let stderr = ''; -child.stderr.setEncoding('utf8'); -child.stderr.on('data', (data) => { - stderr += data; -}); -child.on('close', mustCall((code, _signal) => { - notStrictEqual(code, 0); +describe('ESM: deprecation warnings for obsolete hooks', { concurrency: true }, () => { + it(async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + '--no-warnings', + '--throw-deprecation', + '--experimental-loader', + fileURL('es-module-loaders', 'hooks-obsolete.mjs').href, + path('print-error-message.js'), + ]); + + // DeprecationWarning: Obsolete loader hook(s) supplied and will be ignored: + // dynamicInstantiate, getFormat, getSource, transformSource + match(stderr, /DeprecationWarning:/); + match(stderr, /dynamicInstantiate/); + match(stderr, /getFormat/); + match(stderr, /getSource/); + match(stderr, /transformSource/); - // DeprecationWarning: Obsolete loader hook(s) supplied and will be ignored: - // dynamicInstantiate, getFormat, getSource, transformSource - match(stderr, /DeprecationWarning:/); - match(stderr, /dynamicInstantiate/); - match(stderr, /getFormat/); - match(stderr, /getSource/); - match(stderr, /transformSource/); -})); + notStrictEqual(code, 0); + }); +}); diff --git a/test/es-module/test-esm-loader-thenable.mjs b/test/es-module/test-esm-loader-thenable.mjs index c8c3ec2778e7e9..5e802b8393bcd4 100644 --- a/test/es-module/test-esm-loader-thenable.mjs +++ b/test/es-module/test-esm-loader-thenable.mjs @@ -1,65 +1,43 @@ -import { mustCall } from '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import { fileURL, path } from '../common/fixtures.mjs'; import { match, ok, notStrictEqual, strictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -{ - const child = spawn(execPath, [ - '--experimental-loader', - fileURL('es-module-loaders', 'thenable-load-hook.mjs').href, - path('es-modules', 'test-esm-ok.mjs'), - ]); - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { - stderr += data; - }); - child.on('close', mustCall((code, _signal) => { +describe('ESM: thenable loader hooks', { concurrency: true }, () => { + it('should behave as a normal promise resolution', async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + '--experimental-loader', + fileURL('es-module-loaders', 'thenable-load-hook.mjs').href, + path('es-modules', 'test-esm-ok.mjs'), + ]); + strictEqual(code, 0); ok(!stderr.includes('must not call')); - })); -} + }); -{ - const child = spawn(execPath, [ - '--experimental-loader', - fileURL('es-module-loaders', 'thenable-load-hook-rejected.mjs').href, - path('es-modules', 'test-esm-ok.mjs'), - ]); + it('should crash the node process rejection with an error', async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + '--experimental-loader', + fileURL('es-module-loaders', 'thenable-load-hook-rejected.mjs').href, + path('es-modules', 'test-esm-ok.mjs'), + ]); - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { - stderr += data; - }); - child.on('close', mustCall((code, _signal) => { notStrictEqual(code, 0); - match(stderr, /\sError: must crash the process\r?\n/); - 
ok(!stderr.includes('must not call')); - })); -} + }); -{ - const child = spawn(execPath, [ - '--experimental-loader', - fileURL('es-module-loaders', 'thenable-load-hook-rejected-no-arguments.mjs').href, - path('es-modules', 'test-esm-ok.mjs'), - ]); + it('should just reject without an error (but NOT crash the node process)', async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + '--experimental-loader', + fileURL('es-module-loaders', 'thenable-load-hook-rejected-no-arguments.mjs').href, + path('es-modules', 'test-esm-ok.mjs'), + ]); - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { - stderr += data; - }); - child.on('close', mustCall((code, _signal) => { notStrictEqual(code, 0); - match(stderr, /\sundefined\r?\n/); - ok(!stderr.includes('must not call')); - })); -} + }); +}); diff --git a/test/es-module/test-esm-loader-with-syntax-error.mjs b/test/es-module/test-esm-loader-with-syntax-error.mjs index d973e72975e88f..0ed995ad510ee7 100644 --- a/test/es-module/test-esm-loader-with-syntax-error.mjs +++ b/test/es-module/test-esm-loader-with-syntax-error.mjs @@ -1,24 +1,20 @@ -import { mustCall } from '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import { fileURL, path } from '../common/fixtures.mjs'; -import { match, ok, notStrictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; +import { match, ok, notStrictEqual } from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -const child = spawn(execPath, [ - '--experimental-loader', - fileURL('es-module-loaders', 'syntax-error.mjs').href, - path('print-error-message.js'), -]); -let stderr = ''; -child.stderr.setEncoding('utf8'); -child.stderr.on('data', (data) => { - stderr += data; -}); -child.on('close', mustCall((code, _signal) => { - notStrictEqual(code, 0); - - match(stderr, /SyntaxError:/); +describe('ESM: loader with syntax error', { concurrency: true }, () => { + it('should crash the node process', async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + '--experimental-loader', + fileURL('es-module-loaders', 'syntax-error.mjs').href, + path('print-error-message.js'), + ]); - ok(!stderr.includes('Bad command or file name')); -})); + match(stderr, /SyntaxError:/); + ok(!stderr.includes('Bad command or file name')); + notStrictEqual(code, 0); + }); +}); diff --git a/test/es-module/test-esm-module-not-found-commonjs-hint.mjs b/test/es-module/test-esm-module-not-found-commonjs-hint.mjs index 58f70d0b685391..51633564f81458 100644 --- a/test/es-module/test-esm-module-not-found-commonjs-hint.mjs +++ b/test/es-module/test-esm-module-not-found-commonjs-hint.mjs @@ -1,35 +1,34 @@ -import { mustCall } from '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import { fixturesDir } from '../common/fixtures.mjs'; -import { match, notStrictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; +import { match, notStrictEqual } from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -[ - { - input: 'import "./print-error-message"', - // Did you mean to import ../print-error-message.js? 
- expected: / \.\.\/print-error-message\.js\?/, - }, - { - input: 'import obj from "some_module/obj"', - expected: / some_module\/obj\.js\?/, - }, -].forEach(({ input, expected }) => { - const child = spawn(execPath, [ - '--input-type=module', - '--eval', - input, - ], { - cwd: fixturesDir, - }); - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { - stderr += data; - }); - child.on('close', mustCall((code, _signal) => { - notStrictEqual(code, 0); +describe('ESM: module not found hint', { concurrency: true }, () => { + for ( + const { input, expected } + of [ + { + input: 'import "./print-error-message"', + // Did you mean to import ../print-error-message.js? + expected: / \.\.\/print-error-message\.js\?/, + }, + { + input: 'import obj from "some_module/obj"', + expected: / some_module\/obj\.js\?/, + }, + ] + ) it('should cite a variant form', async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + '--input-type=module', + '--eval', + input, + ], { + cwd: fixturesDir, + }); + match(stderr, expected); - })); + notStrictEqual(code, 0); + }); }); diff --git a/test/es-module/test-esm-non-js.mjs b/test/es-module/test-esm-non-js.mjs index 749cd0b6132086..2630d3136a42ba 100644 --- a/test/es-module/test-esm-non-js.mjs +++ b/test/es-module/test-esm-non-js.mjs @@ -1,23 +1,20 @@ -import { mustCall } from '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import { fileURL } from '../common/fixtures.mjs'; -import { match, strictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; +import { match, strictEqual } from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -// Verify non-js extensions fail for ESM -const child = spawn(execPath, [ - '--input-type=module', - '--eval', - `import ${JSON.stringify(fileURL('es-modules', 'file.unknown'))}`, -]); -let stderr = ''; -child.stderr.setEncoding('utf8'); -child.stderr.on('data', (data) => { - stderr += data; +describe('ESM: non-js extensions fail', { concurrency: true }, () => { + it(async () => { + const { code, stderr, signal } = await spawnPromisified(execPath, [ + '--input-type=module', + '--eval', + `import ${JSON.stringify(fileURL('es-modules', 'file.unknown'))}`, + ]); + + match(stderr, /ERR_UNKNOWN_FILE_EXTENSION/); + strictEqual(code, 1); + strictEqual(signal, null); + }); }); -child.on('close', mustCall((code, signal) => { - strictEqual(code, 1); - strictEqual(signal, null); - match(stderr, /ERR_UNKNOWN_FILE_EXTENSION/); -})); diff --git a/test/es-module/test-esm-nowarn-exports.mjs b/test/es-module/test-esm-nowarn-exports.mjs index 57d5bc58c72356..695e924e03ccbe 100644 --- a/test/es-module/test-esm-nowarn-exports.mjs +++ b/test/es-module/test-esm-nowarn-exports.mjs @@ -1,25 +1,19 @@ -import '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import { path } from '../common/fixtures.mjs'; -import { strictEqual, ok } from 'assert'; -import { spawn } from 'child_process'; +import { strictEqual } from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -const child = spawn(process.execPath, [ - '--experimental-import-meta-resolve', - path('/es-modules/import-resolve-exports.mjs'), -]); -let stderr = ''; -child.stderr.setEncoding('utf8'); -child.stderr.on('data', (data) => { - stderr += data; -}); -child.on('close', (code, signal) => { - strictEqual(code, 0); - strictEqual(signal, null); - 
ok(!stderr.toString().includes( - 'ExperimentalWarning: The ESM module loader is experimental' - )); - ok(!stderr.toString().includes( - 'ExperimentalWarning: Conditional exports' - )); +describe('ESM: experiemental warning for import.meta.resolve', { concurrency: true }, () => { + it('should not warn when caught', async () => { + const { code, signal, stderr } = await spawnPromisified(execPath, [ + '--experimental-import-meta-resolve', + path('es-modules/import-resolve-exports.mjs'), + ]); + + strictEqual(stderr, ''); + strictEqual(code, 0); + strictEqual(signal, null); + }); }); diff --git a/test/es-module/test-esm-preserve-symlinks-main.js b/test/es-module/test-esm-preserve-symlinks-main.js index b8fb301e02d85b..6f921f656fe22f 100644 --- a/test/es-module/test-esm-preserve-symlinks-main.js +++ b/test/es-module/test-esm-preserve-symlinks-main.js @@ -39,15 +39,15 @@ function doTest(flags, done) { // dictates that it'll resolve relative imports in the main file relative to // the symlink, and not relative to the symlink target; the file structure set // up above requires this to not crash when loading ./submodule_link.js - spawn(process.execPath, - flags.concat([ - '--preserve-symlinks', - '--preserve-symlinks-main', entry_link_absolute_path, - ]), - { stdio: 'inherit' }).on('exit', (code) => { - assert.strictEqual(code, 0); - done(); - }); + spawn(process.execPath, [ + '--preserve-symlinks', + '--preserve-symlinks-main', + entry_link_absolute_path, + ], { stdio: 'inherit' }) + .on('exit', (code) => { + assert.strictEqual(code, 0); + done(); + }); } // First test the commonjs module loader diff --git a/test/es-module/test-esm-repl-imports.js b/test/es-module/test-esm-repl-imports.js index d2b39e05fb0588..9547824756f5ec 100644 --- a/test/es-module/test-esm-repl-imports.js +++ b/test/es-module/test-esm-repl-imports.js @@ -1,19 +1,28 @@ 'use strict'; + const { mustCall } = require('../common'); -const assert = require('assert'); const fixtures = require('../common/fixtures'); -const { spawn } = require('child_process'); +const assert = require('node:assert'); +const { spawn } = require('node:child_process'); +const { execPath } = require('node:process'); +const { describe, it } = require('node:test'); -const child = spawn(process.execPath, [ - '--interactive', -], { - cwd: fixtures.path('es-modules', 'pkgimports'), -}); -child.stdin.end( - 'try{require("#test");await import("#test")}catch{process.exit(-1)}' -); +describe('ESM: REPL runs', { concurrency: true }, () => { + it((context, done) => { + const child = spawn(execPath, [ + '--interactive', + ], { + cwd: fixtures.path('es-modules', 'pkgimports'), + }); -child.on('exit', mustCall((code) => { - assert.strictEqual(code, 0); -})); + child.stdin.end( + 'try{require("#test");await import("#test")}catch{process.exit(-1)}' + ); + + child.on('exit', mustCall((code) => { + assert.strictEqual(code, 0); + done(); + })); + }); +}); diff --git a/test/es-module/test-esm-specifiers.mjs b/test/es-module/test-esm-specifiers.mjs index bc6125f5f94ad9..670cd93c9b0b28 100644 --- a/test/es-module/test-esm-specifiers.mjs +++ b/test/es-module/test-esm-specifiers.mjs @@ -1,62 +1,79 @@ -// Flags: --experimental-specifier-resolution=node -import { mustNotCall } from '../common/index.mjs'; -import assert from 'assert'; -import path from 'path'; -import { spawn } from 'child_process'; -import { fileURLToPath } from 'url'; - -// commonJS index.js -import commonjs from '../fixtures/es-module-specifiers/package-type-commonjs'; -// esm index.js -import module from 
'../fixtures/es-module-specifiers/package-type-module'; -// Notice the trailing slash -import success, { explicit, implicit, implicitModule, getImplicitCommonjs } - from '../fixtures/es-module-specifiers/'; - -assert.strictEqual(commonjs, 'commonjs'); -assert.strictEqual(module, 'module'); -assert.strictEqual(success, 'success'); -assert.strictEqual(explicit, 'esm'); -assert.strictEqual(implicit, 'cjs'); -assert.strictEqual(implicitModule, 'cjs'); - -async function main() { - try { - await import('../fixtures/es-module-specifiers/do-not-exist.js'); - } catch (e) { - // Files that do not exist should throw - assert.strictEqual(e.name, 'Error'); - } - try { - await getImplicitCommonjs(); - } catch (e) { - // Legacy loader cannot resolve .mjs automatically from main - assert.strictEqual(e.name, 'Error'); - } -} - -main().catch(mustNotCall); - -// Test path from command line arguments -[ - 'package-type-commonjs', - 'package-type-module', - '/', - '/index', -].forEach((item) => { - const modulePath = path.join( - fileURLToPath(import.meta.url), - '../../fixtures/es-module-specifiers', - item, - ); - [ - '--experimental-specifier-resolution', - '--es-module-specifier-resolution', - ].forEach((option) => { - spawn(process.execPath, - [`${option}=node`, modulePath], - { stdio: 'inherit' }).on('exit', (code) => { - assert.strictEqual(code, 0); - }); +import { spawnPromisified } from '../common/index.mjs'; +import * as fixtures from '../common/fixtures.mjs'; +import { match, strictEqual } from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; + + +describe('ESM: specifier-resolution=node', { concurrency: true }, () => { + it(async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + '--experimental-specifier-resolution=node', + '--input-type=module', + '--eval', + [ + 'import { strictEqual } from "node:assert";', + // commonJS index.js + `import commonjs from ${JSON.stringify(fixtures.fileURL('es-module-specifiers/package-type-commonjs'))};`, + // esm index.js + `import module from ${JSON.stringify(fixtures.fileURL('es-module-specifiers/package-type-module'))};`, + // Notice the trailing slash + `import success, { explicit, implicit, implicitModule } from ${JSON.stringify(fixtures.fileURL('es-module-specifiers/'))};`, + 'strictEqual(commonjs, "commonjs");', + 'strictEqual(module, "module");', + 'strictEqual(success, "success");', + 'strictEqual(explicit, "esm");', + 'strictEqual(implicit, "cjs");', + 'strictEqual(implicitModule, "cjs");', + ].join('\n'), + ]); + + strictEqual(stderr, ''); + strictEqual(stdout, ''); + strictEqual(code, 0); + }); + + it('should throw when the file doesn\'t exist', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + fixtures.path('es-module-specifiers/do-not-exist.js'), + ]); + + match(stderr, /Cannot find module/); + strictEqual(stdout, ''); + strictEqual(code, 1); + }); + + it('should throw when the omitted file extension is .mjs (legacy loader doesn\'t support it)', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + '--experimental-specifier-resolution=node', + '--input-type=module', + '--eval', + `import whatever from ${JSON.stringify(fixtures.fileURL('es-module-specifiers/implicit-main-type-commonjs'))};`, + ]); + + match(stderr, /ERR_MODULE_NOT_FOUND/); + strictEqual(stdout, ''); + strictEqual(code, 1); + }); + + for ( + const item of [ + 'package-type-commonjs', 
+ 'package-type-module', + '/', + '/index', + ] + ) it('should ', async () => { + const { code } = await spawnPromisified(execPath, [ + '--no-warnings', + '--experimental-specifier-resolution=node', + '--es-module-specifier-resolution=node', + fixtures.path('es-module-specifiers', item), + ]); + + strictEqual(code, 0); }); }); diff --git a/test/es-module/test-esm-syntax-error.mjs b/test/es-module/test-esm-syntax-error.mjs index a8c019171717dd..de87187b069441 100644 --- a/test/es-module/test-esm-syntax-error.mjs +++ b/test/es-module/test-esm-syntax-error.mjs @@ -1,19 +1,16 @@ -import { mustCall } from '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import { path } from '../common/fixtures.mjs'; -import { match, notStrictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; +import { match, notStrictEqual } from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -const child = spawn(execPath, [ - path('es-module-loaders', 'syntax-error.mjs'), -]); -let stderr = ''; -child.stderr.setEncoding('utf8'); -child.stderr.on('data', (data) => { - stderr += data; +describe('ESM: importing a module with syntax error(s)', { concurrency: true }, () => { + it('should throw', async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + path('es-module-loaders', 'syntax-error.mjs'), + ]); + match(stderr, /SyntaxError:/); + notStrictEqual(code, 0); + }); }); -child.on('close', mustCall((code, _signal) => { - notStrictEqual(code, 0); - match(stderr, /SyntaxError:/); -})); diff --git a/test/es-module/test-esm-tla-unfinished.mjs b/test/es-module/test-esm-tla-unfinished.mjs index a7b6e620d0620a..48bc4d77f42b4e 100644 --- a/test/es-module/test-esm-tla-unfinished.mjs +++ b/test/es-module/test-esm-tla-unfinished.mjs @@ -1,7 +1,9 @@ -import '../common/index.mjs'; -import assert from 'assert'; -import child_process from 'child_process'; +import { spawnPromisified } from '../common/index.mjs'; import fixtures from '../common/fixtures.js'; +import assert from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; + const commonArgs = [ '--no-warnings', @@ -9,102 +11,117 @@ const commonArgs = [ '--eval', ]; -{ - // Unresolved TLA promise, --eval - const { status, stdout, stderr } = child_process.spawnSync( - process.execPath, - [...commonArgs, 'await new Promise(() => {})'], - { encoding: 'utf8' }); - assert.deepStrictEqual([status, stdout, stderr], [13, '', '']); -} - -{ - // Rejected TLA promise, --eval - const { status, stdout, stderr } = child_process.spawnSync( - process.execPath, - [...commonArgs, 'await Promise.reject(new Error("Xyz"))'], - { encoding: 'utf8' }); - assert.deepStrictEqual([status, stdout], [1, '']); - assert.match(stderr, /Error: Xyz/); -} - -{ - // Unresolved TLA promise with explicit exit code, --eval - const { status, stdout, stderr } = child_process.spawnSync( - process.execPath, - [ +describe('ESM: unsettled and rejected promises', { concurrency: true }, () => { + it('should exit for an unsettled TLA promise via --eval', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + ...commonArgs, + 'await new Promise(() => {})', + ]); + + assert.strictEqual(stderr, ''); + assert.strictEqual(stdout, ''); + assert.strictEqual(code, 13); + }); + + it('should throw for a rejected TLA promise via --eval', async () => { + // Rejected TLA promise, --eval + const { code, stderr, stdout } = 
await spawnPromisified(execPath, [ + ...commonArgs, + 'await Promise.reject(new Error("Xyz"))', + ]); + + assert.match(stderr, /Error: Xyz/); + assert.strictEqual(stdout, ''); + assert.strictEqual(code, 1); + }); + + it('should exit for an unsettled TLA promise and respect explicit exit code via --eval', async () => { + // Rejected TLA promise, --eval + const { code, stderr, stdout } = await spawnPromisified(execPath, [ ...commonArgs, 'process.exitCode = 42;await new Promise(() => {})', - ], - { encoding: 'utf8' }); - assert.deepStrictEqual([status, stdout, stderr], [42, '', '']); -} - -{ - // Rejected TLA promise with explicit exit code, --eval - const { status, stdout, stderr } = child_process.spawnSync( - process.execPath, - [ + ]); + + assert.strictEqual(stderr, ''); + assert.strictEqual(stdout, ''); + assert.strictEqual(code, 42); + }); + + it('should throw for a rejected TLA promise and ignore explicit exit code via --eval', async () => { + // Rejected TLA promise, --eval + const { code, stderr, stdout } = await spawnPromisified(execPath, [ ...commonArgs, 'process.exitCode = 42;await Promise.reject(new Error("Xyz"))', - ], - { encoding: 'utf8' }); - assert.deepStrictEqual([status, stdout], [1, '']); - assert.match(stderr, /Error: Xyz/); -} - -{ - // Unresolved TLA promise, module file - const { status, stdout, stderr } = child_process.spawnSync( - process.execPath, - ['--no-warnings', fixtures.path('es-modules/tla/unresolved.mjs')], - { encoding: 'utf8' }); - assert.deepStrictEqual([status, stdout, stderr], [13, '', '']); -} - -{ - // Rejected TLA promise, module file - const { status, stdout, stderr } = child_process.spawnSync( - process.execPath, - ['--no-warnings', fixtures.path('es-modules/tla/rejected.mjs')], - { encoding: 'utf8' }); - assert.deepStrictEqual([status, stdout], [1, '']); - assert.match(stderr, /Error: Xyz/); -} - -{ - // Unresolved TLA promise, module file - const { status, stdout, stderr } = child_process.spawnSync( - process.execPath, - ['--no-warnings', fixtures.path('es-modules/tla/unresolved-withexitcode.mjs')], - { encoding: 'utf8' }); - assert.deepStrictEqual([status, stdout, stderr], [42, '', '']); -} - -{ - // Rejected TLA promise, module file - const { status, stdout, stderr } = child_process.spawnSync( - process.execPath, - ['--no-warnings', fixtures.path('es-modules/tla/rejected-withexitcode.mjs')], - { encoding: 'utf8' }); - assert.deepStrictEqual([status, stdout], [1, '']); - assert.match(stderr, /Error: Xyz/); -} - -{ - // Calling process.exit() in .mjs should return status 0 - const { status, stdout, stderr } = child_process.spawnSync( - process.execPath, - ['--no-warnings', fixtures.path('es-modules/tla/process-exit.mjs')], - { encoding: 'utf8' }); - assert.deepStrictEqual([status, stdout, stderr], [0, '', '']); -} - -{ - // Calling process.exit() in worker thread shouldn't influence main thread - const { status, stdout, stderr } = child_process.spawnSync( - process.execPath, - ['--no-warnings', fixtures.path('es-modules/tla/unresolved-with-worker-process-exit.mjs')], - { encoding: 'utf8' }); - assert.deepStrictEqual([status, stdout, stderr], [13, '', '']); -} + ]); + + assert.match(stderr, /Error: Xyz/); + assert.strictEqual(stdout, ''); + assert.strictEqual(code, 1); + }); + + it('should exit for an unsettled TLA promise via stdin', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + fixtures.path('es-modules/tla/unresolved.mjs'), + ]); + + assert.strictEqual(stderr, ''); + 
assert.strictEqual(stdout, ''); + assert.strictEqual(code, 13); + }); + + it('should throw for a rejected TLA promise via stdin', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + fixtures.path('es-modules/tla/rejected.mjs'), + ]); + + assert.match(stderr, /Error: Xyz/); + assert.strictEqual(stdout, ''); + assert.strictEqual(code, 1); + }); + + it('should exit for an unsettled TLA promise and respect explicit exit code via stdin', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + fixtures.path('es-modules/tla/unresolved-withexitcode.mjs'), + ]); + + assert.strictEqual(stderr, ''); + assert.strictEqual(stdout, ''); + assert.strictEqual(code, 42); + }); + + it('should throw for a rejected TLA promise and ignore explicit exit code via stdin', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + fixtures.path('es-modules/tla/rejected-withexitcode.mjs'), + ]); + + assert.match(stderr, /Error: Xyz/); + assert.strictEqual(stdout, ''); + assert.strictEqual(code, 1); + }); + + it('should exit successfully when calling `process.exit()` in `.mjs` file', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + fixtures.path('es-modules/tla/process-exit.mjs'), + ]); + + assert.strictEqual(stderr, ''); + assert.strictEqual(stdout, ''); + assert.strictEqual(code, 0); + }); + + it('should be unaffected by `process.exit()` in worker thread', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + fixtures.path('es-modules/tla/unresolved-with-worker-process-exit.mjs'), + ]); + + assert.strictEqual(stderr, ''); + assert.strictEqual(stdout, ''); + assert.strictEqual(code, 13); + }); +}); diff --git a/test/es-module/test-esm-unknown-or-no-extension.js b/test/es-module/test-esm-unknown-or-no-extension.js index 40f840ad670cf3..3f0660e5aa9225 100644 --- a/test/es-module/test-esm-unknown-or-no-extension.js +++ b/test/es-module/test-esm-unknown-or-no-extension.js @@ -1,40 +1,36 @@ 'use strict'; -const common = require('../common'); -const fixtures = require('../common/fixtures'); -const { spawn } = require('child_process'); -const assert = require('assert'); +const { spawnPromisified } = require('../common'); +const fixtures = require('../common/fixtures.js'); +const assert = require('node:assert'); +const { execPath } = require('node:process'); +const { describe, it } = require('node:test'); + // In a "type": "module" package scope, files with unknown extensions or no // extensions should throw; both when used as a main entry point and also when // referenced via `import`. 
+describe('ESM: extensionless and unknown specifiers', { concurrency: true }, () => { + for ( + const fixturePath of [ + '/es-modules/package-type-module/noext-esm', + '/es-modules/package-type-module/imports-noext.mjs', + '/es-modules/package-type-module/extension.unknown', + '/es-modules/package-type-module/imports-unknownext.mjs', + ] + ) { + it('should throw', async () => { + const entry = fixtures.path(fixturePath); + const { code, signal, stderr, stdout } = await spawnPromisified(execPath, [entry]); -[ - '/es-modules/package-type-module/noext-esm', - '/es-modules/package-type-module/imports-noext.mjs', - '/es-modules/package-type-module/extension.unknown', - '/es-modules/package-type-module/imports-unknownext.mjs', -].forEach((fixturePath) => { - const entry = fixtures.path(fixturePath); - const child = spawn(process.execPath, [entry]); - let stdout = ''; - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stdout.setEncoding('utf8'); - child.stdout.on('data', (data) => { - stdout += data; - }); - child.stderr.on('data', (data) => { - stderr += data; - }); - child.on('close', common.mustCall((code, signal) => { - assert.strictEqual(code, 1); - assert.strictEqual(signal, null); - assert.strictEqual(stdout, ''); - assert.ok(stderr.includes('ERR_UNKNOWN_FILE_EXTENSION')); - if (fixturePath.includes('noext')) { - // Check for explanation to users - assert.ok(stderr.includes('extensionless')); - } - })); + assert.strictEqual(code, 1); + assert.strictEqual(signal, null); + assert.strictEqual(stdout, ''); + assert.ok(stderr.includes('ERR_UNKNOWN_FILE_EXTENSION')); + if (fixturePath.includes('noext')) { + // Check for explanation to users + assert.ok(stderr.includes('extensionless')); + } + }); + } }); diff --git a/test/es-module/test-esm-wasm.mjs b/test/es-module/test-esm-wasm.mjs index 01717c47714f6a..fac1d4b2837df0 100644 --- a/test/es-module/test-esm-wasm.mjs +++ b/test/es-module/test-esm-wasm.mjs @@ -1,37 +1,43 @@ -// Flags: --experimental-wasm-modules -import '../common/index.mjs'; -import { path } from '../common/fixtures.mjs'; -import { add, addImported } from '../fixtures/es-modules/simple.wasm'; -import { state } from '../fixtures/es-modules/wasm-dep.mjs'; -import { strictEqual, ok } from 'assert'; -import { spawn } from 'child_process'; +import { spawnPromisified } from '../common/index.mjs'; +import * as fixtures from '../common/fixtures.mjs'; +import { strictEqual, match } from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; -strictEqual(state, 'WASM Start Executed'); -strictEqual(add(10, 20), 30); +describe('ESM: WASM modules', { concurrency: true }, () => { + it('should load exports', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + '--experimental-wasm-modules', + '--input-type=module', + '--eval', + [ + 'import { strictEqual, match } from "node:assert";', + `import { add, addImported } from ${JSON.stringify(fixtures.fileURL('es-modules/simple.wasm'))};`, + `import { state } from ${JSON.stringify(fixtures.fileURL('es-modules/wasm-dep.mjs'))};`, + 'strictEqual(state, "WASM Start Executed");', + 'strictEqual(add(10, 20), 30);', + 'strictEqual(addImported(0), 42);', + 'strictEqual(state, "WASM JS Function Executed");', + 'strictEqual(addImported(1), 43);', + ].join('\n'), + ]); -strictEqual(addImported(0), 42); + strictEqual(stderr, ''); + strictEqual(stdout, ''); + strictEqual(code, 0); + }); -strictEqual(state, 'WASM JS Function Executed'); + it('should emit 
experimental warning', async () => { + const { code, signal, stderr } = await spawnPromisified(execPath, [ + '--experimental-wasm-modules', + fixtures.path('es-modules/wasm-modules.mjs'), + ]); -strictEqual(addImported(1), 43); - -// Test warning message -const child = spawn(process.execPath, [ - '--experimental-wasm-modules', - path('/es-modules/wasm-modules.mjs'), -]); - -let stderr = ''; -child.stderr.setEncoding('utf8'); -child.stderr.on('data', (data) => { - stderr += data; -}); -child.on('close', (code, signal) => { - strictEqual(code, 0); - strictEqual(signal, null); - ok(stderr.toString().includes( - 'ExperimentalWarning: Importing WebAssembly modules is ' + - 'an experimental feature. This feature could change at any time' - )); + strictEqual(code, 0); + strictEqual(signal, null); + match(stderr, /ExperimentalWarning/); + match(stderr, /WebAssembly/); + }); }); diff --git a/test/es-module/test-http-imports-cli.mjs b/test/es-module/test-http-imports-cli.mjs index 67cefd69ddd889..7deb31a288c7f7 100644 --- a/test/es-module/test-http-imports-cli.mjs +++ b/test/es-module/test-http-imports-cli.mjs @@ -1,48 +1,48 @@ -import { mustCall } from '../common/index.mjs'; -import { match, notStrictEqual } from 'assert'; -import { spawn } from 'child_process'; -import { execPath } from 'process'; - -{ - const child = spawn(execPath, [ - '--experimental-network-imports', - '--input-type=module', - '-e', - 'import "http://example.com"', - ]); - - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { - stderr += data; - }); - child.on('close', mustCall((code, _signal) => { +import { mustCall, spawnPromisified } from '../common/index.mjs'; +import { ok, match, notStrictEqual } from 'node:assert'; +import { spawn as spawnAsync } from 'node:child_process'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; + + +describe('ESM: http import via CLI', { concurrency: true }, () => { + const disallowedSpecifier = 'http://example.com'; + + it('should throw disallowed error for insecure protocol', async () => { + const { code, stderr } = await spawnPromisified(execPath, [ + '--experimental-network-imports', + '--input-type=module', + '--eval', + `import ${JSON.stringify(disallowedSpecifier)}`, + ]); + notStrictEqual(code, 0); // [ERR_NETWORK_IMPORT_DISALLOWED]: import of 'http://example.com/' by // …/[eval1] is not supported: http can only be used to load local // resources (use https instead). - match(stderr, /[ERR_NETWORK_IMPORT_DISALLOWED]/); - })); -} -{ - const child = spawn(execPath, [ - '--experimental-network-imports', - '--input-type=module', - ]); - child.stdin.end('import "http://example.com"'); - - let stderr = ''; - child.stderr.setEncoding('utf8'); - child.stderr.on('data', (data) => { - stderr += data; + match(stderr, /ERR_NETWORK_IMPORT_DISALLOWED/); + ok(stderr.includes(disallowedSpecifier)); }); - child.on('close', mustCall((code, _signal) => { - notStrictEqual(code, 0); - // [ERR_NETWORK_IMPORT_DISALLOWED]: import of 'http://example.com/' by - // …/[stdin] is not supported: http can only be used to load local - // resources (use https instead). 
- match(stderr, /[ERR_NETWORK_IMPORT_DISALLOWED]/); - })); -} + it('should throw disallowed error for insecure protocol in REPL', () => { + const child = spawnAsync(execPath, [ + '--experimental-network-imports', + '--input-type=module', + ]); + child.stdin.end(`import ${JSON.stringify(disallowedSpecifier)}`); + + let stderr = ''; + child.stderr.setEncoding('utf8'); + child.stderr.on('data', (data) => stderr += data); + child.on('close', mustCall((code, _signal) => { + notStrictEqual(code, 0); + + // [ERR_NETWORK_IMPORT_DISALLOWED]: import of 'http://example.com/' by + // …/[stdin] is not supported: http can only be used to load local + // resources (use https instead). + match(stderr, /\[ERR_NETWORK_IMPORT_DISALLOWED\]/); + ok(stderr.includes(disallowedSpecifier)); + })); + }); +}); diff --git a/test/fixtures/es-module-loaders/assertionless-json-import.mjs b/test/fixtures/es-module-loaders/assertionless-json-import.mjs index c5c2fadf28fb58..07656d4ec40fa3 100644 --- a/test/fixtures/es-module-loaders/assertionless-json-import.mjs +++ b/test/fixtures/es-module-loaders/assertionless-json-import.mjs @@ -4,14 +4,14 @@ const JSON_URL_PATTERN = /\.json(\?[^#]*)?(#.*)?$/; export function resolve(url, context, next) { // Mutation from resolve hook should be discarded. context.importAssertions.type = 'whatever'; - return next(url, context); + return next(url); } export function load(url, context, next) { - if (context.importAssertions.type == null && + if (context.importAssertions.type == null && (DATA_URL_PATTERN.test(url) || JSON_URL_PATTERN.test(url))) { const { importAssertions } = context; importAssertions.type = 'json'; } - return next(url, context); + return next(url); } diff --git a/test/fixtures/es-module-loaders/builtin-named-exports-loader.mjs b/test/fixtures/es-module-loaders/builtin-named-exports-loader.mjs index e303ec196f6c6d..8c317c1b7ce31e 100644 --- a/test/fixtures/es-module-loaders/builtin-named-exports-loader.mjs +++ b/test/fixtures/es-module-loaders/builtin-named-exports-loader.mjs @@ -35,7 +35,7 @@ export function load(url, context, next) { format: 'module', }; } - return next(url, context); + return next(url); } function generateBuiltinModule(builtinName) { diff --git a/test/fixtures/es-module-loaders/example-loader.mjs b/test/fixtures/es-module-loaders/example-loader.mjs index f87054c8b70502..77d44d55f91523 100644 --- a/test/fixtures/es-module-loaders/example-loader.mjs +++ b/test/fixtures/es-module-loaders/example-loader.mjs @@ -8,7 +8,7 @@ const JS_EXTENSIONS = new Set(['.js', '.mjs']); const baseURL = new URL('file://'); baseURL.pathname = process.cwd() + '/'; -export function resolve(specifier, { parentURL = baseURL }, defaultResolve) { +export function resolve(specifier, { parentURL = baseURL }, next) { if (builtinModules.includes(specifier)) { return { shortCircuit: true, @@ -17,7 +17,7 @@ export function resolve(specifier, { parentURL = baseURL }, defaultResolve) { } if (/^\.{1,2}[/]/.test(specifier) !== true && !specifier.startsWith('file:')) { // For node_modules support: - // return defaultResolve(specifier, {parentURL}, defaultResolve); + // return next(specifier); throw new Error( `imports must be URLs or begin with './', or '../'; '${specifier}' does not`); } diff --git a/test/fixtures/es-module-loaders/hook-resolve-type.mjs b/test/fixtures/es-module-loaders/hook-resolve-type.mjs index 5068d6265c57b2..b1f5606c0e4a0a 100644 --- a/test/fixtures/es-module-loaders/hook-resolve-type.mjs +++ b/test/fixtures/es-module-loaders/hook-resolve-type.mjs @@ -3,7 +3,7 @@ let 
importedCJS = 0; global.getModuleTypeStats = () => { return {importedESM, importedCJS} }; export async function load(url, context, next) { - return next(url, context, next); + return next(url); } export async function resolve(specifier, context, next) { diff --git a/test/fixtures/es-module-loaders/hooks-custom.mjs b/test/fixtures/es-module-loaders/hooks-custom.mjs index 4c4014db01ef3f..65dba3535c2d95 100644 --- a/test/fixtures/es-module-loaders/hooks-custom.mjs +++ b/test/fixtures/es-module-loaders/hooks-custom.mjs @@ -56,10 +56,10 @@ export function load(url, context, next) { source: `export const message = 'Woohoo!'.toUpperCase();`, }; - return next(url, context, next); + return next(url); } -export function resolve(specifier, context, next) { +export function resolve(specifier, { importAssertions }, next) { let format = ''; if (specifier === 'esmHook/format.false') format = false; @@ -70,8 +70,8 @@ export function resolve(specifier, context, next) { format, shortCircuit: true, url: pathToFileURL(specifier).href, - importAssertions: context.importAssertions, + importAssertions, }; - return next(specifier, context, next); + return next(specifier); } diff --git a/test/fixtures/es-module-loaders/hooks-obsolete.mjs b/test/fixtures/es-module-loaders/hooks-obsolete.mjs index 9d12251923d7e9..bb10ef8ef4b29a 100644 --- a/test/fixtures/es-module-loaders/hooks-obsolete.mjs +++ b/test/fixtures/es-module-loaders/hooks-obsolete.mjs @@ -4,19 +4,19 @@ export function getSource() {} export function transformSource() {} -export function load(url, context, next) { - if (url === 'whatever') return { - format: 'module', - source: '', +export function resolve(specifier, context, next) { + if (specifier === 'whatever') return { + url: specifier, }; - return next(url, context, next); + return next(specifier); } -export function resolve(specifier, context, next) { - if (specifier === 'whatever') return { - url: specifier, +export function load(url, context, next) { + if (url === 'whatever') return { + format: 'module', + source: '', }; - return next(specifier, context, next); + return next(url); } diff --git a/test/fixtures/es-module-loaders/http-loader.mjs b/test/fixtures/es-module-loaders/http-loader.mjs index f0add5d5b419f8..8096dd9bb73a4c 100644 --- a/test/fixtures/es-module-loaders/http-loader.mjs +++ b/test/fixtures/es-module-loaders/http-loader.mjs @@ -15,7 +15,7 @@ export function resolve(specifier, context, nextResolve) { }; } - return nextResolve(specifier, context); + return nextResolve(specifier); } export function load(url, context, nextLoad) { @@ -36,5 +36,5 @@ export function load(url, context, nextLoad) { }); } - return nextLoad(url, context); + return nextLoad(url); } diff --git a/test/fixtures/es-module-loaders/loader-get-format.mjs b/test/fixtures/es-module-loaders/loader-get-format.mjs deleted file mode 100644 index 7ade70fca0ebe6..00000000000000 --- a/test/fixtures/es-module-loaders/loader-get-format.mjs +++ /dev/null @@ -1,10 +0,0 @@ -export async function getFormat(url, context, defaultGetFormat) { - try { - if (new URL(url).pathname.endsWith('.unknown')) { - return { - format: 'module' - }; - } - } catch {} - return defaultGetFormat(url, context, defaultGetFormat); -} diff --git a/test/fixtures/es-module-loaders/loader-invalid-format.mjs b/test/fixtures/es-module-loaders/loader-invalid-format.mjs index 438d50dacba433..e7dd06c108ba1d 100644 --- a/test/fixtures/es-module-loaders/loader-invalid-format.mjs +++ b/test/fixtures/es-module-loaders/loader-invalid-format.mjs @@ -1,11 +1,11 @@ 
-export async function resolve(specifier, { parentURL, importAssertions }, defaultResolve) { +export async function resolve(specifier, { parentURL, importAssertions }, next) { if (parentURL && specifier === '../fixtures/es-modules/test-esm-ok.mjs') { return { shortCircuit: true, url: 'file:///asdf', }; } - return defaultResolve(specifier, {parentURL, importAssertions}, defaultResolve); + return next(specifier); } export async function load(url, context, next) { @@ -16,5 +16,5 @@ export async function load(url, context, next) { source: '', } } - return next(url, context, next); + return next(url); } diff --git a/test/fixtures/es-module-loaders/loader-invalid-url.mjs b/test/fixtures/es-module-loaders/loader-invalid-url.mjs index 87d1a6a564b461..a54f39521f29ac 100644 --- a/test/fixtures/es-module-loaders/loader-invalid-url.mjs +++ b/test/fixtures/es-module-loaders/loader-invalid-url.mjs @@ -1,4 +1,4 @@ -export async function resolve(specifier, { parentURL, importAssertions }, defaultResolve) { +export async function resolve(specifier, { parentURL, importAssertions }, next) { if (parentURL && specifier === '../fixtures/es-modules/test-esm-ok.mjs') { return { shortCircuit: true, @@ -6,5 +6,5 @@ export async function resolve(specifier, { parentURL, importAssertions }, defaul importAssertions, }; } - return defaultResolve(specifier, {parentURL, importAssertions}, defaultResolve); + return next(specifier); } diff --git a/test/fixtures/es-module-loaders/loader-load-bad-next-url.mjs b/test/fixtures/es-module-loaders/loader-load-bad-next-url.mjs index 4f53b695327dd1..c6a4c7504d43e5 100644 --- a/test/fixtures/es-module-loaders/loader-load-bad-next-url.mjs +++ b/test/fixtures/es-module-loaders/loader-load-bad-next-url.mjs @@ -1,3 +1,3 @@ export async function load(url, context, next) { - return next([], context); + return next([]); } diff --git a/test/fixtures/es-module-loaders/loader-load-impersonating-next-url.mjs b/test/fixtures/es-module-loaders/loader-load-impersonating-next-url.mjs index f98b091c8b9ff5..1028e093e46819 100644 --- a/test/fixtures/es-module-loaders/loader-load-impersonating-next-url.mjs +++ b/test/fixtures/es-module-loaders/loader-load-impersonating-next-url.mjs @@ -1,3 +1,3 @@ export async function load(url, context, next) { - return next('not/a/url', context); + return next('not/a/url'); } diff --git a/test/fixtures/es-module-loaders/loader-load-next-modified.mjs b/test/fixtures/es-module-loaders/loader-load-next-modified.mjs index 1f2382467f7122..401b52971311a7 100644 --- a/test/fixtures/es-module-loaders/loader-load-next-modified.mjs +++ b/test/fixtures/es-module-loaders/loader-load-next-modified.mjs @@ -2,7 +2,7 @@ export async function load(url, context, next) { const { format, source, - } = await next(url, context); + } = await next(url); return { format, diff --git a/test/fixtures/es-module-loaders/loader-load-passthru.mjs b/test/fixtures/es-module-loaders/loader-load-passthru.mjs index 8cfbcb6a3a5d0b..0de06142007562 100644 --- a/test/fixtures/es-module-loaders/loader-load-passthru.mjs +++ b/test/fixtures/es-module-loaders/loader-load-passthru.mjs @@ -1,4 +1,4 @@ export async function load(url, context, next) { console.log('load passthru'); // This log is deliberate - return next(url, context); + return next(url); } diff --git a/test/fixtures/es-module-loaders/loader-resolve-bad-next-specifier.mjs b/test/fixtures/es-module-loaders/loader-resolve-bad-next-specifier.mjs index a23785d3d956db..66c941754f90cc 100644 --- 
a/test/fixtures/es-module-loaders/loader-resolve-bad-next-specifier.mjs +++ b/test/fixtures/es-module-loaders/loader-resolve-bad-next-specifier.mjs @@ -1,3 +1,3 @@ export async function resolve(specifier, context, next) { - return next([], context); + return next([]); } diff --git a/test/fixtures/es-module-loaders/loader-resolve-foo.mjs b/test/fixtures/es-module-loaders/loader-resolve-foo.mjs index 595385e12a0cf7..7d23d6c49088c9 100644 --- a/test/fixtures/es-module-loaders/loader-resolve-foo.mjs +++ b/test/fixtures/es-module-loaders/loader-resolve-foo.mjs @@ -1,4 +1,4 @@ export async function resolve(specifier, context, next) { console.log('resolve foo'); // This log is deliberate - return next('file:///foo.mjs', context); + return next('file:///foo.mjs'); } diff --git a/test/fixtures/es-module-loaders/loader-resolve-multiple-next-calls.mjs b/test/fixtures/es-module-loaders/loader-resolve-multiple-next-calls.mjs index 88d333c2404a3c..91dbec251edeba 100644 --- a/test/fixtures/es-module-loaders/loader-resolve-multiple-next-calls.mjs +++ b/test/fixtures/es-module-loaders/loader-resolve-multiple-next-calls.mjs @@ -1,6 +1,6 @@ export async function resolve(specifier, context, next) { - const { url: first } = await next(specifier, context); - const { url: second } = await next(specifier, context); + const { url: first } = await next(specifier); + const { url: second } = await next(specifier); return { format: 'module', diff --git a/test/fixtures/es-module-loaders/loader-resolve-passthru.mjs b/test/fixtures/es-module-loaders/loader-resolve-passthru.mjs index 1a373bab90ba57..3db5b21bb98793 100644 --- a/test/fixtures/es-module-loaders/loader-resolve-passthru.mjs +++ b/test/fixtures/es-module-loaders/loader-resolve-passthru.mjs @@ -1,4 +1,4 @@ export async function resolve(specifier, context, next) { console.log('resolve passthru'); // This log is deliberate - return next(specifier, context); + return next(specifier); } diff --git a/test/fixtures/es-module-loaders/loader-shared-dep.mjs b/test/fixtures/es-module-loaders/loader-shared-dep.mjs index 387575794c00dc..d41c1ae403399e 100644 --- a/test/fixtures/es-module-loaders/loader-shared-dep.mjs +++ b/test/fixtures/es-module-loaders/loader-shared-dep.mjs @@ -5,7 +5,7 @@ import { createRequire } from '../../common/index.mjs'; const require = createRequire(import.meta.url); const dep = require('./loader-dep.js'); -export function resolve(specifier, { parentURL, importAssertions }, defaultResolve) { +export function resolve(specifier, context, next) { assert.strictEqual(dep.format, 'module'); - return defaultResolve(specifier, { parentURL, importAssertions }, defaultResolve); + return next(specifier); } diff --git a/test/fixtures/es-module-loaders/loader-unknown-builtin-module.mjs b/test/fixtures/es-module-loaders/loader-unknown-builtin-module.mjs index 1063f8dfd65f2f..65e1adf909d0bc 100644 --- a/test/fixtures/es-module-loaders/loader-unknown-builtin-module.mjs +++ b/test/fixtures/es-module-loaders/loader-unknown-builtin-module.mjs @@ -3,5 +3,5 @@ export function resolve(specifier, context, next) { url: 'node:unknown-builtin-module' }; - return next(specifier, context, next); + return next(specifier); } diff --git a/test/fixtures/es-module-loaders/missing-dynamic-instantiate-hook.mjs b/test/fixtures/es-module-loaders/missing-dynamic-instantiate-hook.mjs index ec15eb0bb8fc24..5d61d81bc31ed5 100644 --- a/test/fixtures/es-module-loaders/missing-dynamic-instantiate-hook.mjs +++ b/test/fixtures/es-module-loaders/missing-dynamic-instantiate-hook.mjs @@ -1,10 
+1,10 @@ -export function resolve(specifier, { parentURL }, defaultResolve) { +export function resolve(specifier, context, next) { if (specifier === 'test') { return { url: 'file://' }; } - return defaultResolve(specifier, {parentURL}, defaultResolve); + return next(specifier); } export function getFormat(url, context, defaultGetFormat) { diff --git a/test/fixtures/es-module-loaders/not-found-assert-loader.mjs b/test/fixtures/es-module-loaders/not-found-assert-loader.mjs index 5213ddedb34e8d..ea4c73724298db 100644 --- a/test/fixtures/es-module-loaders/not-found-assert-loader.mjs +++ b/test/fixtures/es-module-loaders/not-found-assert-loader.mjs @@ -1,15 +1,15 @@ -import assert from 'assert'; +import assert from 'node:assert'; // a loader that asserts that the defaultResolve will throw "not found" // (skipping the top-level main of course) let mainLoad = true; -export async function resolve(specifier, { parentURL, importAssertions }, defaultResolve) { +export async function resolve(specifier, { importAssertions }, next) { if (mainLoad) { mainLoad = false; - return defaultResolve(specifier, {parentURL, importAssertions}, defaultResolve); + return next(specifier); } try { - await defaultResolve(specifier, {parentURL, importAssertions}, defaultResolve); + await next(specifier); } catch (e) { assert.strictEqual(e.code, 'ERR_MODULE_NOT_FOUND'); diff --git a/test/fixtures/es-module-loaders/string-sources.mjs b/test/fixtures/es-module-loaders/string-sources.mjs index 1fc2b7a8d6f7e3..396d17cb17a75c 100644 --- a/test/fixtures/es-module-loaders/string-sources.mjs +++ b/test/fixtures/es-module-loaders/string-sources.mjs @@ -28,7 +28,7 @@ export function resolve(specifier, context, next) { url: specifier, }; } - return next(specifier, context); + return next(specifier); } export function load(href, context, next) { @@ -39,5 +39,5 @@ export function load(href, context, next) { source: SOURCES[href], }; } - return next(href, context); + return next(href); } From a2cd8b316ce962e9c8897eb02af977244bfda0dc Mon Sep 17 00:00:00 2001 From: theanarkh Date: Fri, 29 Jul 2022 17:00:38 +0800 Subject: [PATCH 030/177] http: make idle http parser count configurable PR-URL: https://github.com/nodejs/node/pull/43974 Reviewed-By: Paolo Insogna Reviewed-By: Matteo Collina Reviewed-By: Feng Yu --- doc/api/http.md | 10 ++++++++++ lib/http.js | 9 +++++++-- .../test-http-set-max-idle-http-parser.js | 19 +++++++++++++++++++ 3 files changed, 36 insertions(+), 2 deletions(-) create mode 100644 test/parallel/test-http-set-max-idle-http-parser.js diff --git a/doc/api/http.md b/doc/api/http.md index 1b85a79e9771ca..59453490f36bac 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -3615,6 +3615,16 @@ try { } ``` +## `http.setMaxIdleHTTPParsers` + + + +* {number} + +Set the maximum number of idle HTTP parsers. **Default:** `1000`. 
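A minimal usage sketch of the API documented above (the limit of 2000 is an arbitrary example value, not a recommendation from the patch):

```js
'use strict';
const http = require('node:http');

// Allow up to 2000 idle HTTP parsers to be kept for reuse (default: 1000).
// The argument must be an integer of at least 1, otherwise an error is thrown.
http.setMaxIdleHTTPParsers(2000);
```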
+ [RFC 8187]: https://www.rfc-editor.org/rfc/rfc8187.txt [`'checkContinue'`]: #event-checkcontinue [`'finish'`]: #event-finish diff --git a/lib/http.js b/lib/http.js index 1366656e42eb94..d7acafe4f317b8 100644 --- a/lib/http.js +++ b/lib/http.js @@ -27,9 +27,10 @@ const { ObjectDefineProperty, } = primordials; +const { validateInteger } = require('internal/validators'); const httpAgent = require('_http_agent'); const { ClientRequest } = require('_http_client'); -const { methods } = require('_http_common'); +const { methods, parsers } = require('_http_common'); const { IncomingMessage } = require('_http_incoming'); const { validateHeaderName, @@ -123,7 +124,11 @@ module.exports = { validateHeaderName, validateHeaderValue, get, - request + request, + setMaxIdleHTTPParsers(max) { + validateInteger(max, 'max', 1); + parsers.max = max; + } }; ObjectDefineProperty(module.exports, 'maxHeaderSize', { diff --git a/test/parallel/test-http-set-max-idle-http-parser.js b/test/parallel/test-http-set-max-idle-http-parser.js new file mode 100644 index 00000000000000..d935823a1ba946 --- /dev/null +++ b/test/parallel/test-http-set-max-idle-http-parser.js @@ -0,0 +1,19 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const httpCommon = require('_http_common'); +const http = require('http'); + +[Symbol(), {}, [], () => {}, 1n, true, '1', null, undefined].forEach((value) => { + assert.throws(() => http.setMaxIdleHTTPParsers(value), { code: 'ERR_INVALID_ARG_TYPE' }); +}); + +[-1, -Infinity, NaN, 0, 1.1].forEach((value) => { + assert.throws(() => http.setMaxIdleHTTPParsers(value), { code: 'ERR_OUT_OF_RANGE' }); +}); + +[1, Number.MAX_SAFE_INTEGER].forEach((value) => { + assert.notStrictEqual(httpCommon.parsers.max, value); + http.setMaxIdleHTTPParsers(value); + assert.strictEqual(httpCommon.parsers.max, value); +}); From ecf82186e03ea5280b4b52675346a7b201d449b3 Mon Sep 17 00:00:00 2001 From: Daeyeon Jeong Date: Fri, 29 Jul 2022 18:09:34 +0900 Subject: [PATCH 031/177] src,fs: refactor duplicated code in fs.readdir `AfterScanDirWithTypes` is almost same as `AfterScanDir` except for handling the `with file types` option. This merges the two functions into one. 
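The merged completion callback is C++ code (shown in the diff below); purely as an illustration of the JavaScript-visible behaviour it must preserve, and not part of the change itself, both `fs.readdir()` call forms complete through that same native path:

```js
'use strict';
const fs = require('node:fs');

// Without the option, the result is an array of file-name strings.
fs.readdir('.', (err, names) => {
  if (err) throw err;
  console.log(names);
});

// With { withFileTypes: true }, the same request also collects entry types
// and the result is an array of fs.Dirent objects.
fs.readdir('.', { withFileTypes: true }, (err, entries) => {
  if (err) throw err;
  for (const entry of entries) {
    console.log(entry.name, entry.isDirectory());
  }
});
```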
Signed-off-by: Daeyeon Jeong daeyeon.dev@gmail.com PR-URL: https://github.com/nodejs/node/pull/43204 Reviewed-By: Feng Yu --- src/node_file.cc | 82 +++++++++++++++--------------------------------- src/node_file.h | 3 ++ 2 files changed, 28 insertions(+), 57 deletions(-) diff --git a/src/node_file.cc b/src/node_file.cc index 6d1254958a3695..8815bd8d52e190 100644 --- a/src/node_file.cc +++ b/src/node_file.cc @@ -777,43 +777,6 @@ void AfterScanDir(uv_fs_t* req) { FSReqBase* req_wrap = FSReqBase::from_req(req); FSReqAfterScope after(req_wrap, req); - if (!after.Proceed()) { - return; - } - Environment* env = req_wrap->env(); - Local error; - int r; - std::vector> name_v; - - for (;;) { - uv_dirent_t ent; - - r = uv_fs_scandir_next(req, &ent); - if (r == UV_EOF) - break; - if (r != 0) { - return req_wrap->Reject(UVException( - env->isolate(), r, nullptr, req_wrap->syscall(), req->path)); - } - - MaybeLocal filename = - StringBytes::Encode(env->isolate(), - ent.name, - req_wrap->encoding(), - &error); - if (filename.IsEmpty()) - return req_wrap->Reject(error); - - name_v.push_back(filename.ToLocalChecked()); - } - - req_wrap->Resolve(Array::New(env->isolate(), name_v.data(), name_v.size())); -} - -void AfterScanDirWithTypes(uv_fs_t* req) { - FSReqBase* req_wrap = FSReqBase::from_req(req); - FSReqAfterScope after(req_wrap, req); - if (!after.Proceed()) { return; } @@ -826,6 +789,8 @@ void AfterScanDirWithTypes(uv_fs_t* req) { std::vector> name_v; std::vector> type_v; + const bool with_file_types = req_wrap->with_file_types(); + for (;;) { uv_dirent_t ent; @@ -837,23 +802,23 @@ void AfterScanDirWithTypes(uv_fs_t* req) { UVException(isolate, r, nullptr, req_wrap->syscall(), req->path)); } - MaybeLocal filename = - StringBytes::Encode(isolate, - ent.name, - req_wrap->encoding(), - &error); - if (filename.IsEmpty()) + Local filename; + if (!StringBytes::Encode(isolate, ent.name, req_wrap->encoding(), &error) + .ToLocal(&filename)) { return req_wrap->Reject(error); + } + name_v.push_back(filename); - name_v.push_back(filename.ToLocalChecked()); - type_v.emplace_back(Integer::New(isolate, ent.type)); + if (with_file_types) type_v.emplace_back(Integer::New(isolate, ent.type)); } - Local result[] = { - Array::New(isolate, name_v.data(), name_v.size()), - Array::New(isolate, type_v.data(), type_v.size()) - }; - req_wrap->Resolve(Array::New(isolate, result, arraysize(result))); + if (with_file_types) { + Local result[] = {Array::New(isolate, name_v.data(), name_v.size()), + Array::New(isolate, type_v.data(), type_v.size())}; + req_wrap->Resolve(Array::New(isolate, result, arraysize(result))); + } else { + req_wrap->Resolve(Array::New(isolate, name_v.data(), name_v.size())); + } } void Access(const FunctionCallbackInfo& args) { @@ -1650,13 +1615,16 @@ static void ReadDir(const FunctionCallbackInfo& args) { FSReqBase* req_wrap_async = GetReqWrap(args, 3); if (req_wrap_async != nullptr) { // readdir(path, encoding, withTypes, req) - if (with_types) { - AsyncCall(env, req_wrap_async, args, "scandir", encoding, - AfterScanDirWithTypes, uv_fs_scandir, *path, 0 /*flags*/); - } else { - AsyncCall(env, req_wrap_async, args, "scandir", encoding, - AfterScanDir, uv_fs_scandir, *path, 0 /*flags*/); - } + req_wrap_async->set_with_file_types(with_types); + AsyncCall(env, + req_wrap_async, + args, + "scandir", + encoding, + AfterScanDir, + uv_fs_scandir, + *path, + 0 /*flags*/); } else { // readdir(path, encoding, withTypes, undefined, ctx) CHECK_EQ(argc, 5); FSReqWrapSync req_wrap_sync; diff --git 
a/src/node_file.h b/src/node_file.h index 9d2834fa2673d6..45a1ad2e6ebaf4 100644 --- a/src/node_file.h +++ b/src/node_file.h @@ -89,8 +89,10 @@ class FSReqBase : public ReqWrap { enum encoding encoding() const { return encoding_; } bool use_bigint() const { return use_bigint_; } bool is_plain_open() const { return is_plain_open_; } + bool with_file_types() const { return with_file_types_; } void set_is_plain_open(bool value) { is_plain_open_ = value; } + void set_with_file_types(bool value) { with_file_types_ = value; } FSContinuationData* continuation_data() const { return continuation_data_.get(); @@ -116,6 +118,7 @@ class FSReqBase : public ReqWrap { bool has_data_ = false; bool use_bigint_ = false; bool is_plain_open_ = false; + bool with_file_types_ = false; const char* syscall_ = nullptr; BaseObjectPtr binding_data_; From 697dbfb174d802e836b28622577795294c9e9a48 Mon Sep 17 00:00:00 2001 From: NicoNekoru <66227158+NicoNekoru@users.noreply.github.com> Date: Wed, 27 Jul 2022 10:31:00 -0600 Subject: [PATCH 032/177] meta: shorten PowerShell snippet for bug-report template MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/44011 Reviewed-By: Tobias Nießen Reviewed-By: Michaël Zasso Reviewed-By: Feng Yu --- .github/ISSUE_TEMPLATE/1-bug-report.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/1-bug-report.yml b/.github/ISSUE_TEMPLATE/1-bug-report.yml index 3edac8d807d905..c40644bfc6d9ba 100644 --- a/.github/ISSUE_TEMPLATE/1-bug-report.yml +++ b/.github/ISSUE_TEMPLATE/1-bug-report.yml @@ -19,7 +19,7 @@ body: label: Platform description: | UNIX: output of `uname -a` - Windows: output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in PowerShell console + Windows: output of `"$([Environment]::OSVersion.VersionString) $(('x86', 'x64')[[Environment]::Is64BitOperatingSystem])"` in PowerShell console - type: input attributes: label: Subsystem From b90b8abdd5bf10296b691a0c30141b65557187e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Thu, 28 Jul 2022 08:12:54 +0000 Subject: [PATCH 033/177] src: fix bug in GetErrorSource() Refs: https://github.com/nodejs/node/pull/43875 PR-URL: https://github.com/nodejs/node/pull/44019 Reviewed-By: Ben Noordhuis Reviewed-By: Richard Lau Reviewed-By: Chengzhong Wu Reviewed-By: Feng Yu --- src/node_errors.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/node_errors.cc b/src/node_errors.cc index e49556383b41f7..af0164df26eb6f 100644 --- a/src/node_errors.cc +++ b/src/node_errors.cc @@ -105,7 +105,7 @@ static std::string GetErrorSource(Isolate* isolate, if (has_source_map_url && env != nullptr && env->source_maps_enabled()) { std::string source = GetSourceMapErrorSource( isolate, context, message, added_exception_line); - return added_exception_line ? source : sourceline; + return *added_exception_line ? 
source : sourceline; } // Because of how node modules work, all scripts are wrapped with a From 1b9537b6a5042d1238c2adb5d721378aeac35091 Mon Sep 17 00:00:00 2001 From: Keeley Hammond Date: Sat, 30 Jul 2022 06:49:03 -0700 Subject: [PATCH 034/177] doc: claim ABI version for Electron 21 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/44034 Reviewed-By: Michaël Zasso Reviewed-By: Joyee Cheung Reviewed-By: Darshan Sen Reviewed-By: Tobias Nießen Reviewed-By: Richard Lau Reviewed-By: Luigi Pinca --- doc/abi_version_registry.json | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/abi_version_registry.json b/doc/abi_version_registry.json index 0ed54e8e717fe5..b2778a83bf954c 100644 --- a/doc/abi_version_registry.json +++ b/doc/abi_version_registry.json @@ -1,5 +1,6 @@ { "NODE_MODULE_VERSION": [ + { "modules": 109,"runtime": "electron", "variant": "electron", "versions": "21" }, { "modules": 108,"runtime": "node", "variant": "v8_10.1", "versions": "18.0.0" }, { "modules": 107,"runtime": "electron", "variant": "electron", "versions": "20" }, { "modules": 106,"runtime": "electron", "variant": "electron", "versions": "19" }, From 2ae5d853a78826eceb53ad768bdca51d91f8b645 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sat, 30 Jul 2022 17:11:50 +0200 Subject: [PATCH 035/177] doc: fix code examples in `crypto.md` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/44053 Reviewed-By: Tobias Nießen Reviewed-By: Mestery Reviewed-By: Feng Yu --- doc/api/crypto.md | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/doc/api/crypto.md b/doc/api/crypto.md index ce12d9472dc57f..59e07505d4a36e 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -395,11 +395,11 @@ Example: Using `Cipher` and piped streams: import { createReadStream, createWriteStream, -} from 'fs'; +} from 'node:fs'; import { pipeline -} from 'stream'; +} from 'node:stream'; const { scrypt, @@ -675,6 +675,7 @@ const decipher = createDecipheriv(algorithm, key, iv); let decrypted = ''; decipher.on('readable', () => { + let chunk; while (null !== (chunk = decipher.read())) { decrypted += chunk.toString('utf8'); } @@ -711,6 +712,7 @@ const decipher = createDecipheriv(algorithm, key, iv); let decrypted = ''; decipher.on('readable', () => { + let chunk; while (null !== (chunk = decipher.read())) { decrypted += chunk.toString('utf8'); } @@ -733,7 +735,7 @@ Example: Using `Decipher` and piped streams: import { createReadStream, createWriteStream, -} from 'fs'; +} from 'node:fs'; import { Buffer } from 'node:buffer'; const { scryptSync, @@ -3305,7 +3307,7 @@ Example: generating the sha256 sum of a file ```mjs import { createReadStream -} from 'fs'; +} from 'node:fs'; import { argv } from 'node:process'; const { createHash @@ -3391,7 +3393,7 @@ Example: generating the sha256 HMAC of a file ```mjs import { createReadStream -} from 'fs'; +} from 'node:fs'; import { argv } from 'node:process'; const { createHmac From c3fa82f007298f4f80c94e1f274f72f8788d257b Mon Sep 17 00:00:00 2001 From: Moshe Atlow Date: Thu, 30 Jun 2022 10:14:35 +0300 Subject: [PATCH 036/177] test_runner: add before/after/each hooks PR-URL: https://github.com/nodejs/node/pull/43730 Fixes: https://github.com/nodejs/node/issues/43403 Reviewed-By: Benjamin Gruenbaum --- doc/api/test.md | 194 ++++++++++++++++++++ 
lib/internal/test_runner/harness.js | 11 ++ lib/internal/test_runner/test.js | 147 ++++++++++++--- lib/internal/test_runner/utils.js | 6 + lib/test.js | 6 +- test/message/test_runner_describe_it.js | 6 +- test/message/test_runner_hooks.js | 111 ++++++++++++ test/message/test_runner_hooks.out | 229 ++++++++++++++++++++++++ 8 files changed, 680 insertions(+), 30 deletions(-) create mode 100644 test/message/test_runner_hooks.js create mode 100644 test/message/test_runner_hooks.out diff --git a/doc/api/test.md b/doc/api/test.md index 63591482758d65..159de179ab96d5 100644 --- a/doc/api/test.md +++ b/doc/api/test.md @@ -446,6 +446,120 @@ same as [`it([name], { skip: true }[, fn])`][it options]. Shorthand for marking a test as `TODO`, same as [`it([name], { todo: true }[, fn])`][it options]. +### `before([, fn][, options])` + + + +* `fn` {Function|AsyncFunction} The hook function. + If the hook uses callbacks, + the callback function is passed as the second argument. **Default:** A no-op + function. +* `options` {Object} Configuration options for the hook. The following + properties are supported: + * `signal` {AbortSignal} Allows aborting an in-progress hook + * `timeout` {number} A number of milliseconds the hook will fail after. + If unspecified, subtests inherit this value from their parent. + **Default:** `Infinity`. + +This function is used to create a hook running before running a suite. + +```js +describe('tests', async () => { + before(() => console.log('about to run some test')); + it('is a subtest', () => { + assert.ok('some relevant assertion here'); + }); +}); +``` + +### `after([, fn][, options])` + + + +* `fn` {Function|AsyncFunction} The hook function. + If the hook uses callbacks, + the callback function is passed as the second argument. **Default:** A no-op + function. +* `options` {Object} Configuration options for the hook. The following + properties are supported: + * `signal` {AbortSignal} Allows aborting an in-progress hook + * `timeout` {number} A number of milliseconds the hook will fail after. + If unspecified, subtests inherit this value from their parent. + **Default:** `Infinity`. + +This function is used to create a hook running after running a suite. + +```js +describe('tests', async () => { + after(() => console.log('finished running tests')); + it('is a subtest', () => { + assert.ok('some relevant assertion here'); + }); +}); +``` + +### `beforeEach([, fn][, options])` + + + +* `fn` {Function|AsyncFunction} The hook function. + If the hook uses callbacks, + the callback function is passed as the second argument. **Default:** A no-op + function. +* `options` {Object} Configuration options for the hook. The following + properties are supported: + * `signal` {AbortSignal} Allows aborting an in-progress hook + * `timeout` {number} A number of milliseconds the hook will fail after. + If unspecified, subtests inherit this value from their parent. + **Default:** `Infinity`. + +This function is used to create a hook running +before each subtest of the current suite. + +```js +describe('tests', async () => { + beforeEach(() => t.diagnostics('about to run a test')); + it('is a subtest', () => { + assert.ok('some relevant assertion here'); + }); +}); +``` + +### `afterEach([, fn][, options])` + + + +* `fn` {Function|AsyncFunction} The hook function. + If the hook uses callbacks, + the callback function is passed as the second argument. **Default:** A no-op + function. +* `options` {Object} Configuration options for the hook. 
The following + properties are supported: + * `signal` {AbortSignal} Allows aborting an in-progress hook + * `timeout` {number} A number of milliseconds the hook will fail after. + If unspecified, subtests inherit this value from their parent. + **Default:** `Infinity`. + +This function is used to create a hook running +after each subtest of the current test. + +```js +describe('tests', async () => { + afterEach(() => t.diagnostics('about to run a test')); + it('is a subtest', () => { + assert.ok('some relevant assertion here'); + }); +}); +``` + ## Class: `TestContext` + +* `fn` {Function|AsyncFunction} The hook function. The first argument + to this function is a [`TestContext`][] object. If the hook uses callbacks, + the callback function is passed as the second argument. **Default:** A no-op + function. +* `options` {Object} Configuration options for the hook. The following + properties are supported: + * `signal` {AbortSignal} Allows aborting an in-progress hook + * `timeout` {number} A number of milliseconds the hook will fail after. + If unspecified, subtests inherit this value from their parent. + **Default:** `Infinity`. + +This function is used to create a hook running +before each subtest of the current test. + +```js +test('top level test', async (t) => { + t.beforeEach((t) => t.diagnostics(`about to run ${t.name}`)); + await t.test( + 'This is a subtest', + (t) => { + assert.ok('some relevant assertion here'); + } + ); +}); +``` + +### `context.afterEach([, fn][, options])` + + + +* `fn` {Function|AsyncFunction} The hook function. The first argument + to this function is a [`TestContext`][] object. If the hook uses callbacks, + the callback function is passed as the second argument. **Default:** A no-op + function. +* `options` {Object} Configuration options for the hook. The following + properties are supported: + * `signal` {AbortSignal} Allows aborting an in-progress hook + * `timeout` {number} A number of milliseconds the hook will fail after. + If unspecified, subtests inherit this value from their parent. + **Default:** `Infinity`. + +This function is used to create a hook running +after each subtest of the current test. + +```js +test('top level test', async (t) => { + t.afterEach((t) => t.diagnostics(`finished running ${t.name}`)); + await t.test( + 'This is a subtest', + (t) => { + assert.ok('some relevant assertion here'); + } + ); +}); +``` + ### `context.diagnostic(message)` + +The name of the test + ### `context.runOnly(shouldRunOnlyTests)` + +The name of the suite + ### `context.signal` -The name of the test +The name of the test. ### `context.runOnly(shouldRunOnlyTests)` @@ -809,7 +809,7 @@ exposed as part of the API. added: REPLACEME --> -The name of the suite +The name of the suite. ### `context.signal` From ea48c5673b5b21d6617b7e9a662302b9673b2ca5 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sun, 31 Jul 2022 10:16:01 -0700 Subject: [PATCH 044/177] build: skip test-internet run on forks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Do not run test-internet scheduled GitHub Action on forks. 
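A minimal sketch of where the guard sits in a workflow (not part of the one-line diff below; the workflow name, cron schedule, and step are placeholder values):

```yaml
name: example-scheduled-job
on:
  schedule:
    - cron: '0 0 * * *'
jobs:
  test-internet:
    # Same condition this patch adds to .github/workflows/test-internet.yml:
    # the scheduled job only runs in the upstream repository, never on forks.
    if: github.repository == 'nodejs/node'
    runs-on: ubuntu-latest
    steps:
      - run: echo "running in nodejs/node only"
```

A later patch in this series relaxes the condition to `github.repository == 'nodejs/node' || github.event_name != 'schedule'` so that non-scheduled runs on forks still work.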
PR-URL: https://github.com/nodejs/node/pull/44054 Reviewed-By: Antoine du Hamel Reviewed-By: Juan José Arboleda Reviewed-By: Michaël Zasso Reviewed-By: Darshan Sen --- .github/workflows/test-internet.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index 4fcb18a14f5ce9..85d037c9bee5eb 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -29,6 +29,7 @@ permissions: jobs: test-internet: + if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 From 3ba75b341b0bd6f71679239ad8392b103754aba0 Mon Sep 17 00:00:00 2001 From: Daeyeon Jeong Date: Mon, 1 Aug 2022 15:37:45 +0900 Subject: [PATCH 045/177] net,tls: pass a valid socket on `tlsClientError` On the 'tlsClientError' event, the `tlsSocket` instance is passed as `closed` status. Thus, users can't get information such as `remote address`, `remoteFamily`, and so on. This adds a flag to close a socket after emitting an `error` event. Signed-off-by: Daeyeon Jeong daeyeon.dev@gmail.com PR-URL: https://github.com/nodejs/node/pull/44021 Fixes: https://github.com/nodejs/node/issues/43963 Reviewed-By: Paolo Insogna Reviewed-By: Matteo Collina --- lib/_tls_wrap.js | 4 +++ lib/net.js | 33 +++++++++++++++++++------ test/internet/test-https-issue-43963.js | 31 +++++++++++++++++++++++ 3 files changed, 61 insertions(+), 7 deletions(-) create mode 100644 test/internet/test-https-issue-43963.js diff --git a/lib/_tls_wrap.js b/lib/_tls_wrap.js index 50157e4fcd3e9d..11ff14d725b36b 100644 --- a/lib/_tls_wrap.js +++ b/lib/_tls_wrap.js @@ -423,6 +423,10 @@ function onerror(err) { if (!owner._secureEstablished) { // When handshake fails control is not yet released, // so self._tlsError will return null instead of actual error + + // Set closing the socket after emitting an event since the socket needs to + // be accessible when the `tlsClientError` event is emmited. + owner._closeAfterHandlingError = true; owner.destroy(err); } else if (owner._tlsOptions?.isServer && owner._rejectUnauthorized && diff --git a/lib/net.js b/lib/net.js index 02eb9c0d4fe5a5..66fc3578ca57fa 100644 --- a/lib/net.js +++ b/lib/net.js @@ -102,6 +102,7 @@ const { uvExceptionWithHostPort, } = require('internal/errors'); const { isUint8Array } = require('internal/util/types'); +const { queueMicrotask } = require('internal/process/task_queues'); const { validateAbortSignal, validateFunction, @@ -284,6 +285,19 @@ function initSocketHandle(self) { } } +function closeSocketHandle(self, isException, isCleanupPending = false) { + if (self._handle) { + self._handle.close(() => { + debug('emit close'); + self.emit('close', isException); + if (isCleanupPending) { + self._handle.onread = noop; + self._handle = null; + self._sockname = null; + } + }); + } +} const kBytesRead = Symbol('kBytesRead'); const kBytesWritten = Symbol('kBytesWritten'); @@ -332,6 +346,7 @@ function Socket(options) { this[kBuffer] = null; this[kBufferCb] = null; this[kBufferGen] = null; + this._closeAfterHandlingError = false; if (typeof options === 'number') options = { fd: options }; // Legacy interface. 
@@ -751,15 +766,19 @@ Socket.prototype._destroy = function(exception, cb) { }); if (err) this.emit('error', errnoException(err, 'reset')); + } else if (this._closeAfterHandlingError) { + // Enqueue closing the socket as a microtask, so that the socket can be + // accessible when an `error` event is handled in the `next tick queue`. + queueMicrotask(() => closeSocketHandle(this, isException, true)); } else { - this._handle.close(() => { - debug('emit close'); - this.emit('close', isException); - }); + closeSocketHandle(this, isException); + } + + if (!this._closeAfterHandlingError) { + this._handle.onread = noop; + this._handle = null; + this._sockname = null; } - this._handle.onread = noop; - this._handle = null; - this._sockname = null; cb(exception); } else { cb(exception); diff --git a/test/internet/test-https-issue-43963.js b/test/internet/test-https-issue-43963.js new file mode 100644 index 00000000000000..0d5a6109145d1b --- /dev/null +++ b/test/internet/test-https-issue-43963.js @@ -0,0 +1,31 @@ +'use strict'; +const common = require('../common'); +const https = require('node:https'); +const assert = require('node:assert'); + +const server = https.createServer(); + +server.on( + 'tlsClientError', + common.mustCall((exception, tlsSocket) => { + assert.strictEqual(exception !== undefined, true); + assert.strictEqual(Object.keys(tlsSocket.address()).length !== 0, true); + assert.strictEqual(tlsSocket.localAddress !== undefined, true); + assert.strictEqual(tlsSocket.localPort !== undefined, true); + assert.strictEqual(tlsSocket.remoteAddress !== undefined, true); + assert.strictEqual(tlsSocket.remoteFamily !== undefined, true); + assert.strictEqual(tlsSocket.remotePort !== undefined, true); + }), +); + +server.listen(0, () => { + const req = https.request({ + hostname: '127.0.0.1', + port: server.address().port, + }); + req.on( + 'error', + common.mustCall(() => server.close()), + ); + req.end(); +}); From fe83d514b278ff978e57f68b8351173dff3dd404 Mon Sep 17 00:00:00 2001 From: Ruy Adorno Date: Mon, 1 Aug 2022 14:47:56 -0400 Subject: [PATCH 046/177] doc: update collaborator email MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Updating my email on the collaborator section of README. 
PR-URL: https://github.com/nodejs/node/pull/44044 Reviewed-By: Luigi Pinca Reviewed-By: Mestery Reviewed-By: Antoine du Hamel Reviewed-By: Tobias Nießen Reviewed-By: Feng Yu Reviewed-By: Rich Trott Reviewed-By: Juan José Arboleda Reviewed-By: Darshan Sen --- .mailmap | 2 ++ AUTHORS | 4 ++-- README.md | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.mailmap b/.mailmap index f922b76746784f..4b04dcae299a90 100644 --- a/.mailmap +++ b/.mailmap @@ -429,6 +429,8 @@ Ron Korving Ruben Bridgewater Ruben Bridgewater Russell Dempsey +Ruy Adorno +Ruy Adorno Ryan Dahl Ryan Emery Ryan Mahan diff --git a/AUTHORS b/AUTHORS index f792f6357fe1a4..f6d15432158c4f 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1048,7 +1048,7 @@ Joran Siu Vitaly Tomilov Ratikesh Misra Alex Perkins -Beth Griggs +Beth Griggs Joe Esposito Erin Spiceland Ravindra Barthwal @@ -1784,7 +1784,7 @@ Christopher Sidebottom Edward Andrew Robinson Shakeel Mohamed Tobias Kieslich -Ruy Adorno +Ruy Adorno Stefania Sharp Pawel Golda Steven Scott diff --git a/README.md b/README.md index 4aff265ff62039..c36935ec02ec24 100644 --- a/README.md +++ b/README.md @@ -405,7 +405,7 @@ For information about the governance of the Node.js project, see * [ronag](https://github.com/ronag) - **Robert Nagy** <> * [ruyadorno](https://github.com/ruyadorno) - - **Ruy Adorno** <> (he/him) + **Ruy Adorno** <> (he/him) * [rvagg](https://github.com/rvagg) - **Rod Vagg** <> * [ryzokuken](https://github.com/ryzokuken) - From 58f2739e32ca39fc8e09a8c0b211a7d3e520e82e Mon Sep 17 00:00:00 2001 From: Erick Wendel Date: Mon, 1 Aug 2022 17:46:52 -0300 Subject: [PATCH 047/177] doc: add ErickWendel to collaborators Fixes: https://github.com/nodejs/node/issues/43755 PR-URL: https://github.com/nodejs/node/pull/44088 Reviewed-By: Rich Trott Reviewed-By: Colin Ihrig Reviewed-By: Evan Lucas --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index c36935ec02ec24..78645b0e526667 100644 --- a/README.md +++ b/README.md @@ -310,6 +310,8 @@ For information about the governance of the Node.js project, see **Daniele Belardi** <> (he/him) * [edsadr](https://github.com/edsadr) - **Adrian Estrada** <> (he/him) +* [erickwendel](https://github.com/erickwendel) - + **Erick Wendel** <> (he/him) * [evanlucas](https://github.com/evanlucas) - **Evan Lucas** <> (he/him) * [fhinkel](https://github.com/fhinkel) - From a996f53c784a7a6e126930fc805d937bc365eb07 Mon Sep 17 00:00:00 2001 From: "Node.js GitHub Bot" Date: Tue, 2 Aug 2022 00:05:33 -0400 Subject: [PATCH 048/177] meta: update AUTHORS MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/44065 Reviewed-By: Rich Trott Reviewed-By: Luigi Pinca Reviewed-By: Darshan Sen Reviewed-By: Tobias Nießen --- .mailmap | 2 ++ AUTHORS | 12 +++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.mailmap b/.mailmap index 4b04dcae299a90..d17efa8d5beaa7 100644 --- a/.mailmap +++ b/.mailmap @@ -287,6 +287,8 @@ Ke Ding Keith M Wesolowski Kelsey Breseman Kevin Millikin +Keyhan Vakil <60900335+airtable-keyhanvakil@users.noreply.github.com> +Keyhan Vakil Khaidi Chu Khaidi Chu Kimberly Wilber diff --git a/AUTHORS b/AUTHORS index f6d15432158c4f..9b6ae6fdabc22e 100644 --- a/AUTHORS +++ b/AUTHORS @@ -3457,7 +3457,7 @@ Yagiz 
Nizipli liuxingbaoyu <30521560+liuxingbaoyu@users.noreply.github.com> Sergey Nazaryev William Marlow -Keyhan Vakil <60900335+airtable-keyhanvakil@users.noreply.github.com> +Keyhan Vakil Feng Yu pupilTong rikapo @@ -3490,5 +3490,15 @@ John Gee Hrishikesh Kadam KrayzeeKev Airing +jiahao.si +Rhys +Mark S. Miller +Jianru Lin +Lenvin Gonsalves <41874033+98lenvi@users.noreply.github.com> +Andreu Botella +shhh7612 <82669765+shhh7612@users.noreply.github.com> +Dominic Saadi +ywave620 <60539365+ywave620@users.noreply.github.com> +NicoNekoru <66227158+NicoNekoru@users.noreply.github.com> # Generated by tools/update-authors.mjs From d8749c3b872799b7e627b5b2604579ab10b4221c Mon Sep 17 00:00:00 2001 From: Moshe Atlow Date: Tue, 2 Aug 2022 08:52:41 +0300 Subject: [PATCH 049/177] test_runner: verbous error when entire test tree is canceled PR-URL: https://github.com/nodejs/node/pull/44060 Reviewed-By: Antoine du Hamel Reviewed-By: Benjamin Gruenbaum --- lib/internal/test_runner/harness.js | 6 ++++-- lib/internal/test_runner/test.js | 8 ++++---- test/message/test_runner_no_refs.out | 4 ++-- test/message/test_runner_unresolved_promise.out | 4 ++-- 4 files changed, 12 insertions(+), 10 deletions(-) diff --git a/lib/internal/test_runner/harness.js b/lib/internal/test_runner/harness.js index e9f0907c4d490d..360fe141fe70a1 100644 --- a/lib/internal/test_runner/harness.js +++ b/lib/internal/test_runner/harness.js @@ -14,7 +14,7 @@ const { }, } = require('internal/errors'); const { getOptionValue } = require('internal/options'); -const { Test, ItTest, Suite } = require('internal/test_runner/test'); +const { kCancelledByParent, Test, ItTest, Suite } = require('internal/test_runner/test'); const isTestRunner = getOptionValue('--test'); const testResources = new SafeMap(); @@ -77,7 +77,9 @@ function setup(root) { createProcessEventHandler('unhandledRejection', root); const exitHandler = () => { - root.postRun(); + root.postRun(new ERR_TEST_FAILURE( + 'Promise resolution is still pending but the event loop has already resolved', + kCancelledByParent)); let passCount = 0; let failCount = 0; diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index 9ea04dfceba527..e2d068e87bb051 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -511,7 +511,7 @@ class Test extends AsyncResource { this.postRun(); } - postRun() { + postRun(pendingSubtestsError) { let failedSubtests = 0; // If the test was failed before it even started, then the end time will @@ -528,8 +528,8 @@ class Test extends AsyncResource { const subtest = this.subtests[i]; if (!subtest.finished) { - subtest.cancel(); - subtest.postRun(); + subtest.cancel(pendingSubtestsError); + subtest.postRun(pendingSubtestsError); } if (!subtest.passed) { @@ -691,4 +691,4 @@ class Suite extends Test { } } -module.exports = { kDefaultIndent, kSubtestsFailed, kTestCodeFailure, Test, Suite, ItTest }; +module.exports = { kCancelledByParent, kDefaultIndent, kSubtestsFailed, kTestCodeFailure, Test, Suite, ItTest }; diff --git a/test/message/test_runner_no_refs.out b/test/message/test_runner_no_refs.out index 63b79cd57d777f..e8560c5720b762 100644 --- a/test/message/test_runner_no_refs.out +++ b/test/message/test_runner_no_refs.out @@ -5,7 +5,7 @@ TAP version 13 --- duration_ms: * failureType: 'cancelledByParent' - error: 'test did not finish before its parent and was cancelled' + error: 'Promise resolution is still pending but the event loop has already resolved' code: 'ERR_TEST_FAILURE' stack: |- * @@ 
-15,7 +15,7 @@ not ok 1 - does not keep event loop alive --- duration_ms: * failureType: 'cancelledByParent' - error: 'test did not finish before its parent and was cancelled' + error: 'Promise resolution is still pending but the event loop has already resolved' code: 'ERR_TEST_FAILURE' stack: |- * diff --git a/test/message/test_runner_unresolved_promise.out b/test/message/test_runner_unresolved_promise.out index 2bb543cd4554fe..b4d6cba4ca1b43 100644 --- a/test/message/test_runner_unresolved_promise.out +++ b/test/message/test_runner_unresolved_promise.out @@ -9,7 +9,7 @@ not ok 2 - never resolving promise --- duration_ms: * failureType: 'cancelledByParent' - error: 'test did not finish before its parent and was cancelled' + error: 'Promise resolution is still pending but the event loop has already resolved' code: 'ERR_TEST_FAILURE' stack: |- * @@ -19,7 +19,7 @@ not ok 3 - fail --- duration_ms: 0 failureType: 'cancelledByParent' - error: 'test did not finish before its parent and was cancelled' + error: 'Promise resolution is still pending but the event loop has already resolved' code: 'ERR_TEST_FAILURE' stack: |- * From ec1b31e6ad55e20289bb650a7e279145bcad7e04 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 2 Aug 2022 04:21:40 -0700 Subject: [PATCH 050/177] build: allow test-internet on forks if not scheduled MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://github.com/nodejs/node/pull/44054#issuecomment-1200246681 PR-URL: https://github.com/nodejs/node/pull/44073 Reviewed-By: Michaël Zasso Reviewed-By: Antoine du Hamel Reviewed-By: Tobias Nießen Reviewed-By: Mestery --- .github/workflows/test-internet.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index 85d037c9bee5eb..1e968652896f1e 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -29,7 +29,7 @@ permissions: jobs: test-internet: - if: github.repository == 'nodejs/node' + if: github.repository == 'nodejs/node' || github.event_name != 'schedule' runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 From c4783e37d70c007709aadf56d76d6efd070bec6f Mon Sep 17 00:00:00 2001 From: legendecas Date: Sun, 31 Jul 2022 23:16:48 +0800 Subject: [PATCH 051/177] src: nest namespace report in namespace node PR-URL: https://github.com/nodejs/node/pull/44069 Reviewed-By: Darshan Sen Reviewed-By: Colin Ihrig Reviewed-By: Feng Yu Reviewed-By: Minwoo Jung --- src/node_report.cc | 43 +++++++++++++++---------------------- src/node_report.h | 15 ++++++++++--- src/node_report_module.cc | 45 ++++++++++++++++++--------------------- src/node_report_utils.cc | 8 +++---- 4 files changed, 53 insertions(+), 58 deletions(-) diff --git a/src/node_report.cc b/src/node_report.cc index edc8ab802b06f7..455d5c4e875060 100644 --- a/src/node_report.cc +++ b/src/node_report.cc @@ -27,15 +27,9 @@ constexpr int NODE_REPORT_VERSION = 2; constexpr int NANOS_PER_SEC = 1000 * 1000 * 1000; constexpr double SEC_PER_MICROS = 1e-6; +namespace node { namespace report { -using node::arraysize; -using node::ConditionVariable; -using node::DiagnosticFilename; -using node::Environment; -using node::JSONWriter; -using node::Mutex; -using node::NativeSymbolDebuggingContext; -using node::TIME_TYPE; + using node::worker::Worker; using v8::Array; using v8::Context; @@ -54,8 +48,6 @@ using v8::TryCatch; using v8::V8; using v8::Value; -namespace 
per_process = node::per_process; - // Internal/static function declarations static void WriteNodeReport(Isolate* isolate, Environment* env, @@ -129,7 +121,7 @@ std::string TriggerNodeReport(Isolate* isolate, // Regular file. Append filename to directory path if one was specified if (report_directory.length() > 0) { std::string pathname = report_directory; - pathname += node::kPathSeparator; + pathname += kPathSeparator; pathname += filename; outfile.open(pathname, std::ios::out | std::ios::binary); } else { @@ -260,9 +252,9 @@ static void WriteNodeReport(Isolate* isolate, } // Report out the command line. - if (!node::per_process::cli_options->cmdline.empty()) { + if (!per_process::cli_options->cmdline.empty()) { writer.json_arraystart("commandLine"); - for (const std::string& arg : node::per_process::cli_options->cmdline) { + for (const std::string& arg : per_process::cli_options->cmdline) { writer.json_element(arg); } writer.json_arrayend(); @@ -377,8 +369,8 @@ static void PrintVersionInformation(JSONWriter* writer) { // Report Process word size writer->json_keyvalue("wordSize", sizeof(void*) * 8); - writer->json_keyvalue("arch", node::per_process::metadata.arch); - writer->json_keyvalue("platform", node::per_process::metadata.platform); + writer->json_keyvalue("arch", per_process::metadata.arch); + writer->json_keyvalue("platform", per_process::metadata.platform); // Report deps component versions PrintComponentVersions(writer); @@ -528,7 +520,7 @@ static Maybe ErrorToString(Isolate* isolate, maybe_str = error->ToString(context); } else if (error->IsObject()) { MaybeLocal stack = error.As()->Get( - context, node::FIXED_ONE_BYTE_STRING(isolate, "stack")); + context, FIXED_ONE_BYTE_STRING(isolate, "stack")); if (!stack.IsEmpty() && stack.ToLocalChecked()->IsString()) { maybe_str = stack.ToLocalChecked().As(); } @@ -656,7 +648,7 @@ static void PrintGCStatistics(JSONWriter* writer, Isolate* isolate) { static void PrintResourceUsage(JSONWriter* writer) { // Get process uptime in seconds uint64_t uptime = - (uv_hrtime() - node::per_process::node_start_time) / (NANOS_PER_SEC); + (uv_hrtime() - per_process::node_start_time) / (NANOS_PER_SEC); if (uptime == 0) uptime = 1; // avoid division by zero. // Process and current thread usage statistics @@ -714,7 +706,7 @@ static void PrintSystemInformation(JSONWriter* writer) { writer->json_objectstart("environmentVariables"); { - Mutex::ScopedLock lock(node::per_process::env_var_mutex); + Mutex::ScopedLock lock(per_process::env_var_mutex); r = uv_os_environ(&envitems, &envcount); } @@ -794,8 +786,7 @@ static void PrintComponentVersions(JSONWriter* writer) { writer->json_objectstart("componentVersions"); -#define V(key) \ - writer->json_keyvalue(#key, node::per_process::metadata.versions.key); +#define V(key) writer->json_keyvalue(#key, per_process::metadata.versions.key); NODE_VERSIONS_KEYS(V) #undef V @@ -805,18 +796,17 @@ static void PrintComponentVersions(JSONWriter* writer) { // Report runtime release information. 
static void PrintRelease(JSONWriter* writer) { writer->json_objectstart("release"); - writer->json_keyvalue("name", node::per_process::metadata.release.name); + writer->json_keyvalue("name", per_process::metadata.release.name); #if NODE_VERSION_IS_LTS - writer->json_keyvalue("lts", node::per_process::metadata.release.lts); + writer->json_keyvalue("lts", per_process::metadata.release.lts); #endif #ifdef NODE_HAS_RELEASE_URLS writer->json_keyvalue("headersUrl", - node::per_process::metadata.release.headers_url); - writer->json_keyvalue("sourceUrl", - node::per_process::metadata.release.source_url); + per_process::metadata.release.headers_url); + writer->json_keyvalue("sourceUrl", per_process::metadata.release.source_url); #ifdef _WIN32 - writer->json_keyvalue("libUrl", node::per_process::metadata.release.lib_url); + writer->json_keyvalue("libUrl", per_process::metadata.release.lib_url); #endif // _WIN32 #endif // NODE_HAS_RELEASE_URLS @@ -824,3 +814,4 @@ static void PrintRelease(JSONWriter* writer) { } } // namespace report +} // namespace node diff --git a/src/node_report.h b/src/node_report.h index a8292eb2dd477d..dde48f14ec0f43 100644 --- a/src/node_report.h +++ b/src/node_report.h @@ -1,4 +1,7 @@ -#pragma once +#ifndef SRC_NODE_REPORT_H_ +#define SRC_NODE_REPORT_H_ + +#if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #include "node.h" #include "node_buffer.h" @@ -12,17 +15,18 @@ #include +namespace node { namespace report { // Function declarations - functions in src/node_report.cc std::string TriggerNodeReport(v8::Isolate* isolate, - node::Environment* env, + Environment* env, const char* message, const char* trigger, const std::string& name, v8::Local error); void GetNodeReport(v8::Isolate* isolate, - node::Environment* env, + Environment* env, const char* message, const char* trigger, v8::Local error, @@ -45,3 +49,8 @@ void WriteReport(const v8::FunctionCallbackInfo& info); void GetReport(const v8::FunctionCallbackInfo& info); } // namespace report +} // namespace node + +#endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS + +#endif // SRC_NODE_REPORT_H_ diff --git a/src/node_report_module.cc b/src/node_report_module.cc index b57a933972d6c0..29da71b9e2a71f 100644 --- a/src/node_report_module.cc +++ b/src/node_report_module.cc @@ -15,11 +15,8 @@ #include #include +namespace node { namespace report { -using node::Environment; -using node::Mutex; -using node::SetMethod; -using node::Utf8Value; using v8::Context; using v8::FunctionCallbackInfo; using v8::HandleScope; @@ -77,48 +74,48 @@ void GetReport(const FunctionCallbackInfo& info) { } static void GetCompact(const FunctionCallbackInfo& info) { - node::Mutex::ScopedLock lock(node::per_process::cli_options_mutex); - info.GetReturnValue().Set(node::per_process::cli_options->report_compact); + Mutex::ScopedLock lock(per_process::cli_options_mutex); + info.GetReturnValue().Set(per_process::cli_options->report_compact); } static void SetCompact(const FunctionCallbackInfo& info) { - node::Mutex::ScopedLock lock(node::per_process::cli_options_mutex); + Mutex::ScopedLock lock(per_process::cli_options_mutex); Environment* env = Environment::GetCurrent(info); Isolate* isolate = env->isolate(); bool compact = info[0]->ToBoolean(isolate)->Value(); - node::per_process::cli_options->report_compact = compact; + per_process::cli_options->report_compact = compact; } static void GetDirectory(const FunctionCallbackInfo& info) { - node::Mutex::ScopedLock lock(node::per_process::cli_options_mutex); + Mutex::ScopedLock 
lock(per_process::cli_options_mutex); Environment* env = Environment::GetCurrent(info); - std::string directory = node::per_process::cli_options->report_directory; + std::string directory = per_process::cli_options->report_directory; auto result = String::NewFromUtf8(env->isolate(), directory.c_str()); info.GetReturnValue().Set(result.ToLocalChecked()); } static void SetDirectory(const FunctionCallbackInfo& info) { - node::Mutex::ScopedLock lock(node::per_process::cli_options_mutex); + Mutex::ScopedLock lock(per_process::cli_options_mutex); Environment* env = Environment::GetCurrent(info); CHECK(info[0]->IsString()); Utf8Value dir(env->isolate(), info[0].As()); - node::per_process::cli_options->report_directory = *dir; + per_process::cli_options->report_directory = *dir; } static void GetFilename(const FunctionCallbackInfo& info) { - node::Mutex::ScopedLock lock(node::per_process::cli_options_mutex); + Mutex::ScopedLock lock(per_process::cli_options_mutex); Environment* env = Environment::GetCurrent(info); - std::string filename = node::per_process::cli_options->report_filename; + std::string filename = per_process::cli_options->report_filename; auto result = String::NewFromUtf8(env->isolate(), filename.c_str()); info.GetReturnValue().Set(result.ToLocalChecked()); } static void SetFilename(const FunctionCallbackInfo& info) { - node::Mutex::ScopedLock lock(node::per_process::cli_options_mutex); + Mutex::ScopedLock lock(per_process::cli_options_mutex); Environment* env = Environment::GetCurrent(info); CHECK(info[0]->IsString()); Utf8Value name(env->isolate(), info[0].As()); - node::per_process::cli_options->report_filename = *name; + per_process::cli_options->report_filename = *name; } static void GetSignal(const FunctionCallbackInfo& info) { @@ -136,15 +133,14 @@ static void SetSignal(const FunctionCallbackInfo& info) { } static void ShouldReportOnFatalError(const FunctionCallbackInfo& info) { - Mutex::ScopedLock lock(node::per_process::cli_options_mutex); - info.GetReturnValue().Set( - node::per_process::cli_options->report_on_fatalerror); + Mutex::ScopedLock lock(per_process::cli_options_mutex); + info.GetReturnValue().Set(per_process::cli_options->report_on_fatalerror); } static void SetReportOnFatalError(const FunctionCallbackInfo& info) { CHECK(info[0]->IsBoolean()); - Mutex::ScopedLock lock(node::per_process::cli_options_mutex); - node::per_process::cli_options->report_on_fatalerror = info[0]->IsTrue(); + Mutex::ScopedLock lock(per_process::cli_options_mutex); + per_process::cli_options->report_on_fatalerror = info[0]->IsTrue(); } static void ShouldReportOnSignal(const FunctionCallbackInfo& info) { @@ -201,7 +197,7 @@ static void Initialize(Local exports, SetReportOnUncaughtException); } -void RegisterExternalReferences(node::ExternalReferenceRegistry* registry) { +void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(WriteReport); registry->Register(GetReport); registry->Register(GetCompact); @@ -221,6 +217,7 @@ void RegisterExternalReferences(node::ExternalReferenceRegistry* registry) { } } // namespace report +} // namespace node -NODE_MODULE_CONTEXT_AWARE_INTERNAL(report, report::Initialize) -NODE_MODULE_EXTERNAL_REFERENCE(report, report::RegisterExternalReferences) +NODE_MODULE_CONTEXT_AWARE_INTERNAL(report, node::report::Initialize) +NODE_MODULE_EXTERNAL_REFERENCE(report, node::report::RegisterExternalReferences) diff --git a/src/node_report_utils.cc b/src/node_report_utils.cc index 6d8b211b6d1c51..322bc7d5d8579a 100644 --- 
a/src/node_report_utils.cc +++ b/src/node_report_utils.cc @@ -3,11 +3,9 @@ #include "node_report.h" #include "util-inl.h" +namespace node { namespace report { -using node::JSONWriter; -using node::MallocedBuffer; - static constexpr auto null = JSONWriter::Null{}; // Utility function to format socket information. @@ -210,8 +208,7 @@ void WalkHandle(uv_handle_t* h, void* arg) { // SIGWINCH is used by libuv so always appears. // See http://docs.libuv.org/en/v1.x/signal.html writer->json_keyvalue("signum", handle->signal.signum); - writer->json_keyvalue("signal", - node::signo_string(handle->signal.signum)); + writer->json_keyvalue("signal", signo_string(handle->signal.signum)); break; default: break; @@ -269,3 +266,4 @@ void WalkHandle(uv_handle_t* h, void* arg) { } } // namespace report +} // namespace node From 38cdb1f9b6f5e89c38d405ee2a09e19f9e13f8ac Mon Sep 17 00:00:00 2001 From: Keyhan Vakil Date: Tue, 2 Aug 2022 21:04:35 -0700 Subject: [PATCH 052/177] src: remove usages of GetBackingStore in startup This removes all usages of GetBackingStore in startup. See the linked issue for an explanation. Refs: https://github.com/nodejs/node/issues/32226 Refs: https://github.com/nodejs/node/pull/43921 PR-URL: https://github.com/nodejs/node/pull/44078 Reviewed-By: Darshan Sen Reviewed-By: Matteo Collina Reviewed-By: Feng Yu Reviewed-By: Chengzhong Wu Reviewed-By: Anna Henningsen --- src/node.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/node.cc b/src/node.cc index fb02398df18cef..5cbe48abd184e9 100644 --- a/src/node.cc +++ b/src/node.cc @@ -266,10 +266,10 @@ static void AtomicsWaitCallback(Isolate::AtomicsWaitEvent event, fprintf(stderr, "(node:%d) [Thread %" PRIu64 "] Atomics.wait(%p + %zx, %" PRId64 - ", %.f) %s\n", + ", %.f) %s\n", static_cast(uv_os_getpid()), env->thread_id(), - array_buffer->GetBackingStore()->Data(), + array_buffer->Data(), offset_in_bytes, value, timeout_in_ms, From a54e4d4170c5ee9df7677f43c234a46761c577c3 Mon Sep 17 00:00:00 2001 From: Keyhan Vakil Date: Tue, 2 Aug 2022 21:04:44 -0700 Subject: [PATCH 053/177] src: remove usages of GetBackingStore in WASI This removes all usages of GetBackingStore in WASI. See the linked issue for an explanation. Refs: https://github.com/nodejs/node/issues/32226 Refs: https://github.com/nodejs/node/pull/43921 PR-URL: https://github.com/nodejs/node/pull/44077 Reviewed-By: Jiawen Geng Reviewed-By: Darshan Sen Reviewed-By: Gus Caplan Reviewed-By: Feng Yu Reviewed-By: Matteo Collina Reviewed-By: Anna Henningsen --- src/node_wasi.cc | 8 +++----- src/node_wasm_web_api.cc | 4 ++-- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/node_wasi.cc b/src/node_wasi.cc index a1b244018a6760..e5905f17a5b27e 100644 --- a/src/node_wasi.cc +++ b/src/node_wasi.cc @@ -73,7 +73,6 @@ inline void Debug(WASI* wasi, Args&&... 
args) { } while (0) using v8::Array; -using v8::BackingStore; using v8::BigInt; using v8::Context; using v8::Exception; @@ -1654,10 +1653,9 @@ void WASI::_SetMemory(const FunctionCallbackInfo& args) { uvwasi_errno_t WASI::backingStore(char** store, size_t* byte_length) { Local memory = PersistentToLocal::Strong(this->memory_); - std::shared_ptr backing_store = - memory->Buffer()->GetBackingStore(); - *byte_length = backing_store->ByteLength(); - *store = static_cast(backing_store->Data()); + Local ab = memory->Buffer(); + *byte_length = ab->ByteLength(); + *store = static_cast(ab->Data()); CHECK_NOT_NULL(*store); return UVWASI_ESUCCESS; } diff --git a/src/node_wasm_web_api.cc b/src/node_wasm_web_api.cc index 7fc423978b2a7c..f327124d388749 100644 --- a/src/node_wasm_web_api.cc +++ b/src/node_wasm_web_api.cc @@ -107,12 +107,12 @@ void WasmStreamingObject::Push(const FunctionCallbackInfo& args) { if (LIKELY(chunk->IsArrayBufferView())) { Local view = chunk.As(); - bytes = view->Buffer()->GetBackingStore()->Data(); + bytes = view->Buffer()->Data(); offset = view->ByteOffset(); size = view->ByteLength(); } else if (LIKELY(chunk->IsArrayBuffer())) { Local buffer = chunk.As(); - bytes = buffer->GetBackingStore()->Data(); + bytes = buffer->Data(); offset = 0; size = buffer->ByteLength(); } else { From cddf3eda285b0b6f5455d1eab302919e9a4e2d85 Mon Sep 17 00:00:00 2001 From: Keyhan Vakil Date: Tue, 2 Aug 2022 21:04:53 -0700 Subject: [PATCH 054/177] src: remove usages of GetBackingStore in modules This removes all usages of GetBackingStore in modules. See the linked issue for an explanation. Refs: https://github.com/nodejs/node/issues/32226 Refs: https://github.com/nodejs/node/pull/43921 PR-URL: https://github.com/nodejs/node/pull/44076 Reviewed-By: Darshan Sen Reviewed-By: Feng Yu Reviewed-By: Matteo Collina Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Anna Henningsen Reviewed-By: Santiago Gimeno --- src/module_wrap.cc | 4 ++-- src/node_contextify.cc | 6 ++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/module_wrap.cc b/src/module_wrap.cc index b445189d388036..f4557ee9915c65 100644 --- a/src/module_wrap.cc +++ b/src/module_wrap.cc @@ -178,8 +178,8 @@ void ModuleWrap::New(const FunctionCallbackInfo& args) { if (!args[5]->IsUndefined()) { CHECK(args[5]->IsArrayBufferView()); Local cached_data_buf = args[5].As(); - uint8_t* data = static_cast( - cached_data_buf->Buffer()->GetBackingStore()->Data()); + uint8_t* data = + static_cast(cached_data_buf->Buffer()->Data()); cached_data = new ScriptCompiler::CachedData(data + cached_data_buf->ByteOffset(), cached_data_buf->ByteLength()); diff --git a/src/node_contextify.cc b/src/node_contextify.cc index 95ba72c3db268c..969296656b1059 100644 --- a/src/node_contextify.cc +++ b/src/node_contextify.cc @@ -746,8 +746,7 @@ void ContextifyScript::New(const FunctionCallbackInfo& args) { ScriptCompiler::CachedData* cached_data = nullptr; if (!cached_data_buf.IsEmpty()) { - uint8_t* data = static_cast( - cached_data_buf->Buffer()->GetBackingStore()->Data()); + uint8_t* data = static_cast(cached_data_buf->Buffer()->Data()); cached_data = new ScriptCompiler::CachedData( data + cached_data_buf->ByteOffset(), cached_data_buf->ByteLength()); } @@ -1068,8 +1067,7 @@ void ContextifyContext::CompileFunction( // Read cache from cached data buffer ScriptCompiler::CachedData* cached_data = nullptr; if (!cached_data_buf.IsEmpty()) { - uint8_t* data = static_cast( - 
cached_data_buf->Buffer()->GetBackingStore()->Data()); + uint8_t* data = static_cast(cached_data_buf->Buffer()->Data()); cached_data = new ScriptCompiler::CachedData( data + cached_data_buf->ByteOffset(), cached_data_buf->ByteLength()); } From 8e1b7e2b8f0c6dcaa99fd67cd0ead4a9b1c2a3d2 Mon Sep 17 00:00:00 2001 From: Keyhan Vakil Date: Tue, 2 Aug 2022 21:05:02 -0700 Subject: [PATCH 055/177] src: remove usages of GetBackingStore in node-api This removes all usages of GetBackingStore in `node-api`. See the linked issue for an explanation. Refs: https://github.com/nodejs/node/issues/32226 Refs: https://github.com/nodejs/node/pull/43921 PR-URL: https://github.com/nodejs/node/pull/44075 Reviewed-By: Darshan Sen Reviewed-By: Jiawen Geng Reviewed-By: Feng Yu Reviewed-By: Matteo Collina Reviewed-By: Chengzhong Wu Reviewed-By: Anna Henningsen --- src/js_native_api_v8.cc | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/src/js_native_api_v8.cc b/src/js_native_api_v8.cc index 5f8e21e0b58c8a..db0cd6eb01e4a6 100644 --- a/src/js_native_api_v8.cc +++ b/src/js_native_api_v8.cc @@ -2780,7 +2780,7 @@ napi_status NAPI_CDECL napi_create_arraybuffer(napi_env env, // Optionally return a pointer to the buffer's data, to avoid another call to // retrieve it. if (data != nullptr) { - *data = buffer->GetBackingStore()->Data(); + *data = buffer->Data(); } *result = v8impl::JsValueFromV8LocalValue(buffer); @@ -2814,15 +2814,14 @@ napi_status NAPI_CDECL napi_get_arraybuffer_info(napi_env env, v8::Local value = v8impl::V8LocalValueFromJsValue(arraybuffer); RETURN_STATUS_IF_FALSE(env, value->IsArrayBuffer(), napi_invalid_arg); - std::shared_ptr backing_store = - value.As()->GetBackingStore(); + v8::Local ab = value.As(); if (data != nullptr) { - *data = backing_store->Data(); + *data = ab->Data(); } if (byte_length != nullptr) { - *byte_length = backing_store->ByteLength(); + *byte_length = ab->ByteLength(); } return napi_clear_last_error(env); @@ -2963,8 +2962,7 @@ napi_status NAPI_CDECL napi_get_typedarray_info(napi_env env, } if (data != nullptr) { - *data = static_cast(buffer->GetBackingStore()->Data()) + - array->ByteOffset(); + *data = static_cast(buffer->Data()) + array->ByteOffset(); } if (arraybuffer != nullptr) { @@ -3044,8 +3042,7 @@ napi_status NAPI_CDECL napi_get_dataview_info(napi_env env, } if (data != nullptr) { - *data = static_cast(buffer->GetBackingStore()->Data()) + - array->ByteOffset(); + *data = static_cast(buffer->Data()) + array->ByteOffset(); } if (arraybuffer != nullptr) { @@ -3255,8 +3252,8 @@ napi_status NAPI_CDECL napi_is_detached_arraybuffer(napi_env env, v8::Local value = v8impl::V8LocalValueFromJsValue(arraybuffer); - *result = value->IsArrayBuffer() && - value.As()->GetBackingStore()->Data() == nullptr; + *result = + value->IsArrayBuffer() && value.As()->Data() == nullptr; return napi_clear_last_error(env); } From a2022e5affa7a6215b28958748934bb90b3322b9 Mon Sep 17 00:00:00 2001 From: Keyhan Vakil Date: Tue, 2 Aug 2022 23:45:45 -0700 Subject: [PATCH 056/177] src: remove unowned usages of GetBackingStore This removes all usages of GetBackingStore without any entries in the `CODEOWNERS` file. For the most part this is a pretty straightforward review; except `SPREAD_BUFFER_ARG` and the changes to `CopyArrayBuffer`. See the linked issue for an explanation. 
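A minimal sketch of the mechanical change applied throughout this series (not part of the diff below; `Example` and `ab` are placeholder names, and it assumes the ArrayBuffer stays alive while its data is in use):

```cpp
#include "v8.h"

// Before/after shape of a typical call site touched by these patches.
void Example(v8::Local<v8::ArrayBuffer> ab) {
  // Before: reach the raw pointer through a shared BackingStore handle.
  void* data_before = ab->GetBackingStore()->Data();
  size_t len_before = ab->GetBackingStore()->ByteLength();

  // After: ask the ArrayBuffer directly for its data pointer and length.
  void* data_after = ab->Data();
  size_t len_after = ab->ByteLength();

  (void)data_before; (void)len_before;
  (void)data_after; (void)len_after;
}
```

The `SPREAD_BUFFER_ARG` and `CopyArrayBuffer` hunks below follow the same idea but additionally have to handle `SharedArrayBuffer` sources and view byte offsets, which is why those two needed a closer look.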
Refs: https://github.com/nodejs/node/issues/32226 Refs: https://github.com/nodejs/node/pull/43921 PR-URL: https://github.com/nodejs/node/pull/44080 Reviewed-By: Darshan Sen Reviewed-By: Matteo Collina Reviewed-By: Anna Henningsen --- src/aliased_buffer.h | 7 ++--- src/node_buffer.cc | 59 ++++++++++++++++++++++--------------- src/node_os.cc | 2 +- src/node_process_methods.cc | 6 ++-- src/node_worker.cc | 4 +-- src/node_zlib.cc | 3 +- src/util-inl.h | 3 +- src/util.h | 19 +++++------- 8 files changed, 53 insertions(+), 50 deletions(-) diff --git a/src/aliased_buffer.h b/src/aliased_buffer.h index 6dda51c14615cc..98ea2d31febce2 100644 --- a/src/aliased_buffer.h +++ b/src/aliased_buffer.h @@ -50,7 +50,7 @@ class AliasedBufferBase { // allocate v8 ArrayBuffer v8::Local ab = v8::ArrayBuffer::New( isolate_, size_in_bytes); - buffer_ = static_cast(ab->GetBackingStore()->Data()); + buffer_ = static_cast(ab->Data()); // allocate v8 TypedArray v8::Local js_array = V8T::New(ab, byte_offset_, count); @@ -119,8 +119,7 @@ class AliasedBufferBase { // be removed when we expand the snapshot support. DCHECK_EQ(count_, arr->Length()); DCHECK_EQ(byte_offset_, arr->ByteOffset()); - uint8_t* raw = - static_cast(arr->Buffer()->GetBackingStore()->Data()); + uint8_t* raw = static_cast(arr->Buffer()->Data()); buffer_ = reinterpret_cast(raw + byte_offset_); js_array_.Reset(isolate_, arr); index_ = nullptr; @@ -278,7 +277,7 @@ class AliasedBufferBase { isolate_, new_size_in_bytes); // allocate new native buffer - NativeT* new_buffer = static_cast(ab->GetBackingStore()->Data()); + NativeT* new_buffer = static_cast(ab->Data()); // copy old content memcpy(new_buffer, buffer_, old_size_in_bytes); diff --git a/src/node_buffer.cc b/src/node_buffer.cc index f8885ab3771b12..641c5f17992daa 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -244,8 +244,7 @@ bool HasInstance(Local obj) { char* Data(Local val) { CHECK(val->IsArrayBufferView()); Local ui = val.As(); - return static_cast(ui->Buffer()->GetBackingStore()->Data()) + - ui->ByteOffset(); + return static_cast(ui->Buffer()->Data()) + ui->ByteOffset(); } @@ -1157,14 +1156,13 @@ static void EncodeInto(const FunctionCallbackInfo& args) { Local dest = args[1].As(); Local buf = dest->Buffer(); - char* write_result = - static_cast(buf->GetBackingStore()->Data()) + dest->ByteOffset(); + char* write_result = static_cast(buf->Data()) + dest->ByteOffset(); size_t dest_length = dest->ByteLength(); // results = [ read, written ] Local result_arr = args[2].As(); uint32_t* results = reinterpret_cast( - static_cast(result_arr->Buffer()->GetBackingStore()->Data()) + + static_cast(result_arr->Buffer()->Data()) + result_arr->ByteOffset()); int nchars; @@ -1228,6 +1226,27 @@ void DetachArrayBuffer(const FunctionCallbackInfo& args) { } } +namespace { + +std::pair DecomposeBufferToParts(Local buffer) { + void* pointer; + size_t byte_length; + if (buffer->IsArrayBuffer()) { + Local ab = buffer.As(); + pointer = ab->Data(); + byte_length = ab->ByteLength(); + } else if (buffer->IsSharedArrayBuffer()) { + Local ab = buffer.As(); + pointer = ab->Data(); + byte_length = ab->ByteLength(); + } else { + UNREACHABLE(); // Caller must validate. 
+ } + return {pointer, byte_length}; +} + +} // namespace + void CopyArrayBuffer(const FunctionCallbackInfo& args) { // args[0] == Destination ArrayBuffer // args[1] == Destination ArrayBuffer Offset @@ -1241,32 +1260,24 @@ void CopyArrayBuffer(const FunctionCallbackInfo& args) { CHECK(args[3]->IsUint32()); CHECK(args[4]->IsUint32()); - std::shared_ptr destination; - std::shared_ptr source; + void* destination; + size_t destination_byte_length; + std::tie(destination, destination_byte_length) = + DecomposeBufferToParts(args[0]); - if (args[0]->IsArrayBuffer()) { - destination = args[0].As()->GetBackingStore(); - } else if (args[0]->IsSharedArrayBuffer()) { - destination = args[0].As()->GetBackingStore(); - } - - if (args[2]->IsArrayBuffer()) { - source = args[2].As()->GetBackingStore(); - } else if (args[0]->IsSharedArrayBuffer()) { - source = args[2].As()->GetBackingStore(); - } + void* source; + size_t source_byte_length; + std::tie(source, source_byte_length) = DecomposeBufferToParts(args[2]); uint32_t destination_offset = args[1].As()->Value(); uint32_t source_offset = args[3].As()->Value(); size_t bytes_to_copy = args[4].As()->Value(); - CHECK_GE(destination->ByteLength() - destination_offset, bytes_to_copy); - CHECK_GE(source->ByteLength() - source_offset, bytes_to_copy); + CHECK_GE(destination_byte_length - destination_offset, bytes_to_copy); + CHECK_GE(source_byte_length - source_offset, bytes_to_copy); - uint8_t* dest = - static_cast(destination->Data()) + destination_offset; - uint8_t* src = - static_cast(source->Data()) + source_offset; + uint8_t* dest = static_cast(destination) + destination_offset; + uint8_t* src = static_cast(source) + source_offset; memcpy(dest, src, bytes_to_copy); } diff --git a/src/node_os.cc b/src/node_os.cc index 5b4d6567fb3b0a..23218310974b7b 100644 --- a/src/node_os.cc +++ b/src/node_os.cc @@ -161,7 +161,7 @@ static void GetLoadAvg(const FunctionCallbackInfo& args) { Local array = args[0].As(); CHECK_EQ(array->Length(), 3); Local ab = array->Buffer(); - double* loadavg = static_cast(ab->GetBackingStore()->Data()); + double* loadavg = static_cast(ab->Data()); uv_loadavg(loadavg); } diff --git a/src/node_process_methods.cc b/src/node_process_methods.cc index 024212132f244c..9d4a5abb0163b6 100644 --- a/src/node_process_methods.cc +++ b/src/node_process_methods.cc @@ -116,7 +116,7 @@ static void CPUUsage(const FunctionCallbackInfo& args) { // Get the double array pointer from the Float64Array argument. Local ab = get_fields_array_buffer(args, 0, 2); - double* fields = static_cast(ab->GetBackingStore()->Data()); + double* fields = static_cast(ab->Data()); // Set the Float64Array elements to be user / system values in microseconds. fields[0] = MICROS_PER_SEC * rusage.ru_utime.tv_sec + rusage.ru_utime.tv_usec; @@ -189,7 +189,7 @@ static void MemoryUsage(const FunctionCallbackInfo& args) { // Get the double array pointer from the Float64Array argument. 
Local ab = get_fields_array_buffer(args, 0, 5); - double* fields = static_cast(ab->GetBackingStore()->Data()); + double* fields = static_cast(ab->Data()); size_t rss; int err = uv_resident_set_memory(&rss); @@ -311,7 +311,7 @@ static void ResourceUsage(const FunctionCallbackInfo& args) { return env->ThrowUVException(err, "uv_getrusage"); Local ab = get_fields_array_buffer(args, 0, 16); - double* fields = static_cast(ab->GetBackingStore()->Data()); + double* fields = static_cast(ab->Data()); fields[0] = MICROS_PER_SEC * rusage.ru_utime.tv_sec + rusage.ru_utime.tv_usec; fields[1] = MICROS_PER_SEC * rusage.ru_stime.tv_sec + rusage.ru_stime.tv_usec; diff --git a/src/node_worker.cc b/src/node_worker.cc index 5ddaae49c7ae00..fa7ee161186a77 100644 --- a/src/node_worker.cc +++ b/src/node_worker.cc @@ -710,9 +710,7 @@ void Worker::GetResourceLimits(const FunctionCallbackInfo& args) { Local Worker::GetResourceLimits(Isolate* isolate) const { Local ab = ArrayBuffer::New(isolate, sizeof(resource_limits_)); - memcpy(ab->GetBackingStore()->Data(), - resource_limits_, - sizeof(resource_limits_)); + memcpy(ab->Data(), resource_limits_, sizeof(resource_limits_)); return Float64Array::New(ab, 0, kTotalResourceLimitCount); } diff --git a/src/node_zlib.cc b/src/node_zlib.cc index e2433d887b421a..c217861e82ae1d 100644 --- a/src/node_zlib.cc +++ b/src/node_zlib.cc @@ -606,8 +606,7 @@ class ZlibStream final : public CompressionStream { CHECK(args[4]->IsUint32Array()); Local array = args[4].As(); Local ab = array->Buffer(); - uint32_t* write_result = static_cast( - ab->GetBackingStore()->Data()); + uint32_t* write_result = static_cast(ab->Data()); CHECK(args[5]->IsFunction()); Local write_js_callback = args[5].As(); diff --git a/src/util-inl.h b/src/util-inl.h index 327893618525f8..caadba9dae2caa 100644 --- a/src/util-inl.h +++ b/src/util-inl.h @@ -537,8 +537,7 @@ void ArrayBufferViewContents::Read(v8::Local abv) { static_assert(sizeof(T) == 1, "Only supports one-byte data at the moment"); length_ = abv->ByteLength(); if (length_ > sizeof(stack_storage_) || abv->HasBuffer()) { - data_ = static_cast(abv->Buffer()->GetBackingStore()->Data()) + - abv->ByteOffset(); + data_ = static_cast(abv->Buffer()->Data()) + abv->ByteOffset(); } else { abv->CopyContents(stack_storage_, sizeof(stack_storage_)); data_ = stack_storage_; diff --git a/src/util.h b/src/util.h index ea3ccb364bd421..290862e21c1144 100644 --- a/src/util.h +++ b/src/util.h @@ -535,17 +535,14 @@ class BufferValue : public MaybeStackBuffer { inline std::string ToString() const { return std::string(out(), length()); } }; -#define SPREAD_BUFFER_ARG(val, name) \ - CHECK((val)->IsArrayBufferView()); \ - v8::Local name = (val).As(); \ - std::shared_ptr name##_bs = \ - name->Buffer()->GetBackingStore(); \ - const size_t name##_offset = name->ByteOffset(); \ - const size_t name##_length = name->ByteLength(); \ - char* const name##_data = \ - static_cast(name##_bs->Data()) + name##_offset; \ - if (name##_length > 0) \ - CHECK_NE(name##_data, nullptr); +#define SPREAD_BUFFER_ARG(val, name) \ + CHECK((val)->IsArrayBufferView()); \ + v8::Local name = (val).As(); \ + const size_t name##_offset = name->ByteOffset(); \ + const size_t name##_length = name->ByteLength(); \ + char* const name##_data = \ + static_cast(name->Buffer()->Data()) + name##_offset; \ + if (name##_length > 0) CHECK_NE(name##_data, nullptr); // Use this when a variable or parameter is unused in order to explicitly // silence a compiler warning about that. 
From e0efd9af50671b6fc7d4441698ad972cd9912cf7 Mon Sep 17 00:00:00 2001 From: Chengzhong Wu Date: Thu, 4 Aug 2022 01:01:53 +0800 Subject: [PATCH 057/177] meta: add codeowner for src/node_snapshot* MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/44113 Reviewed-By: Matteo Collina Reviewed-By: Richard Lau Reviewed-By: Darshan Sen Reviewed-By: Joyee Cheung Reviewed-By: Tobias Nießen --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 6d69bbf54be1d6..87a37ad5054a24 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -108,6 +108,7 @@ /benchmark/misc/startup.js @nodejs/startup /src/node.cc @nodejs/startup /src/node_native_module* @nodejs/startup +/src/node_snapshot* @nodejs/startup /lib/internal/bootstrap/* @nodejs/startup /tools/snapshot/* @nodejs/startup From 1dabdbf05c49bdc7b32a6280c63329a4d787f779 Mon Sep 17 00:00:00 2001 From: SADIK KUZU Date: Thu, 4 Aug 2022 00:48:09 +0300 Subject: [PATCH 058/177] src,test: fix typos MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * src: fix typo in `src/node_messaging.h` (`postMesssage` -> `postMessage`). * test/es-module: fix typo in `test-cjs-exports.js` (`eror` -> `error`). PR-URL: https://github.com/nodejs/node/pull/44110 Reviewed-By: Feng Yu Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Antoine du Hamel Reviewed-By: Mestery Reviewed-By: Anna Henningsen Reviewed-By: Tobias Nießen Reviewed-By: Luigi Pinca --- src/node_messaging.h | 2 +- test/es-module/test-esm-cjs-exports.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/node_messaging.h b/src/node_messaging.h index 643604e4a99f15..6b65d4523e6ac3 100644 --- a/src/node_messaging.h +++ b/src/node_messaging.h @@ -316,7 +316,7 @@ class MessagePort : public HandleWrap { }; // Provide a base class from which JS classes that should be transferable or -// cloneable by postMesssage() can inherit. +// cloneable by postMessage() can inherit. // See e.g. FileHandle in internal/fs/promises.js for an example. class JSTransferable : public BaseObject { public: diff --git a/test/es-module/test-esm-cjs-exports.js b/test/es-module/test-esm-cjs-exports.js index 4a5a9ffceecc2a..4f79d2ce4bcb8c 100644 --- a/test/es-module/test-esm-cjs-exports.js +++ b/test/es-module/test-esm-cjs-exports.js @@ -17,7 +17,7 @@ describe('ESM: importing CJS', { concurrency: true }, () => { assert.strictEqual(stdout, 'ok\n'); }); - it('should eror on invalid CJS exports', async () => { + it('should error on invalid CJS exports', async () => { const invalidEntry = fixtures.path('/es-modules/cjs-exports-invalid.mjs'); const { code, signal, stderr } = await spawnPromisified(execPath, [invalidEntry]); From 02236032f0da8a9705bc48fd272760a6ea69e687 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 3 Aug 2022 14:54:36 -0700 Subject: [PATCH 059/177] doc: update repository list in onboarding doc MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit LTS repo is now Release. readable-stream is important but a bit of a specialized topic. The others listed here are of general interest. 
PR-URL: https://github.com/nodejs/node/pull/44089 Reviewed-By: Antoine du Hamel Reviewed-By: Feng Yu Reviewed-By: Michaël Zasso Reviewed-By: Mestery Reviewed-By: Tobias Nießen Reviewed-By: Luigi Pinca Reviewed-By: Darshan Sen --- onboarding.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/onboarding.md b/onboarding.md index 24f03de0350af6..2bee19f7820805 100644 --- a/onboarding.md +++ b/onboarding.md @@ -241,8 +241,7 @@ needs to be pointed out separately during the onboarding. * * * - * - * + * * * The OpenJS Foundation hosts regular summits for active contributors to the Node.js project, where we have face-to-face discussions about our work on the From 46f8fb8e5384419f062a09fde8636f7f48fbd626 Mon Sep 17 00:00:00 2001 From: Adam Majer Date: Thu, 4 Aug 2022 01:46:23 +0200 Subject: [PATCH 060/177] doc: clarify tls.tlsSocket.getCipher().version The getCipher() returns a tuple that includes protocol version string. This string refers to the minimum protocol version string, as per documentation. What is missing is a reference to the documentation where to get the negotiated cipher for the socket connection and a clearer example. Fixes: https://github.com/nodejs/node/issues/43406 PR-URL: https://github.com/nodejs/node/pull/44086 Reviewed-By: Luigi Pinca Reviewed-By: Ben Noordhuis --- doc/api/tls.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/doc/api/tls.md b/doc/api/tls.md index aa08ec62e948d6..1d4db99a11bf22 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -1098,17 +1098,17 @@ changes: * `name` {string} OpenSSL name for the cipher suite. * `standardName` {string} IETF name for the cipher suite. * `version` {string} The minimum TLS protocol version supported by this cipher - suite. + suite. For the actual negotiated protocol, see [`tls.TLSSocket.getProtocol()`][]. Returns an object containing information on the negotiated cipher suite. -For example: +For example, a TLSv1.2 protocol with AES256-SHA cipher: ```json { - "name": "AES128-SHA256", - "standardName": "TLS_RSA_WITH_AES_128_CBC_SHA256", - "version": "TLSv1.2" + "name": "AES256-SHA", + "standardName": "TLS_RSA_WITH_AES_256_CBC_SHA", + "version": "SSLv3" } ``` @@ -2258,6 +2258,7 @@ added: v11.4.0 [`tls.Server`]: #class-tlsserver [`tls.TLSSocket.enableTrace()`]: #tlssocketenabletrace [`tls.TLSSocket.getPeerCertificate()`]: #tlssocketgetpeercertificatedetailed +[`tls.TLSSocket.getProtocol()`]: #tlssocketgetprotocol [`tls.TLSSocket.getSession()`]: #tlssocketgetsession [`tls.TLSSocket.getTLSTicket()`]: #tlssocketgettlsticket [`tls.TLSSocket`]: #class-tlstlssocket From c3d87564d4736845fa2fa3a5a22e3667bf5801a4 Mon Sep 17 00:00:00 2001 From: Adam Majer Date: Thu, 4 Aug 2022 01:46:31 +0200 Subject: [PATCH 061/177] net, dns: socket should handle its output as input As a consequence of https://github.com/nodejs/node/issues/43014 , server sockets and others, once connected, report string family names. But when feeding these to Socket.connect(), it passes these to host resolution with a string for family while a numeric family is expected internally. This results in wrong hints flags to be set and resolution to fail. As solution, is to add ability to handle both numeric and string family names when doing lookup and connect. 
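A rough sketch of the scenario being fixed (illustration only, not part of the change; the local echo server and the `localhost` lookup are placeholders):

```js
'use strict';
const net = require('node:net');

// A listening socket reports its family as a string such as 'IPv4'.
const server = net.createServer((c) => c.end()).listen(0, '127.0.0.1', () => {
  const { port, family } = server.address(); // family === 'IPv4'

  // Re-using that value for a connection that needs a DNS lookup previously
  // produced wrong hints and failed; socketToDnsFamily() now maps the string
  // 'IPv4'/'IPv6' back to the numeric 4/6 expected internally.
  const client = net.connect({ host: 'localhost', port, family }, () => {
    client.end();
    server.close();
  });
});
```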
Fixes: https://github.com/nodejs/node/issues/44003 PR-URL: https://github.com/nodejs/node/pull/44083 Reviewed-By: Paolo Insogna Reviewed-By: Luigi Pinca --- lib/net.js | 12 +++++++++++- test/parallel/parallel.status | 10 ---------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/lib/net.js b/lib/net.js index 66fc3578ca57fa..fcc24b22fb642c 100644 --- a/lib/net.js +++ b/lib/net.js @@ -1094,6 +1094,16 @@ Socket.prototype.connect = function(...args) { return this; }; +function socketToDnsFamily(family) { + switch (family) { + case 'IPv4': + return 4; + case 'IPv6': + return 6; + } + + return family; +} function lookupAndConnect(self, options) { const { localAddress, localPort } = options; @@ -1136,7 +1146,7 @@ function lookupAndConnect(self, options) { if (dns === undefined) dns = require('dns'); const dnsopts = { - family: options.family, + family: socketToDnsFamily(options.family), hints: options.hints || 0 }; diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status index 911787a091b5e3..bafd4e16aaf004 100644 --- a/test/parallel/parallel.status +++ b/test/parallel/parallel.status @@ -37,10 +37,6 @@ test-crypto-dh-stateless: SKIP test-crypto-keygen: SKIP [$system==solaris] # Also applies to SmartOS -# https://github.com/nodejs/node/pull/43054 -test-net-socket-connect-without-cb: SKIP -test-net-socket-ready-without-cb: SKIP -test-tcp-wrap-listen: SKIP # https://github.com/nodejs/node/issues/43446 test-net-connect-reset-until-connected: PASS, FLAKY # https://github.com/nodejs/node/issues/43457 @@ -65,12 +61,6 @@ test-worker-message-port-message-before-close: PASS,FLAKY # https://github.com/nodejs/node/issues/43446 test-net-connect-reset-until-connected: PASS, FLAKY -[$system==aix] -# https://github.com/nodejs/node/pull/43054 -test-net-socket-connect-without-cb: SKIP -test-net-socket-ready-without-cb: SKIP -test-tcp-wrap-listen: SKIP - [$system==ibmi] # https://github.com/nodejs/node/pull/30819 test-child-process-fork-net-server: SKIP From 9990dc7d189a4c021ccc2e9cfc00fdfb3267e2d8 Mon Sep 17 00:00:00 2001 From: Keyhan Vakil Date: Wed, 3 Aug 2022 19:18:38 -0700 Subject: [PATCH 062/177] src,buffer: remove unused chars_written parameter MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This parameter was always being set to `nullptr` by its callers, either explicitly or implicitly via the default argument. It was also buggy, as in some cases it wouldn't be written to, potentially leaking stack memory (see the early returns in `StringBytes::WriteUCS2`). Remove it entirely. 
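A sketch of how a typical call site changes (placeholder function; the real callers adjusted here are in `src/api/encoding.cc` and `src/node_buffer.cc`):

```cpp
#include "string_bytes.h"

namespace node {

size_t WriteExample(v8::Isolate* isolate, v8::Local<v8::Value> val,
                    char* buf, size_t buflen) {
  // Before: a trailing chars_written out-parameter that every caller set to
  // nullptr, either explicitly or through the default argument:
  //   StringBytes::Write(isolate, buf, buflen, val, UTF8, nullptr);
  // After: the unused parameter is gone; the return value (the number of
  // bytes written) is unchanged.
  return StringBytes::Write(isolate, buf, buflen, val, UTF8);
}

}  // namespace node
```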
PR-URL: https://github.com/nodejs/node/pull/44092 Reviewed-By: Feng Yu Reviewed-By: Anna Henningsen Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen Reviewed-By: Darshan Sen --- src/api/encoding.cc | 2 +- src/node_buffer.cc | 16 ++++------------ src/string_bytes.cc | 33 ++++++++------------------------- src/string_bytes.h | 6 ++---- 4 files changed, 15 insertions(+), 42 deletions(-) diff --git a/src/api/encoding.cc b/src/api/encoding.cc index f64aeee15c3b34..68278ff7371d80 100644 --- a/src/api/encoding.cc +++ b/src/api/encoding.cc @@ -150,7 +150,7 @@ ssize_t DecodeWrite(Isolate* isolate, size_t buflen, Local val, enum encoding encoding) { - return StringBytes::Write(isolate, buf, buflen, val, encoding, nullptr); + return StringBytes::Write(isolate, buf, buflen, val, encoding); } } // namespace node diff --git a/src/node_buffer.cc b/src/node_buffer.cc index 641c5f17992daa..eb8e541c68635d 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -666,12 +666,8 @@ void Fill(const FunctionCallbackInfo& args) { // Write initial String to Buffer, then use that memory to copy remainder // of string. Correct the string length for cases like HEX where less than // the total string length is written. - str_length = StringBytes::Write(env->isolate(), - ts_obj_data + start, - fill_length, - str_obj, - enc, - nullptr); + str_length = StringBytes::Write( + env->isolate(), ts_obj_data + start, fill_length, str_obj, enc); } start_fill: @@ -730,12 +726,8 @@ void StringWrite(const FunctionCallbackInfo& args) { if (max_length == 0) return args.GetReturnValue().Set(0); - uint32_t written = StringBytes::Write(env->isolate(), - ts_obj_data + offset, - max_length, - str, - encoding, - nullptr); + uint32_t written = StringBytes::Write( + env->isolate(), ts_obj_data + offset, max_length, str, encoding); args.GetReturnValue().Set(written); } diff --git a/src/string_bytes.cc b/src/string_bytes.cc index 5b530c85477310..2f512a844f193d 100644 --- a/src/string_bytes.cc +++ b/src/string_bytes.cc @@ -260,12 +260,8 @@ static size_t hex_decode(char* buf, return i; } -size_t StringBytes::WriteUCS2(Isolate* isolate, - char* buf, - size_t buflen, - Local str, - int flags, - size_t* chars_written) { +size_t StringBytes::WriteUCS2( + Isolate* isolate, char* buf, size_t buflen, Local str, int flags) { uint16_t* const dst = reinterpret_cast(buf); size_t max_chars = buflen / sizeof(*dst); @@ -277,7 +273,6 @@ size_t StringBytes::WriteUCS2(Isolate* isolate, size_t nchars; if (aligned_dst == dst) { nchars = str->Write(isolate, dst, 0, max_chars, flags); - *chars_written = nchars; return nchars * sizeof(*dst); } @@ -285,7 +280,9 @@ size_t StringBytes::WriteUCS2(Isolate* isolate, // Write all but the last char max_chars = std::min(max_chars, static_cast(str->Length())); - if (max_chars == 0) return 0; + if (max_chars == 0) { + return 0; + } nchars = str->Write(isolate, aligned_dst, 0, max_chars - 1, flags); CHECK_EQ(nchars, max_chars - 1); @@ -298,23 +295,16 @@ size_t StringBytes::WriteUCS2(Isolate* isolate, memcpy(buf + nchars * sizeof(*dst), &last, sizeof(last)); nchars++; - *chars_written = nchars; return nchars * sizeof(*dst); } - size_t StringBytes::Write(Isolate* isolate, char* buf, size_t buflen, Local val, - enum encoding encoding, - int* chars_written) { + enum encoding encoding) { HandleScope scope(isolate); size_t nbytes; - int nchars; - - if (chars_written == nullptr) - chars_written = &nchars; CHECK(val->IsString() == true); Local str = val.As(); @@ -334,19 +324,15 @@ size_t StringBytes::Write(Isolate* 
isolate, uint8_t* const dst = reinterpret_cast(buf); nbytes = str->WriteOneByte(isolate, dst, 0, buflen, flags); } - *chars_written = nbytes; break; case BUFFER: case UTF8: - nbytes = str->WriteUtf8(isolate, buf, buflen, chars_written, flags); + nbytes = str->WriteUtf8(isolate, buf, buflen, nullptr, flags); break; case UCS2: { - size_t nchars; - - nbytes = WriteUCS2(isolate, buf, buflen, str, flags, &nchars); - *chars_written = static_cast(nchars); + nbytes = WriteUCS2(isolate, buf, buflen, str, flags); // Node's "ucs2" encoding wants LE character data stored in // the Buffer, so we need to reorder on BE platforms. See @@ -368,7 +354,6 @@ size_t StringBytes::Write(Isolate* isolate, String::Value value(isolate, str); nbytes = base64_decode(buf, buflen, *value, value.length()); } - *chars_written = nbytes; break; case HEX: @@ -379,7 +364,6 @@ size_t StringBytes::Write(Isolate* isolate, String::Value value(isolate, str); nbytes = hex_decode(buf, buflen, *value, value.length()); } - *chars_written = nbytes; break; default: @@ -390,7 +374,6 @@ size_t StringBytes::Write(Isolate* isolate, return nbytes; } - // Quick and dirty size calculation // Will always be at least big enough, but may have some extra // UTF8 can be as much as 3x the size, Base64 can have 1-2 extra bytes diff --git a/src/string_bytes.h b/src/string_bytes.h index 69bb828e018cb0..ad1f15b05704c8 100644 --- a/src/string_bytes.h +++ b/src/string_bytes.h @@ -75,8 +75,7 @@ class StringBytes { char* buf, size_t buflen, v8::Local val, - enum encoding enc, - int* chars_written = nullptr); + enum encoding enc); // Take the bytes in the src, and turn it into a Buffer or String. static v8::MaybeLocal Encode(v8::Isolate* isolate, @@ -111,8 +110,7 @@ class StringBytes { char* buf, size_t buflen, v8::Local str, - int flags, - size_t* chars_written); + int flags); }; } // namespace node From 8ed39397d5bc7cec166beb56d9a0a72bb4b7ecfd Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Thu, 4 Aug 2022 06:03:28 +0200 Subject: [PATCH 063/177] test_runner: refactor to use more primordials PR-URL: https://github.com/nodejs/node/pull/44062 Reviewed-By: Mestery Reviewed-By: Feng Yu --- lib/internal/test_runner/test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index e2d068e87bb051..3ebb4bc0a2dff9 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -240,7 +240,7 @@ class Test extends AsyncResource { } addPendingSubtest(deferred) { - this.pendingSubtests.push(deferred); + ArrayPrototypePush(this.pendingSubtests, deferred); } async processPendingSubtests() { From da7bc5acdf25fda4e6d3c3b5ab9b3df77c0b0abf Mon Sep 17 00:00:00 2001 From: Keyhan Vakil Date: Wed, 3 Aug 2022 22:37:59 -0700 Subject: [PATCH 064/177] doc,worker: deprecate `--trace-atomics-wait` V8 has asked if it possible to remove the functionality underlying `--trace-atomics-wait`. Let's start with a documentation-only deprecation. 
PR-URL: https://github.com/nodejs/node/pull/44093 Refs: https://github.com/nodejs/node/issues/42982 Reviewed-By: Antoine du Hamel Reviewed-By: Feng Yu --- doc/api/cli.md | 3 +++ doc/api/deprecations.md | 14 ++++++++++++++ doc/node.1 | 1 + src/node_options.cc | 2 +- 4 files changed, 19 insertions(+), 1 deletion(-) diff --git a/doc/api/cli.md b/doc/api/cli.md index 2b63cc3366c204..854920b25754f2 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -1217,8 +1217,11 @@ for TLSv1.2, which is not as secure as TLSv1.3. +> Stability: 0 - Deprecated + Print short summaries of calls to [`Atomics.wait()`][] to stderr. The output could look like this: diff --git a/doc/api/deprecations.md b/doc/api/deprecations.md index b3d969c8851f4f..2fed73c7d93566 100644 --- a/doc/api/deprecations.md +++ b/doc/api/deprecations.md @@ -3169,6 +3169,19 @@ Type: Documentation-only `code` values other than `undefined`, `null`, integer numbers and integer strings (e.g., '1') are deprecated as parameter in [`process.exit()`][]. +### DEP0165: `--trace-atomics-wait` + + + +Type: Documentation-only + +The [`--trace-atomics-wait`][] flag is deprecated. + [Legacy URL API]: url.md#legacy-url-api [NIST SP 800-38D]: https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38d.pdf [RFC 6066]: https://tools.ietf.org/html/rfc6066#section-3 @@ -3176,6 +3189,7 @@ strings (e.g., '1') are deprecated as parameter in [`process.exit()`][]. [`"exports"` or `"main"` entry]: packages.md#main-entry-point-export [`--pending-deprecation`]: cli.md#--pending-deprecation [`--throw-deprecation`]: cli.md#--throw-deprecation +[`--trace-atomics-wait`]: cli.md#--trace-atomics-wait [`--unhandled-rejections`]: cli.md#--unhandled-rejectionsmode [`Buffer.allocUnsafeSlow(size)`]: buffer.md#static-method-bufferallocunsafeslowsize [`Buffer.from(array)`]: buffer.md#static-method-bufferfromarray diff --git a/doc/node.1 b/doc/node.1 index 2a1e80c96dfc1d..7ee4f2310f1a20 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -432,6 +432,7 @@ favour of TLSv1.3, which is more secure. Print short summaries of calls to .Sy Atomics.wait() . . +This flag is deprecated. .It Fl -trace-deprecation Print stack traces for deprecations. . 
diff --git a/src/node_options.cc b/src/node_options.cc index 670a04fd7292aa..2c6f03f98b9702 100644 --- a/src/node_options.cc +++ b/src/node_options.cc @@ -540,7 +540,7 @@ EnvironmentOptionsParser::EnvironmentOptionsParser() { &EnvironmentOptions::throw_deprecation, kAllowedInEnvironment); AddOption("--trace-atomics-wait", - "trace Atomics.wait() operations", + "(deprecated) trace Atomics.wait() operations", &EnvironmentOptions::trace_atomics_wait, kAllowedInEnvironment); AddOption("--trace-deprecation", From 8c26daff7c707e1b816f5213c77380e916ed08ce Mon Sep 17 00:00:00 2001 From: theanarkh Date: Thu, 4 Aug 2022 19:42:17 +0800 Subject: [PATCH 065/177] doc: add theanarkh to collaborators Fixes: https://github.com/nodejs/node/issues/44006 PR-URL: https://github.com/nodejs/node/pull/44131 Reviewed-By: Joyee Cheung Reviewed-By: Jiawen Geng Reviewed-By: Feng Yu Reviewed-By: LiviaMedeiros Reviewed-By: Xuguang Mei --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 78645b0e526667..f79cc099707f52 100644 --- a/README.md +++ b/README.md @@ -426,6 +426,8 @@ For information about the governance of the Node.js project, see **Stewart X Addison** <> (he/him) * [targos](https://github.com/targos) - **Michaël Zasso** <> (he/him) +* [theanarkh](https://github.com/theanarkh) - + **theanarkh** <> (he/him) * [TimothyGu](https://github.com/TimothyGu) - **Tiancheng "Timothy" Gu** <> (he/him) * [tniessen](https://github.com/tniessen) - From 116dcccc797ba20f9f9634ac8a2cd95013844208 Mon Sep 17 00:00:00 2001 From: Brian White Date: Thu, 28 Jul 2022 18:38:25 -0400 Subject: [PATCH 066/177] deps: upgrade base64 to dc6a41ce36e MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes: https://github.com/nodejs/node/issues/43894 PR-URL: https://github.com/nodejs/node/pull/44032 Reviewed-By: Colin Ihrig Reviewed-By: Mestery Reviewed-By: Tobias Nießen Reviewed-By: Feng Yu Reviewed-By: Chengzhong Wu --- deps/base64/base64/defines.txt | 362 ++++++++++++++++++ deps/base64/base64/lib/arch/neon64/codec.c | 9 +- deps/base64/base64/lib/arch/neon64/enc_loop.c | 67 ---- .../base64/lib/arch/neon64/enc_loop_asm.c | 167 ++++++++ 4 files changed, 536 insertions(+), 69 deletions(-) create mode 100644 deps/base64/base64/defines.txt create mode 100644 deps/base64/base64/lib/arch/neon64/enc_loop_asm.c diff --git a/deps/base64/base64/defines.txt b/deps/base64/base64/defines.txt new file mode 100644 index 00000000000000..17e7303594dd43 --- /dev/null +++ b/deps/base64/base64/defines.txt @@ -0,0 +1,362 @@ +#define __DBL_MIN_EXP__ (-1021) +#define __LDBL_MANT_DIG__ 113 +#define STOR(A,B,C,D) "st4 {"A".16b, "B".16b, "C".16b, "D".16b}, [%[dst]], #64 \n\t" +#define ROUND_A() SHUF("v2", "v3", "v4") LOAD("v12", "v13", "v14") TRAN("v2", "v3", "v4", "v5") STOR("v2", "v3", "v4", "v5") +#define __UINT_LEAST16_MAX__ 0xffff +#define __ARM_SIZEOF_WCHAR_T 4 +#define __DBL_DECIMAL_DIG__ 17 +#define __ATOMIC_ACQUIRE 2 +#define __FLT128_MAX_10_EXP__ 4932 +#define __FLT_MIN__ 1.17549435082228750796873653722224568e-38F +#define __GCC_IEC_559_COMPLEX 2 +#define __UINT_LEAST8_TYPE__ unsigned char +#define __FLT128_DIG__ 33 +#define __INTMAX_C(c) c ## L +#define __CHAR_BIT__ 8 +#define __UINT8_MAX__ 0xff +#define __WCHAR_MAX__ 0xffffffffU +#define __GCC_HAVE_SYNC_COMPARE_AND_SWAP_2 1 +#define __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4 1 +#define 
__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8 1 +#define __GCC_ATOMIC_CHAR_LOCK_FREE 2 +#define __GCC_IEC_559 2 +#define __FLT32X_DECIMAL_DIG__ 17 +#define __FLT_EVAL_METHOD__ 0 +#define HAVE_NEON64 1 +#define __FLT64_DECIMAL_DIG__ 17 +#define __GCC_ATOMIC_CHAR32_T_LOCK_FREE 2 +#define __UINT_FAST32_TYPE__ long unsigned int +#define __UINT_FAST64_MAX__ 0xffffffffffffffffUL +#define __SIG_ATOMIC_TYPE__ int +#define __DBL_MIN_10_EXP__ (-307) +#define __FINITE_MATH_ONLY__ 0 +#define __FLT32X_MAX_EXP__ 1024 +#define __GCC_HAVE_SYNC_COMPARE_AND_SWAP_1 1 +#define __GNUC_PATCHLEVEL__ 1 +#define __FLT32_HAS_DENORM__ 1 +#define __UINT_FAST8_MAX__ 0xff +#define __INT8_C(c) c +#define __INT_LEAST8_WIDTH__ 8 +#define __INTMAX_TYPE__ long int +#define __UINT_LEAST64_MAX__ 0xffffffffffffffffUL +#define __SHRT_MAX__ 0x7fff +#define __STDC_ISO_10646__ 201706L +#define __LDBL_MAX__ 1.18973149535723176508575932662800702e+4932L +#define __ARM_FEATURE_IDIV 1 +#define __FLT64X_MAX_10_EXP__ 4932 +#define __ARM_FP 14 +#define __FLT64X_HAS_QUIET_NAN__ 1 +#define __WINT_TYPE__ unsigned int +#define __UINT_LEAST8_MAX__ 0xff +#define __FLT128_DENORM_MIN__ 6.47517511943802511092443895822764655e-4966F128 +#define __UINTMAX_TYPE__ long unsigned int +#define _STDC_PREDEF_H 1 +#define __linux 1 +#define __FLT_EVAL_METHOD_TS_18661_3__ 0 +#define __CHAR_UNSIGNED__ 1 +#define __UINT32_MAX__ 0xffffffffU +#define __DBL_DENORM_MIN__ ((double)4.94065645841246544176568792868221372e-324L) +#define __AARCH64_CMODEL_SMALL__ 1 +#define __LDBL_MAX_EXP__ 16384 +#define __INT_FAST32_WIDTH__ 64 +#define __FLT128_MIN_EXP__ (-16381) +#define __FLT128_MIN_10_EXP__ (-4931) +#define __INT_LEAST16_WIDTH__ 16 +#define __FLT64X_MIN_EXP__ (-16381) +#define __SCHAR_MAX__ 0x7f +#define __FLT128_MANT_DIG__ 113 +#define __DBL_MAX__ ((double)1.79769313486231570814527423731704357e+308L) +#define __WCHAR_MIN__ 0U +#define __INT64_C(c) c ## L +#define __GCC_ATOMIC_POINTER_LOCK_FREE 2 +#define __SIZEOF_INT__ 4 +#define __INT_FAST64_WIDTH__ 64 +#define __PRAGMA_REDEFINE_EXTNAME 1 +#define __FLT32X_MANT_DIG__ 53 +#define __USER_LABEL_PREFIX__ +#define __FLT32_MAX_10_EXP__ 38 +#define __FLT64X_EPSILON__ 1.92592994438723585305597794258492732e-34F64x +#define __STDC_HOSTED__ 1 +#define __DBL_DIG__ 15 +#define __FLT32_DIG__ 6 +#define __FLT_EPSILON__ 1.19209289550781250000000000000000000e-7F +#define __SHRT_WIDTH__ 16 +#define __LDBL_MIN__ 3.36210314311209350626267781732175260e-4932L +#define __STDC_UTF_16__ 1 +#define __FLT16_HAS_QUIET_NAN__ 1 +#define __ARM_SIZEOF_MINIMAL_ENUM 4 +#define __FLT64X_DENORM_MIN__ 6.47517511943802511092443895822764655e-4966F64x +#define __FP_FAST_FMA 1 +#define __FLT32X_HAS_INFINITY__ 1 +#define __INT32_MAX__ 0x7fffffff +#define __FLT16_DIG__ 3 +#define __INT_WIDTH__ 32 +#define __SIZEOF_LONG__ 8 +#define __STDC_IEC_559__ 1 +#define __UINT16_C(c) c +#define __DECIMAL_DIG__ 36 +#define __STDC_IEC_559_COMPLEX__ 1 +#define __FLT64_EPSILON__ 2.22044604925031308084726333618164062e-16F64 +#define __gnu_linux__ 1 +#define __INT16_MAX__ 0x7fff +#define LOAD(A,B,C) "ld3 {"A".16b, "B".16b, "C".16b}, [%[src]], #48 \n\t" +#define __FLT64X_MIN_10_EXP__ (-4931) +#define __LDBL_HAS_QUIET_NAN__ 1 +#define __FLT16_MIN_EXP__ (-13) +#define __FLT64_MANT_DIG__ 53 +#define __FLT64X_MANT_DIG__ 113 +#define __GNUC__ 10 +#define __FLT_HAS_DENORM__ 1 +#define __SIZEOF_LONG_DOUBLE__ 16 +#define __LDBL_MIN_EXP__ (-16381) +#define __FLT64_MAX_10_EXP__ 308 +#define __FLT16_MAX_10_EXP__ 4 +#define __INT_FAST32_MAX__ 0x7fffffffffffffffL +#define __DBL_HAS_INFINITY__ 1 
+#define __HAVE_SPECULATION_SAFE_VALUE 1 +#define __INTPTR_WIDTH__ 64 +#define __FLT32X_HAS_DENORM__ 1 +#define __INT_FAST16_TYPE__ long int +#define __LDBL_HAS_DENORM__ 1 +#define __FLT128_HAS_INFINITY__ 1 +#define __FLT32_DECIMAL_DIG__ 9 +#define __DBL_MAX_EXP__ 1024 +#define __WCHAR_WIDTH__ 32 +#define __FLT32_MAX__ 3.40282346638528859811704183484516925e+38F32 +#define __GCC_ATOMIC_LONG_LOCK_FREE 2 +#define __FLT16_DECIMAL_DIG__ 5 +#define __FLT32_HAS_QUIET_NAN__ 1 +#define __LONG_LONG_MAX__ 0x7fffffffffffffffLL +#define __SIZEOF_SIZE_T__ 8 +#define __SIG_ATOMIC_WIDTH__ 32 +#define __ARM_ALIGN_MAX_PWR 28 +#define __SIZEOF_WINT_T__ 4 +#define __LONG_LONG_WIDTH__ 64 +#define __FLT32_MAX_EXP__ 128 +#define __ARM_FP16_FORMAT_IEEE 1 +#define __FLT_MIN_EXP__ (-125) +#define __FLT64_NORM_MAX__ 1.79769313486231570814527423731704357e+308F64 +#define __GCC_HAVE_DWARF2_CFI_ASM 1 +#define __FLT32X_MIN_EXP__ (-1021) +#define __INT_FAST64_TYPE__ long int +#define __ARM_FP16_ARGS 1 +#define __FP_FAST_FMAF 1 +#define __FLT128_NORM_MAX__ 1.18973149535723176508575932662800702e+4932F128 +#define __FLT64_DENORM_MIN__ 4.94065645841246544176568792868221372e-324F64 +#define __DBL_MIN__ ((double)2.22507385850720138309023271733240406e-308L) +#define __ARM_FEATURE_CLZ 1 +#define __FLT16_DENORM_MIN__ 5.96046447753906250000000000000000000e-8F16 +#define __unix__ 1 +#define __FLT64X_NORM_MAX__ 1.18973149535723176508575932662800702e+4932F64x +#define __SIZEOF_POINTER__ 8 +#define __GXX_ABI_VERSION 1014 +#define __LP64__ 1 +#define __DBL_HAS_QUIET_NAN__ 1 +#define __FLT_EVAL_METHOD_C99__ 0 +#define __FLT32X_EPSILON__ 2.22044604925031308084726333618164062e-16F32x +#define __FLT64_MIN_EXP__ (-1021) +#define __UINT64_MAX__ 0xffffffffffffffffUL +#define __LDBL_DECIMAL_DIG__ 36 +#define __FLT_MAX__ 3.40282346638528859811704183484516925e+38F +#define __aarch64__ 1 +#define __FLT64_MIN_10_EXP__ (-307) +#define __FLT64X_DECIMAL_DIG__ 36 +#define __REGISTER_PREFIX__ +#define __UINT16_MAX__ 0xffff +#define __INTMAX_WIDTH__ 64 +#define __LDBL_HAS_INFINITY__ 1 +#define __FLT32_MIN__ 1.17549435082228750796873653722224568e-38F32 +#define __FLT_DIG__ 6 +#define __NO_INLINE__ 1 +#define __DEC_EVAL_METHOD__ 2 +#define __FLT_MANT_DIG__ 24 +#define __FLT16_MIN_10_EXP__ (-4) +#define __VERSION__ "10.3.1 20210621" +#define __UINT64_C(c) c ## UL +#define __WINT_MAX__ 0xffffffffU +#define __INT_LEAST32_MAX__ 0x7fffffff +#define __GCC_ATOMIC_INT_LOCK_FREE 2 +#define __FLT32X_MIN__ 2.22507385850720138309023271733240406e-308F32x +#define __FLT128_MAX_EXP__ 16384 +#define __FLT32_MANT_DIG__ 24 +#define __FLOAT_WORD_ORDER__ __ORDER_LITTLE_ENDIAN__ +#define __FLT16_MAX_EXP__ 16 +#define __BIGGEST_ALIGNMENT__ 16 +#define __INT32_C(c) c +#define __FLT128_HAS_DENORM__ 1 +#define __SCHAR_WIDTH__ 8 +#define __ORDER_PDP_ENDIAN__ 3412 +#define __ARM_64BIT_STATE 1 +#define __INT_FAST32_TYPE__ long int +#define ROUND() LOAD("v12", "v13", "v14") SHUF("v12", "v13", "v14") TRAN("v12", "v13", "v14", "v15") STOR("v12", "v13", "v14", "v15") +#define __UINT_LEAST16_TYPE__ short unsigned int +#define __SIZE_TYPE__ long unsigned int +#define __FLT64X_DIG__ 33 +#define __ARM_FEATURE_FMA 1 +#define __INT8_TYPE__ signed char +#define __ELF__ 1 +#define __GCC_ASM_FLAG_OUTPUTS__ 1 +#define __GCC_ATOMIC_TEST_AND_SET_TRUEVAL 1 +#define __FLT_RADIX__ 2 +#define __INT_LEAST16_TYPE__ short int +#define __ARM_ARCH_PROFILE 65 +#define __LDBL_EPSILON__ 1.92592994438723585305597794258492732e-34L +#define __UINTMAX_C(c) c ## UL +#define __ARM_PCS_AAPCS64 1 +#define 
__SIG_ATOMIC_MAX__ 0x7fffffff +#define __GCC_ATOMIC_WCHAR_T_LOCK_FREE 2 +#define __SIZEOF_PTRDIFF_T__ 8 +#define __ATOMIC_RELAXED 0 +#define __LDBL_DIG__ 33 +#define __AARCH64EL__ 1 +#define __INT_FAST16_MAX__ 0x7fffffffffffffffL +#define __FLT64_DIG__ 15 +#define __UINT_FAST32_MAX__ 0xffffffffffffffffUL +#define __UINT_LEAST64_TYPE__ long unsigned int +#define __FLT16_EPSILON__ 9.76562500000000000000000000000000000e-4F16 +#define __FLT_HAS_QUIET_NAN__ 1 +#define __FLT_MAX_10_EXP__ 38 +#define __LONG_MAX__ 0x7fffffffffffffffL +#define ROUND_A_FIRST() LOAD("v2", "v3", "v4") ROUND_A() +#define __FLT64X_HAS_DENORM__ 1 +#define __FLT_HAS_INFINITY__ 1 +#define ROUND_B() SHUF("v12", "v13", "v14") LOAD("v2", "v3", "v4") TRAN("v12", "v13", "v14", "v15") STOR("v12", "v13", "v14", "v15") +#define __unix 1 +#define __DBL_HAS_DENORM__ 1 +#define __UINT_FAST16_TYPE__ long unsigned int +#define __FLT32X_HAS_QUIET_NAN__ 1 +#define __CHAR16_TYPE__ short unsigned int +#define __FLT64X_MAX_EXP__ 16384 +#define __SIZE_WIDTH__ 64 +#define __INT_LEAST16_MAX__ 0x7fff +#define __FLT16_NORM_MAX__ 6.55040000000000000000000000000000000e+4F16 +#define __INT64_MAX__ 0x7fffffffffffffffL +#define __FLT32_DENORM_MIN__ 1.40129846432481707092372958328991613e-45F32 +#define __INT_LEAST64_TYPE__ long int +#define __INT16_TYPE__ short int +#define __INT_LEAST8_TYPE__ signed char +#define __FLT16_MAX__ 6.55040000000000000000000000000000000e+4F16 +#define __STDC_VERSION__ 201710L +#define __INT_FAST8_MAX__ 0x7f +#define __ARM_ARCH 8 +#define __FLT128_MAX__ 1.18973149535723176508575932662800702e+4932F128 +#define __INTPTR_MAX__ 0x7fffffffffffffffL +#define linux 1 +#define __ARM_FEATURE_UNALIGNED 1 +#define __FLT64_HAS_QUIET_NAN__ 1 +#define __FLT32_MIN_10_EXP__ (-37) +#define __FLT32X_DIG__ 15 +#define __UINT8_TYPE__ unsigned char +#define __PTRDIFF_WIDTH__ 64 +#define __FLT64_HAS_INFINITY__ 1 +#define __FLT64X_MAX__ 1.18973149535723176508575932662800702e+4932F64x +#define __FLT16_HAS_INFINITY__ 1 +#define __SIG_ATOMIC_MIN__ (-__SIG_ATOMIC_MAX__ - 1) +#define __PTRDIFF_MAX__ 0x7fffffffffffffffL +#define __FLT16_MANT_DIG__ 11 +#define __INTPTR_TYPE__ long int +#define __UINT16_TYPE__ short unsigned int +#define __WCHAR_TYPE__ unsigned int +#define __UINTPTR_MAX__ 0xffffffffffffffffUL +#define __ARM_ARCH_8A 1 +#define __INT_FAST64_MAX__ 0x7fffffffffffffffL +#define __FLT_NORM_MAX__ 3.40282346638528859811704183484516925e+38F +#define __FLT32_HAS_INFINITY__ 1 +#define __UINT_FAST64_TYPE__ long unsigned int +#define __INT_MAX__ 0x7fffffff +#define __INT64_TYPE__ long int +#define __FLT_MAX_EXP__ 128 +#define __ORDER_BIG_ENDIAN__ 4321 +#define __DBL_MANT_DIG__ 53 +#define __INT_LEAST64_MAX__ 0x7fffffffffffffffL +#define __GCC_ATOMIC_CHAR16_T_LOCK_FREE 2 +#define __FP_FAST_FMAF32 1 +#define __UINT_LEAST32_TYPE__ unsigned int +#define __SIZEOF_SHORT__ 2 +#define __FLT32_NORM_MAX__ 3.40282346638528859811704183484516925e+38F32 +#define __GCC_ATOMIC_BOOL_LOCK_FREE 2 +#define __FLT64_MAX__ 1.79769313486231570814527423731704357e+308F64 +#define __WINT_WIDTH__ 32 +#define __FP_FAST_FMAF64 1 +#define __INT_LEAST8_MAX__ 0x7f +#define __INT_LEAST64_WIDTH__ 64 +#define __FLT32X_MAX_10_EXP__ 308 +#define __SIZEOF_INT128__ 16 +#define __FLT16_MIN__ 6.10351562500000000000000000000000000e-5F16 +#define __LDBL_MAX_10_EXP__ 4932 +#define __DBL_EPSILON__ ((double)2.22044604925031308084726333618164062e-16L) +#define __FLT32_MIN_EXP__ (-125) +#define __FLT128_MIN__ 3.36210314311209350626267781732175260e-4932F128 +#define _LP64 1 +#define __UINT8_C(c) c 
+#define __FLT64_MAX_EXP__ 1024 +#define __INT_LEAST32_TYPE__ int +#define __UINT64_TYPE__ long unsigned int +#define __ARM_NEON 1 +#define __FLT128_HAS_QUIET_NAN__ 1 +#define __INTMAX_MAX__ 0x7fffffffffffffffL +#define __UINT_FAST8_TYPE__ unsigned char +#define __INT_FAST8_TYPE__ signed char +#define __FLT64X_MIN__ 3.36210314311209350626267781732175260e-4932F64x +#define __GNUC_STDC_INLINE__ 1 +#define __FLT64_HAS_DENORM__ 1 +#define __FLT32_EPSILON__ 1.19209289550781250000000000000000000e-7F32 +#define __FP_FAST_FMAF32x 1 +#define __FLT16_HAS_DENORM__ 1 +#define __STDC_UTF_32__ 1 +#define __INT_FAST8_WIDTH__ 8 +#define __FLT32X_MAX__ 1.79769313486231570814527423731704357e+308F32x +#define __DBL_NORM_MAX__ ((double)1.79769313486231570814527423731704357e+308L) +#define __FLT64X_HAS_INFINITY__ 1 +#define __BYTE_ORDER__ __ORDER_LITTLE_ENDIAN__ +#define __ARM_ALIGN_MAX_STACK_PWR 16 +#define __LDBL_DENORM_MIN__ 6.47517511943802511092443895822764655e-4966L +#define __SIZEOF_WCHAR_T__ 4 +#define unix 1 +#define __UINT32_C(c) c ## U +#define __FLT_DENORM_MIN__ 1.40129846432481707092372958328991613e-45F +#define __WINT_MIN__ 0U +#define __INT8_MAX__ 0x7f +#define __LONG_WIDTH__ 64 +#define __FLT32X_NORM_MAX__ 1.79769313486231570814527423731704357e+308F32x +#define __CHAR32_TYPE__ unsigned int +#define __ARM_FEATURE_NUMERIC_MAXMIN 1 +#define __INT32_TYPE__ int +#define __SIZEOF_DOUBLE__ 8 +#define __FLT_MIN_10_EXP__ (-37) +#define __FLT64_MIN__ 2.22507385850720138309023271733240406e-308F64 +#define __INT_LEAST32_WIDTH__ 32 +#define __SIZEOF_FLOAT__ 4 +#define __ATOMIC_CONSUME 1 +#define __GNUC_MINOR__ 3 +#define __INT_FAST16_WIDTH__ 64 +#define __UINTMAX_MAX__ 0xffffffffffffffffUL +#define __FLT32X_DENORM_MIN__ 4.94065645841246544176568792868221372e-324F32x +#define SHUF(A,B,C) "ushr %[t0].16b, "A".16b, #2 \n\t" "ushr %[t1].16b, "B".16b, #4 \n\t" "ushr %[t2].16b, "C".16b, #6 \n\t" "sli %[t1].16b, "A".16b, #4 \n\t" "sli %[t2].16b, "B".16b, #2 \n\t" "and %[t1].16b, %[t1].16b, %[n63].16b \n\t" "and %[t2].16b, %[t2].16b, %[n63].16b \n\t" "and %[t3].16b, "C".16b, %[n63].16b \n\t" +#define __DBL_MAX_10_EXP__ 308 +#define __INT16_C(c) c +#define __ARM_ARCH_ISA_A64 1 +#define __STDC__ 1 +#define __PTRDIFF_TYPE__ long int +#define TRAN(A,B,C,D) "tbl "A".16b, {v8.16b-v11.16b}, %[t0].16b \n\t" "tbl "B".16b, {v8.16b-v11.16b}, %[t1].16b \n\t" "tbl "C".16b, {v8.16b-v11.16b}, %[t2].16b \n\t" "tbl "D".16b, {v8.16b-v11.16b}, %[t3].16b \n\t" +#define __ATOMIC_SEQ_CST 5 +#define __GCC_HAVE_SYNC_COMPARE_AND_SWAP_16 1 +#define ROUND_B_LAST() SHUF("v12", "v13", "v14") TRAN("v12", "v13", "v14", "v15") STOR("v12", "v13", "v14", "v15") +#define __UINT32_TYPE__ unsigned int +#define __FLT32X_MIN_10_EXP__ (-307) +#define __UINTPTR_TYPE__ long unsigned int +#define __linux__ 1 +#define __LDBL_MIN_10_EXP__ (-4931) +#define __FLT128_EPSILON__ 1.92592994438723585305597794258492732e-34F128 +#define __SIZEOF_LONG_LONG__ 8 +#define __FLT128_DECIMAL_DIG__ 36 +#define __GCC_ATOMIC_LLONG_LOCK_FREE 2 +#define __FLT_DECIMAL_DIG__ 9 +#define __UINT_FAST16_MAX__ 0xffffffffffffffffUL +#define __LDBL_NORM_MAX__ 1.18973149535723176508575932662800702e+4932L +#define __GCC_ATOMIC_SHORT_LOCK_FREE 2 +#define __ORDER_LITTLE_ENDIAN__ 1234 +#define __SIZE_MAX__ 0xffffffffffffffffUL +#define __UINT_LEAST32_MAX__ 0xffffffffU +#define __ATOMIC_ACQ_REL 4 +#define __ATOMIC_RELEASE 3 diff --git a/deps/base64/base64/lib/arch/neon64/codec.c b/deps/base64/base64/lib/arch/neon64/codec.c index fc953b23e7f9b2..79789bb7d0ecbc 100644 --- 
a/deps/base64/base64/lib/arch/neon64/codec.c +++ b/deps/base64/base64/lib/arch/neon64/codec.c @@ -58,8 +58,13 @@ load_64byte_table (const uint8_t *p) #include "../generic/32/dec_loop.c" #include "../generic/64/enc_loop.c" #include "dec_loop.c" -#include "enc_reshuffle.c" -#include "enc_loop.c" + +#ifdef BASE64_NEON64_USE_ASM +# include "enc_loop_asm.c" +#else +# include "enc_reshuffle.c" +# include "enc_loop.c" +#endif #endif // BASE64_USE_NEON64 diff --git a/deps/base64/base64/lib/arch/neon64/enc_loop.c b/deps/base64/base64/lib/arch/neon64/enc_loop.c index d1862f7a3aadf2..59a1c59728a139 100644 --- a/deps/base64/base64/lib/arch/neon64/enc_loop.c +++ b/deps/base64/base64/lib/arch/neon64/enc_loop.c @@ -1,72 +1,6 @@ -#ifdef BASE64_NEON64_USE_ASM -static inline void -enc_loop_neon64_inner_asm (const uint8_t **s, uint8_t **o, const uint8x16x4_t tbl_enc) -{ - // This function duplicates the functionality of enc_loop_neon64_inner, - // but entirely with inline assembly. This gives a significant speedup - // over using NEON intrinsics, which do not always generate very good - // code. The logic of the assembly is directly lifted from the - // intrinsics version, so it can be used as a guide to this code. - - // Temporary registers, used as scratch space. - uint8x16_t tmp0, tmp1, tmp2, tmp3; - - // Numeric constant. - const uint8x16_t n63 = vdupq_n_u8(63); - - __asm__ ( - - // Load 48 bytes and deinterleave. The bytes are loaded to - // hard-coded registers v12, v13 and v14, to ensure that they - // are contiguous. Increment the source pointer. - "ld3 {v12.16b, v13.16b, v14.16b}, [%[src]], #48 \n\t" - - // Reshuffle the bytes using temporaries. - "ushr %[t0].16b, v12.16b, #2 \n\t" - "ushr %[t1].16b, v13.16b, #4 \n\t" - "ushr %[t2].16b, v14.16b, #6 \n\t" - "sli %[t1].16b, v12.16b, #4 \n\t" - "sli %[t2].16b, v13.16b, #2 \n\t" - "and %[t1].16b, %[t1].16b, %[n63].16b \n\t" - "and %[t2].16b, %[t2].16b, %[n63].16b \n\t" - "and %[t3].16b, v14.16b, %[n63].16b \n\t" - - // Translate the values to the Base64 alphabet. - "tbl v12.16b, {%[l0].16b, %[l1].16b, %[l2].16b, %[l3].16b}, %[t0].16b \n\t" - "tbl v13.16b, {%[l0].16b, %[l1].16b, %[l2].16b, %[l3].16b}, %[t1].16b \n\t" - "tbl v14.16b, {%[l0].16b, %[l1].16b, %[l2].16b, %[l3].16b}, %[t2].16b \n\t" - "tbl v15.16b, {%[l0].16b, %[l1].16b, %[l2].16b, %[l3].16b}, %[t3].16b \n\t" - - // Store 64 bytes and interleave. Increment the dest pointer. - "st4 {v12.16b, v13.16b, v14.16b, v15.16b}, [%[dst]], #64 \n\t" - - // Outputs (modified). - : [src] "+r" (*s), - [dst] "+r" (*o), - [t0] "=&w" (tmp0), - [t1] "=&w" (tmp1), - [t2] "=&w" (tmp2), - [t3] "=&w" (tmp3) - - // Inputs (not modified). - : [n63] "w" (n63), - [l0] "w" (tbl_enc.val[0]), - [l1] "w" (tbl_enc.val[1]), - [l2] "w" (tbl_enc.val[2]), - [l3] "w" (tbl_enc.val[3]) - - // Clobbers. 
- : "v12", "v13", "v14", "v15" - ); -} -#endif - static inline void enc_loop_neon64_inner (const uint8_t **s, uint8_t **o, const uint8x16x4_t tbl_enc) { -#ifdef BASE64_NEON64_USE_ASM - enc_loop_neon64_inner_asm(s, o, tbl_enc); -#else // Load 48 bytes and deinterleave: uint8x16x3_t src = vld3q_u8(*s); @@ -86,7 +20,6 @@ enc_loop_neon64_inner (const uint8_t **s, uint8_t **o, const uint8x16x4_t tbl_en *s += 48; *o += 64; -#endif } static inline void diff --git a/deps/base64/base64/lib/arch/neon64/enc_loop_asm.c b/deps/base64/base64/lib/arch/neon64/enc_loop_asm.c new file mode 100644 index 00000000000000..cf2fd27e80d2ca --- /dev/null +++ b/deps/base64/base64/lib/arch/neon64/enc_loop_asm.c @@ -0,0 +1,167 @@ +// Apologies in advance for combining the preprocessor with inline assembly, +// two notoriously gnarly parts of C, but it was necessary to avoid a lot of +// code repetition. The preprocessor is used to template large sections of +// inline assembly that differ only in the registers used. If the code was +// written out by hand, it would become very large and hard to audit. + +// Generate a block of inline assembly that loads three user-defined registers +// A, B, C from memory and deinterleaves them, post-incrementing the src +// pointer. The register set should be sequential. +#define LOAD(A, B, C) \ + "ld3 {"A".16b, "B".16b, "C".16b}, [%[src]], #48 \n\t" + +// Generate a block of inline assembly that takes three deinterleaved registers +// and shuffles the bytes. The output is in temporary registers t0..t3. +#define SHUF(A, B, C) \ + "ushr %[t0].16b, "A".16b, #2 \n\t" \ + "ushr %[t1].16b, "B".16b, #4 \n\t" \ + "ushr %[t2].16b, "C".16b, #6 \n\t" \ + "sli %[t1].16b, "A".16b, #4 \n\t" \ + "sli %[t2].16b, "B".16b, #2 \n\t" \ + "and %[t1].16b, %[t1].16b, %[n63].16b \n\t" \ + "and %[t2].16b, %[t2].16b, %[n63].16b \n\t" \ + "and %[t3].16b, "C".16b, %[n63].16b \n\t" + +// Generate a block of inline assembly that takes temporary registers t0..t3 +// and translates them to the base64 alphabet, using a table loaded into +// v8..v11. The output is in user-defined registers A..D. +#define TRAN(A, B, C, D) \ + "tbl "A".16b, {v8.16b-v11.16b}, %[t0].16b \n\t" \ + "tbl "B".16b, {v8.16b-v11.16b}, %[t1].16b \n\t" \ + "tbl "C".16b, {v8.16b-v11.16b}, %[t2].16b \n\t" \ + "tbl "D".16b, {v8.16b-v11.16b}, %[t3].16b \n\t" + +// Generate a block of inline assembly that interleaves four registers and +// stores them, post-incrementing the destination pointer. +#define STOR(A, B, C, D) \ + "st4 {"A".16b, "B".16b, "C".16b, "D".16b}, [%[dst]], #64 \n\t" + +// Generate a block of inline assembly that generates a single self-contained +// encoder round: fetch the data, process it, and store the result. +#define ROUND() \ + LOAD("v12", "v13", "v14") \ + SHUF("v12", "v13", "v14") \ + TRAN("v12", "v13", "v14", "v15") \ + STOR("v12", "v13", "v14", "v15") + +// Generate a block of assembly that generates a type A interleaved encoder +// round. It uses registers that were loaded by the previous type B round, and +// in turn loads registers for the next type B round. +#define ROUND_A() \ + SHUF("v2", "v3", "v4") \ + LOAD("v12", "v13", "v14") \ + TRAN("v2", "v3", "v4", "v5") \ + STOR("v2", "v3", "v4", "v5") + +// Type B interleaved encoder round. Same as type A, but register sets swapped. +#define ROUND_B() \ + SHUF("v12", "v13", "v14") \ + LOAD("v2", "v3", "v4") \ + TRAN("v12", "v13", "v14", "v15") \ + STOR("v12", "v13", "v14", "v15") + +// The first type A round needs to load its own registers. 
+#define ROUND_A_FIRST() \ + LOAD("v2", "v3", "v4") \ + ROUND_A() + +// The last type B round omits the load for the next step. +#define ROUND_B_LAST() \ + SHUF("v12", "v13", "v14") \ + TRAN("v12", "v13", "v14", "v15") \ + STOR("v12", "v13", "v14", "v15") + +// Suppress clang's warning that the literal string in the asm statement is +// overlong (longer than the ISO-mandated minimum size of 4095 bytes for C99 +// compilers). It may be true, but the goal here is not C99 portability. +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Woverlength-strings" + +static inline void +enc_loop_neon64 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + size_t rounds = *slen / 48; + + if (rounds == 0) { + return; + } + + *slen -= rounds * 48; // 48 bytes consumed per round. + *olen += rounds * 64; // 64 bytes produced per round. + + // Number of times to go through the 8x loop. + size_t loops = rounds / 8; + + // Number of rounds remaining after the 8x loop. + rounds %= 8; + + // Temporary registers, used as scratch space. + uint8x16_t tmp0, tmp1, tmp2, tmp3; + + __asm__ volatile ( + + // Load the encoding table into v8..v11. + " ld1 {v8.16b-v11.16b}, [%[tbl]] \n\t" + + // If there are eight rounds or more, enter an 8x unrolled loop + // of interleaved encoding rounds. The rounds interleave memory + // operations (load/store) with data operations to maximize + // pipeline throughput. + " cbz %[loops], 4f \n\t" + + // The SIMD instructions do not touch the flags. + "88: subs %[loops], %[loops], #1 \n\t" + " " ROUND_A_FIRST() + " " ROUND_B() + " " ROUND_A() + " " ROUND_B() + " " ROUND_A() + " " ROUND_B() + " " ROUND_A() + " " ROUND_B_LAST() + " b.ne 88b \n\t" + + // Enter a 4x unrolled loop for rounds of 4 or more. + "4: cmp %[rounds], #4 \n\t" + " b.lt 30f \n\t" + " " ROUND_A_FIRST() + " " ROUND_B() + " " ROUND_A() + " " ROUND_B_LAST() + " sub %[rounds], %[rounds], #4 \n\t" + + // Dispatch the remaining rounds 0..3. + "30: cbz %[rounds], 0f \n\t" + " cmp %[rounds], #2 \n\t" + " b.eq 2f \n\t" + " b.lt 1f \n\t" + + // Block of non-interlaced encoding rounds, which can each + // individually be jumped to. Rounds fall through to the next. + "3: " ROUND() + "2: " ROUND() + "1: " ROUND() + "0: \n\t" + + // Outputs (modified). + : [loops] "+r" (loops), + [src] "+r" (*s), + [dst] "+r" (*o), + [t0] "=&w" (tmp0), + [t1] "=&w" (tmp1), + [t2] "=&w" (tmp2), + [t3] "=&w" (tmp3) + + // Inputs (not modified). + : [rounds] "r" (rounds), + [tbl] "r" (base64_table_enc_6bit), + [n63] "w" (vdupq_n_u8(63)) + + // Clobbers. + : "v2", "v3", "v4", "v5", + "v8", "v9", "v10", "v11", + "v12", "v13", "v14", "v15" + ); +} + +#pragma GCC diagnostic pop From e7d101fbd4930afb1d3a91f70ebbeed8d4f75b93 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Thu, 28 Jul 2022 14:58:00 +0800 Subject: [PATCH 067/177] bootstrap: support more builtins in the embedded code cache This patch: - Make NativeModuleLoader::LookupAndCompile() detect parameters based on module IDs. This allows us to compile more builtins when generating the embedded bootstrap, including - internal/per_context/* - internal/bootstrap/* - internal/main/* - Move pre_execution.js to lib/internal/process as it needs to be compiled as a regular built-in module, unlike other scripts in lib/internal/bootstrap - Move markBootstrapComplete() to the performance binding instead of making it a function-wrapper-based global to reduce number of special cases. 
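As a sketch of the last point, a main script under `lib/internal/main/` now pulls both helpers from the relocated module instead of relying on a function-wrapper-based global (a minimal illustration mirroring the diffs below; modules like this are internal and not requirable from user code):

```js
'use strict';
// Typical shape of a lib/internal/main/*.js entry point after this change.
const {
  prepareMainThreadExecution,
  markBootstrapComplete
} = require('internal/process/pre_execution');

prepareMainThreadExecution();
// Now backed by internalBinding('performance') rather than a global
// injected through the function wrapper.
markBootstrapComplete();
```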
PR-URL: https://github.com/nodejs/node/pull/44018 Reviewed-By: Chengzhong Wu Reviewed-By: Anna Henningsen --- lib/internal/bootstrap/loaders.js | 2 + lib/internal/bootstrap/node.js | 4 +- lib/internal/main/.eslintrc.yaml | 2 - lib/internal/main/check_syntax.js | 5 +- .../{bootstrap => main}/environment.js | 6 +- lib/internal/main/eval_stdin.js | 5 +- lib/internal/main/eval_string.js | 5 +- lib/internal/main/inspect.js | 5 +- lib/internal/main/mksnapshot.js | 2 +- lib/internal/main/print_help.js | 5 +- lib/internal/main/prof_process.js | 5 +- lib/internal/main/repl.js | 5 +- lib/internal/main/run_main_module.js | 5 +- lib/internal/main/test_runner.js | 3 +- lib/internal/main/worker_thread.js | 5 +- .../{bootstrap => process}/pre_execution.js | 7 +- lib/internal/v8/startup_snapshot.js | 6 +- lib/internal/wasm_web_api.js | 2 +- src/api/environment.cc | 15 ++- src/node.cc | 68 ++++---------- src/node_internals.h | 1 - src/node_native_module.cc | 91 +++++++++++++------ src/node_native_module.h | 8 +- src/node_perf.cc | 8 ++ src/node_snapshotable.cc | 5 +- test/parallel/test-bootstrap-modules.js | 2 +- test/parallel/test-code-cache.js | 16 +--- 27 files changed, 156 insertions(+), 137 deletions(-) delete mode 100644 lib/internal/main/.eslintrc.yaml rename lib/internal/{bootstrap => main}/environment.js (69%) rename lib/internal/{bootstrap => process}/pre_execution.js (99%) diff --git a/lib/internal/bootstrap/loaders.js b/lib/internal/bootstrap/loaders.js index 41162fabcbc589..4c7a9acef2e539 100644 --- a/lib/internal/bootstrap/loaders.js +++ b/lib/internal/bootstrap/loaders.js @@ -326,6 +326,8 @@ class NativeModule { requireWithFallbackInDeps : nativeModuleRequire; const fn = compileFunction(id); + // Arguments must match the parameters specified in + // NativeModuleLoader::LookupAndCompile(). fn(this.exports, requireFn, this, process, internalBinding, primordials); this.loaded = true; diff --git a/lib/internal/bootstrap/node.js b/lib/internal/bootstrap/node.js index df2afb84b5a6ca..14eb4bbcca472d 100644 --- a/lib/internal/bootstrap/node.js +++ b/lib/internal/bootstrap/node.js @@ -6,7 +6,7 @@ // // This file is expected not to perform any asynchronous operations itself // when being executed - those should be done in either -// `lib/internal/bootstrap/pre_execution.js` or in main scripts. The majority +// `lib/internal/process/pre_execution.js` or in main scripts. The majority // of the code here focuses on setting up the global proxy and the process // object in a synchronous manner. // As special caution is given to the performance of the startup process, @@ -28,7 +28,7 @@ // Then, depending on how the Node.js instance is launched, one of the main // scripts in `lib/internal/main` will be selected by C++ to start the actual // execution. They may run additional setups exported by -// `lib/internal/bootstrap/pre_execution.js` depending on the runtime states. +// `lib/internal/process/pre_execution.js` depending on the runtime states. 'use strict'; diff --git a/lib/internal/main/.eslintrc.yaml b/lib/internal/main/.eslintrc.yaml deleted file mode 100644 index dfb75077782301..00000000000000 --- a/lib/internal/main/.eslintrc.yaml +++ /dev/null @@ -1,2 +0,0 @@ -globals: - markBootstrapComplete: true diff --git a/lib/internal/main/check_syntax.js b/lib/internal/main/check_syntax.js index 010bef045e4fc8..4aa0d217c28c11 100644 --- a/lib/internal/main/check_syntax.js +++ b/lib/internal/main/check_syntax.js @@ -4,8 +4,9 @@ // instead of actually running the file. 
const { - prepareMainThreadExecution -} = require('internal/bootstrap/pre_execution'); + prepareMainThreadExecution, + markBootstrapComplete +} = require('internal/process/pre_execution'); const { readStdin diff --git a/lib/internal/bootstrap/environment.js b/lib/internal/main/environment.js similarity index 69% rename from lib/internal/bootstrap/environment.js rename to lib/internal/main/environment.js index 79a67dae378202..0be982bfb6d25d 100644 --- a/lib/internal/bootstrap/environment.js +++ b/lib/internal/main/environment.js @@ -4,10 +4,10 @@ // that depends on run time states. // It is currently only intended for preparing contexts for embedders. -/* global markBootstrapComplete */ const { - prepareMainThreadExecution -} = require('internal/bootstrap/pre_execution'); + prepareMainThreadExecution, + markBootstrapComplete +} = require('internal/process/pre_execution'); prepareMainThreadExecution(); markBootstrapComplete(); diff --git a/lib/internal/main/eval_stdin.js b/lib/internal/main/eval_stdin.js index d97dbece8f0f56..422e05c837a830 100644 --- a/lib/internal/main/eval_stdin.js +++ b/lib/internal/main/eval_stdin.js @@ -3,8 +3,9 @@ // Stdin is not a TTY, we will read it and execute it. const { - prepareMainThreadExecution -} = require('internal/bootstrap/pre_execution'); + prepareMainThreadExecution, + markBootstrapComplete +} = require('internal/process/pre_execution'); const { getOptionValue } = require('internal/options'); diff --git a/lib/internal/main/eval_string.js b/lib/internal/main/eval_string.js index 2784204f6002e9..d4787e151a07f3 100644 --- a/lib/internal/main/eval_string.js +++ b/lib/internal/main/eval_string.js @@ -8,8 +8,9 @@ const { } = primordials; const { - prepareMainThreadExecution -} = require('internal/bootstrap/pre_execution'); + prepareMainThreadExecution, + markBootstrapComplete +} = require('internal/process/pre_execution'); const { evalModule, evalScript } = require('internal/process/execution'); const { addBuiltinLibsToObject } = require('internal/modules/cjs/helpers'); diff --git a/lib/internal/main/inspect.js b/lib/internal/main/inspect.js index b3b26ecaa960f1..6c4cc7cc36bcda 100644 --- a/lib/internal/main/inspect.js +++ b/lib/internal/main/inspect.js @@ -3,8 +3,9 @@ // `node inspect ...` or `node debug ...` const { - prepareMainThreadExecution -} = require('internal/bootstrap/pre_execution'); + prepareMainThreadExecution, + markBootstrapComplete +} = require('internal/process/pre_execution'); prepareMainThreadExecution(); diff --git a/lib/internal/main/mksnapshot.js b/lib/internal/main/mksnapshot.js index 616a436e0a9483..fdf0e021204858 100644 --- a/lib/internal/main/mksnapshot.js +++ b/lib/internal/main/mksnapshot.js @@ -114,7 +114,7 @@ function requireForUserSnapshot(id) { function main() { const { prepareMainThreadExecution - } = require('internal/bootstrap/pre_execution'); + } = require('internal/process/pre_execution'); prepareMainThreadExecution(true, false); diff --git a/lib/internal/main/print_help.js b/lib/internal/main/print_help.js index f8fc5a6075f104..bfef215ace8db5 100644 --- a/lib/internal/main/print_help.js +++ b/lib/internal/main/print_help.js @@ -20,8 +20,9 @@ const { types } = internalBinding('options'); const hasCrypto = Boolean(process.versions.openssl); const { - prepareMainThreadExecution -} = require('internal/bootstrap/pre_execution'); + prepareMainThreadExecution, + markBootstrapComplete +} = require('internal/process/pre_execution'); const typeLookup = []; for (const key of ObjectKeys(types)) diff --git 
a/lib/internal/main/prof_process.js b/lib/internal/main/prof_process.js index bd835bfe630fa4..3d56e3ec57fa46 100644 --- a/lib/internal/main/prof_process.js +++ b/lib/internal/main/prof_process.js @@ -1,8 +1,9 @@ 'use strict'; const { - prepareMainThreadExecution -} = require('internal/bootstrap/pre_execution'); + prepareMainThreadExecution, + markBootstrapComplete +} = require('internal/process/pre_execution'); prepareMainThreadExecution(); markBootstrapComplete(); diff --git a/lib/internal/main/repl.js b/lib/internal/main/repl.js index a8356687ccedf5..7da68dc05e84b4 100644 --- a/lib/internal/main/repl.js +++ b/lib/internal/main/repl.js @@ -4,8 +4,9 @@ // the main module is not specified and stdin is a TTY. const { - prepareMainThreadExecution -} = require('internal/bootstrap/pre_execution'); + prepareMainThreadExecution, + markBootstrapComplete +} = require('internal/process/pre_execution'); const esmLoader = require('internal/process/esm_loader'); const { diff --git a/lib/internal/main/run_main_module.js b/lib/internal/main/run_main_module.js index ca5d1122c59d94..1c2d421fc08996 100644 --- a/lib/internal/main/run_main_module.js +++ b/lib/internal/main/run_main_module.js @@ -1,8 +1,9 @@ 'use strict'; const { - prepareMainThreadExecution -} = require('internal/bootstrap/pre_execution'); + prepareMainThreadExecution, + markBootstrapComplete +} = require('internal/process/pre_execution'); prepareMainThreadExecution(true); diff --git a/lib/internal/main/test_runner.js b/lib/internal/main/test_runner.js index c1066cfca35323..d28311f3ab5dcd 100644 --- a/lib/internal/main/test_runner.js +++ b/lib/internal/main/test_runner.js @@ -12,7 +12,8 @@ const { } = primordials; const { prepareMainThreadExecution, -} = require('internal/bootstrap/pre_execution'); + markBootstrapComplete +} = require('internal/process/pre_execution'); const { spawn } = require('child_process'); const { readdirSync, statSync } = require('fs'); const console = require('internal/console/global'); diff --git a/lib/internal/main/worker_thread.js b/lib/internal/main/worker_thread.js index 8d5bc45edd50f3..01f711aab25ccd 100644 --- a/lib/internal/main/worker_thread.js +++ b/lib/internal/main/worker_thread.js @@ -30,8 +30,9 @@ const { initializeReport, initializeSourceMapsHandlers, loadPreloadModules, - setupTraceCategoryState -} = require('internal/bootstrap/pre_execution'); + setupTraceCategoryState, + markBootstrapComplete +} = require('internal/process/pre_execution'); const { threadId, diff --git a/lib/internal/bootstrap/pre_execution.js b/lib/internal/process/pre_execution.js similarity index 99% rename from lib/internal/bootstrap/pre_execution.js rename to lib/internal/process/pre_execution.js index 823f2515f4f704..3d5657a77e512e 100644 --- a/lib/internal/bootstrap/pre_execution.js +++ b/lib/internal/process/pre_execution.js @@ -572,6 +572,10 @@ function loadPreloadModules() { } } +function markBootstrapComplete() { + internalBinding('performance').markBootstrapComplete(); +} + module.exports = { refreshRuntimeOptions, patchProcessObject, @@ -592,5 +596,6 @@ module.exports = { setupInspectorHooks, initializeReport, initializeCJSLoader, - initializeWASI + initializeWASI, + markBootstrapComplete }; diff --git a/lib/internal/v8/startup_snapshot.js b/lib/internal/v8/startup_snapshot.js index 6a6a6c47e85708..86bee8749566d7 100644 --- a/lib/internal/v8/startup_snapshot.js +++ b/lib/internal/v8/startup_snapshot.js @@ -12,7 +12,6 @@ const { setSerializeCallback, setDeserializeCallback, setDeserializeMainFunction: 
_setDeserializeMainFunction, - markBootstrapComplete } = internalBinding('mksnapshot'); function isBuildingSnapshot() { @@ -87,8 +86,9 @@ function setDeserializeMainFunction(callback, data) { _setDeserializeMainFunction(function deserializeMain() { const { - prepareMainThreadExecution - } = require('internal/bootstrap/pre_execution'); + prepareMainThreadExecution, + markBootstrapComplete + } = require('internal/process/pre_execution'); // This should be in sync with run_main_module.js until we make that // a built-in main function. diff --git a/lib/internal/wasm_web_api.js b/lib/internal/wasm_web_api.js index 084a223806334f..8b28b5e1fb4574 100644 --- a/lib/internal/wasm_web_api.js +++ b/lib/internal/wasm_web_api.js @@ -15,7 +15,7 @@ function lazyUndici() { // This is essentially an implementation of a v8::WasmStreamingCallback, except // that it is implemented in JavaScript because the fetch() implementation is -// difficult to use from C++. See lib/internal/bootstrap/pre_execution.js and +// difficult to use from C++. See lib/internal/process/pre_execution.js and // src/node_wasm_web_api.cc that interact with this function. function wasmStreamingCallback(streamState, source) { (async () => { diff --git a/src/api/environment.cc b/src/api/environment.cc index c0b5f539451696..b09d6736722d4e 100644 --- a/src/api/environment.cc +++ b/src/api/environment.cc @@ -445,13 +445,12 @@ MaybeLocal LoadEnvironment( native_module::NativeModuleLoader::Add( name.c_str(), UnionBytes(**main_utf16, main_utf16->length())); env->set_main_utf16(std::move(main_utf16)); - std::vector> params = { - env->process_string(), - env->require_string()}; + // Arguments must match the parameters specified in + // NativeModuleLoader::LookupAndCompile(). std::vector> args = { env->process_object(), env->native_module_require()}; - return ExecuteBootstrapper(env, name.c_str(), ¶ms, &args); + return ExecuteBootstrapper(env, name.c_str(), &args); }); } @@ -676,8 +675,6 @@ Maybe InitializePrimordials(Local context) { Local primordials_string = FIXED_ONE_BYTE_STRING(isolate, "primordials"); - Local global_string = FIXED_ONE_BYTE_STRING(isolate, "global"); - Local exports_string = FIXED_ONE_BYTE_STRING(isolate, "exports"); // Create primordials first and make it available to per-context scripts. Local primordials = Object::New(isolate); @@ -693,12 +690,12 @@ Maybe InitializePrimordials(Local context) { nullptr}; for (const char** module = context_files; *module != nullptr; module++) { - std::vector> parameters = { - global_string, exports_string, primordials_string}; + // Arguments must match the parameters specified in + // NativeModuleLoader::LookupAndCompile(). 
Local arguments[] = {context->Global(), exports, primordials}; MaybeLocal maybe_fn = native_module::NativeModuleLoader::LookupAndCompile( - context, *module, ¶meters, nullptr); + context, *module, nullptr); Local fn; if (!maybe_fn.ToLocal(&fn)) { return Nothing(); diff --git a/src/node.cc b/src/node.cc index 5cbe48abd184e9..9b8b7f9f0bd49e 100644 --- a/src/node.cc +++ b/src/node.cc @@ -129,7 +129,6 @@ using native_module::NativeModuleLoader; using v8::EscapableHandleScope; using v8::Function; -using v8::FunctionCallbackInfo; using v8::Isolate; using v8::Local; using v8::MaybeLocal; @@ -174,11 +173,10 @@ void SignalExit(int signo, siginfo_t* info, void* ucontext) { MaybeLocal ExecuteBootstrapper(Environment* env, const char* id, - std::vector>* parameters, std::vector>* arguments) { EscapableHandleScope scope(env->isolate()); MaybeLocal maybe_fn = - NativeModuleLoader::LookupAndCompile(env->context(), id, parameters, env); + NativeModuleLoader::LookupAndCompile(env->context(), id, env); Local fn; if (!maybe_fn.ToLocal(&fn)) { @@ -301,12 +299,8 @@ void Environment::InitializeDiagnostics() { MaybeLocal Environment::BootstrapInternalLoaders() { EscapableHandleScope scope(isolate_); - // Create binding loaders - std::vector> loaders_params = { - process_string(), - FIXED_ONE_BYTE_STRING(isolate_, "getLinkedBinding"), - FIXED_ONE_BYTE_STRING(isolate_, "getInternalBinding"), - primordials_string()}; + // Arguments must match the parameters specified in + // NativeModuleLoader::LookupAndCompile(). std::vector> loaders_args = { process_object(), NewFunctionTemplate(isolate_, binding::GetLinkedBinding) @@ -319,8 +313,7 @@ MaybeLocal Environment::BootstrapInternalLoaders() { // Bootstrap internal loaders Local loader_exports; - if (!ExecuteBootstrapper( - this, "internal/bootstrap/loaders", &loaders_params, &loaders_args) + if (!ExecuteBootstrapper(this, "internal/bootstrap/loaders", &loaders_args) .ToLocal(&loader_exports)) { return MaybeLocal(); } @@ -342,28 +335,25 @@ MaybeLocal Environment::BootstrapInternalLoaders() { MaybeLocal Environment::BootstrapNode() { EscapableHandleScope scope(isolate_); + // Arguments must match the parameters specified in + // NativeModuleLoader::LookupAndCompile(). // process, require, internalBinding, primordials - std::vector> node_params = { - process_string(), - require_string(), - internal_binding_string(), - primordials_string()}; std::vector> node_args = { process_object(), native_module_require(), internal_binding_loader(), primordials()}; - MaybeLocal result = ExecuteBootstrapper( - this, "internal/bootstrap/node", &node_params, &node_args); + MaybeLocal result = + ExecuteBootstrapper(this, "internal/bootstrap/node", &node_args); if (result.IsEmpty()) { return MaybeLocal(); } if (!no_browser_globals()) { - result = ExecuteBootstrapper( - this, "internal/bootstrap/browser", &node_params, &node_args); + result = + ExecuteBootstrapper(this, "internal/bootstrap/browser", &node_args); if (result.IsEmpty()) { return MaybeLocal(); @@ -374,8 +364,7 @@ MaybeLocal Environment::BootstrapNode() { auto thread_switch_id = is_main_thread() ? "internal/bootstrap/switches/is_main_thread" : "internal/bootstrap/switches/is_not_main_thread"; - result = - ExecuteBootstrapper(this, thread_switch_id, &node_params, &node_args); + result = ExecuteBootstrapper(this, thread_switch_id, &node_args); if (result.IsEmpty()) { return MaybeLocal(); @@ -385,8 +374,7 @@ MaybeLocal Environment::BootstrapNode() { owns_process_state() ? 
"internal/bootstrap/switches/does_own_process_state" : "internal/bootstrap/switches/does_not_own_process_state"; - result = ExecuteBootstrapper( - this, process_state_switch_id, &node_params, &node_args); + result = ExecuteBootstrapper(this, process_state_switch_id, &node_args); if (result.IsEmpty()) { return MaybeLocal(); @@ -428,35 +416,20 @@ MaybeLocal Environment::RunBootstrapping() { return scope.Escape(result); } -void MarkBootstrapComplete(const FunctionCallbackInfo& args) { - Environment* env = Environment::GetCurrent(args); - env->performance_state()->Mark( - performance::NODE_PERFORMANCE_MILESTONE_BOOTSTRAP_COMPLETE); -} - static MaybeLocal StartExecution(Environment* env, const char* main_script_id) { EscapableHandleScope scope(env->isolate()); CHECK_NOT_NULL(main_script_id); - std::vector> parameters = { - env->process_string(), - env->require_string(), - env->internal_binding_string(), - env->primordials_string(), - FIXED_ONE_BYTE_STRING(env->isolate(), "markBootstrapComplete")}; - - std::vector> arguments = { - env->process_object(), - env->native_module_require(), - env->internal_binding_loader(), - env->primordials(), - NewFunctionTemplate(env->isolate(), MarkBootstrapComplete) - ->GetFunction(env->context()) - .ToLocalChecked()}; + // Arguments must match the parameters specified in + // NativeModuleLoader::LookupAndCompile(). + std::vector> arguments = {env->process_object(), + env->native_module_require(), + env->internal_binding_loader(), + env->primordials()}; return scope.EscapeMaybe( - ExecuteBootstrapper(env, main_script_id, ¶meters, &arguments)); + ExecuteBootstrapper(env, main_script_id, &arguments)); } MaybeLocal StartExecution(Environment* env, StartExecutionCallback cb) { @@ -469,8 +442,7 @@ MaybeLocal StartExecution(Environment* env, StartExecutionCallback cb) { if (cb != nullptr) { EscapableHandleScope scope(env->isolate()); - if (StartExecution(env, "internal/bootstrap/environment").IsEmpty()) - return {}; + if (StartExecution(env, "internal/main/environment").IsEmpty()) return {}; StartExecutionCallbackInfo info = { env->process_object(), diff --git a/src/node_internals.h b/src/node_internals.h index 464b6685de1fe6..1401cb33293e61 100644 --- a/src/node_internals.h +++ b/src/node_internals.h @@ -308,7 +308,6 @@ v8::MaybeLocal GetPerContextExports(v8::Local context); v8::MaybeLocal ExecuteBootstrapper( Environment* env, const char* id, - std::vector>* parameters, std::vector>* arguments); void MarkBootstrapComplete(const v8::FunctionCallbackInfo& args); diff --git a/src/node_native_module.cc b/src/node_native_module.cc index 7a515cc1efaeb2..ffd406625db5c1 100644 --- a/src/node_native_module.cc +++ b/src/node_native_module.cc @@ -184,21 +184,6 @@ ScriptCompiler::CachedData* NativeModuleLoader::GetCodeCache( return it->second.get(); } -MaybeLocal NativeModuleLoader::CompileAsModule( - Local context, - const char* id, - NativeModuleLoader::Result* result) { - Isolate* isolate = context->GetIsolate(); - std::vector> parameters = { - FIXED_ONE_BYTE_STRING(isolate, "exports"), - FIXED_ONE_BYTE_STRING(isolate, "require"), - FIXED_ONE_BYTE_STRING(isolate, "module"), - FIXED_ONE_BYTE_STRING(isolate, "process"), - FIXED_ONE_BYTE_STRING(isolate, "internalBinding"), - FIXED_ONE_BYTE_STRING(isolate, "primordials")}; - return LookupAndCompileInternal(context, id, ¶meters, result); -} - #ifdef NODE_BUILTIN_MODULES_PATH static std::string OnDiskFileName(const char* id) { std::string filename = NODE_BUILTIN_MODULES_PATH; @@ -360,29 +345,84 @@ MaybeLocal 
NativeModuleLoader::LookupAndCompileInternal( MaybeLocal NativeModuleLoader::LookupAndCompile( Local context, const char* id, - std::vector>* parameters, Environment* optional_env) { Result result; - MaybeLocal maybe = - GetInstance()->LookupAndCompileInternal(context, id, parameters, &result); + std::vector> parameters; + Isolate* isolate = context->GetIsolate(); + // Detects parameters of the scripts based on module ids. + // internal/bootstrap/loaders: process, getLinkedBinding, + // getInternalBinding, primordials + if (strcmp(id, "internal/bootstrap/loaders") == 0) { + parameters = { + FIXED_ONE_BYTE_STRING(isolate, "process"), + FIXED_ONE_BYTE_STRING(isolate, "getLinkedBinding"), + FIXED_ONE_BYTE_STRING(isolate, "getInternalBinding"), + FIXED_ONE_BYTE_STRING(isolate, "primordials"), + }; + } else if (strncmp(id, + "internal/per_context/", + strlen("internal/per_context/")) == 0) { + // internal/per_context/*: global, exports, primordials + parameters = { + FIXED_ONE_BYTE_STRING(isolate, "global"), + FIXED_ONE_BYTE_STRING(isolate, "exports"), + FIXED_ONE_BYTE_STRING(isolate, "primordials"), + }; + } else if (strncmp(id, "internal/main/", strlen("internal/main/")) == 0) { + // internal/main/*: process, require, internalBinding, primordials + parameters = { + FIXED_ONE_BYTE_STRING(isolate, "process"), + FIXED_ONE_BYTE_STRING(isolate, "require"), + FIXED_ONE_BYTE_STRING(isolate, "internalBinding"), + FIXED_ONE_BYTE_STRING(isolate, "primordials"), + }; + } else if (strncmp(id, "embedder_main_", strlen("embedder_main_")) == 0) { + // Synthetic embedder main scripts from LoadEnvironment(): process, require + parameters = { + FIXED_ONE_BYTE_STRING(isolate, "process"), + FIXED_ONE_BYTE_STRING(isolate, "require"), + }; + } else if (strncmp(id, + "internal/bootstrap/", + strlen("internal/bootstrap/")) == 0) { + // internal/bootstrap/*: process, require, internalBinding, primordials + parameters = { + FIXED_ONE_BYTE_STRING(isolate, "process"), + FIXED_ONE_BYTE_STRING(isolate, "require"), + FIXED_ONE_BYTE_STRING(isolate, "internalBinding"), + FIXED_ONE_BYTE_STRING(isolate, "primordials"), + }; + } else { + // others: exports, require, module, process, internalBinding, primordials + parameters = { + FIXED_ONE_BYTE_STRING(isolate, "exports"), + FIXED_ONE_BYTE_STRING(isolate, "require"), + FIXED_ONE_BYTE_STRING(isolate, "module"), + FIXED_ONE_BYTE_STRING(isolate, "process"), + FIXED_ONE_BYTE_STRING(isolate, "internalBinding"), + FIXED_ONE_BYTE_STRING(isolate, "primordials"), + }; + } + + MaybeLocal maybe = GetInstance()->LookupAndCompileInternal( + context, id, ¶meters, &result); if (optional_env != nullptr) { RecordResult(id, result, optional_env); } return maybe; } -bool NativeModuleLoader::CompileAllModules(Local context) { +bool NativeModuleLoader::CompileAllBuiltins(Local context) { NativeModuleLoader* loader = GetInstance(); std::vector ids = loader->GetModuleIds(); bool all_succeeded = true; + std::string v8_tools_prefix = "internal/deps/v8/tools/"; for (const auto& id : ids) { - // TODO(joyeecheung): compile non-module scripts here too. 
- if (!loader->CanBeRequired(id.c_str())) { + if (id.compare(0, v8_tools_prefix.size(), v8_tools_prefix) == 0) { continue; } v8::TryCatch bootstrapCatch(context->GetIsolate()); - Result result; - USE(loader->CompileAsModule(context, id.c_str(), &result)); + USE(loader->LookupAndCompile(context, id.c_str(), nullptr)); if (bootstrapCatch.HasCaught()) { per_process::Debug(DebugCategory::CODE_CACHE, "Failed to compile code cache for %s\n", @@ -538,16 +578,15 @@ void NativeModuleLoader::RecordResult(const char* id, env->native_modules_without_cache.insert(id); } } + void NativeModuleLoader::CompileFunction( const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); CHECK(args[0]->IsString()); node::Utf8Value id_v(env->isolate(), args[0].As()); const char* id = *id_v; - NativeModuleLoader::Result result; MaybeLocal maybe = - GetInstance()->CompileAsModule(env->context(), id, &result); - RecordResult(id, result, env); + GetInstance()->LookupAndCompile(env->context(), id, env); Local fn; if (maybe.ToLocal(&fn)) { args.GetReturnValue().Set(fn); diff --git a/src/node_native_module.h b/src/node_native_module.h index 3c797a5d35b17e..5357a1272d3214 100644 --- a/src/node_native_module.h +++ b/src/node_native_module.h @@ -45,10 +45,11 @@ class NODE_EXTERN_PRIVATE NativeModuleLoader { v8::Local context, void* priv); + // The parameters used to compile the scripts are detected based on + // the pattern of the id. static v8::MaybeLocal LookupAndCompile( v8::Local context, const char* id, - std::vector>* parameters, Environment* optional_env); static v8::Local GetSourceObject(v8::Local context); @@ -57,7 +58,7 @@ class NODE_EXTERN_PRIVATE NativeModuleLoader { static bool Exists(const char* id); static bool Add(const char* id, const UnionBytes& source); - static bool CompileAllModules(v8::Local context); + static bool CompileAllBuiltins(v8::Local context); static void RefreshCodeCache(const std::vector& in); static void CopyCodeCache(std::vector* out); @@ -100,9 +101,6 @@ class NODE_EXTERN_PRIVATE NativeModuleLoader { const char* id, std::vector>* parameters, Result* result); - v8::MaybeLocal CompileAsModule(v8::Local context, - const char* id, - Result* result); static void RecordResult(const char* id, NativeModuleLoader::Result result, diff --git a/src/node_perf.cc b/src/node_perf.cc index 15f45cba78c1e0..f08237f03c405e 100644 --- a/src/node_perf.cc +++ b/src/node_perf.cc @@ -265,6 +265,12 @@ void GetTimeOriginTimeStamp(const FunctionCallbackInfo& args) { Number::New(args.GetIsolate(), timeOriginTimestamp / MICROS_PER_MILLIS)); } +void MarkBootstrapComplete(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + env->performance_state()->Mark( + performance::NODE_PERFORMANCE_MILESTONE_BOOTSTRAP_COMPLETE); +} + void Initialize(Local target, Local unused, Local context, @@ -304,6 +310,7 @@ void Initialize(Local target, SetMethod(context, target, "getTimeOrigin", GetTimeOrigin); SetMethod(context, target, "getTimeOriginTimestamp", GetTimeOriginTimeStamp); SetMethod(context, target, "createELDHistogram", CreateELDHistogram); + SetMethod(context, target, "markBootstrapComplete", MarkBootstrapComplete); Local constants = Object::New(isolate); @@ -358,6 +365,7 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(GetTimeOrigin); registry->Register(GetTimeOriginTimeStamp); registry->Register(CreateELDHistogram); + registry->Register(MarkBootstrapComplete); HistogramBase::RegisterExternalReferences(registry); 
IntervalHistogram::RegisterExternalReferences(registry); } diff --git a/src/node_snapshotable.cc b/src/node_snapshotable.cc index a7551bf1af14c0..880c50663f9aa1 100644 --- a/src/node_snapshotable.cc +++ b/src/node_snapshotable.cc @@ -285,7 +285,8 @@ int SnapshotBuilder::Generate(SnapshotData* out, #ifdef NODE_USE_NODE_CODE_CACHE // Regenerate all the code cache. - if (!native_module::NativeModuleLoader::CompileAllModules(main_context)) { + if (!native_module::NativeModuleLoader::CompileAllBuiltins( + main_context)) { return UNCAUGHT_EXCEPTION_ERROR; } native_module::NativeModuleLoader::CopyCodeCache(&(out->code_cache)); @@ -521,7 +522,6 @@ void Initialize(Local target, Local context, void* priv) { SetMethod(context, target, "compileSerializeMain", CompileSerializeMain); - SetMethod(context, target, "markBootstrapComplete", MarkBootstrapComplete); SetMethod(context, target, "setSerializeCallback", SetSerializeCallback); SetMethod(context, target, "setDeserializeCallback", SetDeserializeCallback); SetMethod(context, @@ -532,7 +532,6 @@ void Initialize(Local target, void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(CompileSerializeMain); - registry->Register(MarkBootstrapComplete); registry->Register(SetSerializeCallback); registry->Register(SetDeserializeCallback); registry->Register(SetDeserializeMainFunction); diff --git a/test/parallel/test-bootstrap-modules.js b/test/parallel/test-bootstrap-modules.js index 87c9dc1b92523c..63ef4336123640 100644 --- a/test/parallel/test-bootstrap-modules.js +++ b/test/parallel/test-bootstrap-modules.js @@ -52,7 +52,6 @@ const expectedModules = new Set([ 'NativeModule internal/assert', 'NativeModule internal/async_hooks', 'NativeModule internal/blocklist', - 'NativeModule internal/bootstrap/pre_execution', 'NativeModule internal/buffer', 'NativeModule internal/console/constructor', 'NativeModule internal/console/global', @@ -106,6 +105,7 @@ const expectedModules = new Set([ 'NativeModule internal/process/esm_loader', 'NativeModule internal/process/execution', 'NativeModule internal/process/per_thread', + 'NativeModule internal/process/pre_execution', 'NativeModule internal/process/promises', 'NativeModule internal/process/report', 'NativeModule internal/process/signal', diff --git a/test/parallel/test-code-cache.js b/test/parallel/test-code-cache.js index 00deafd6d49465..44fe98238ad069 100644 --- a/test/parallel/test-code-cache.js +++ b/test/parallel/test-code-cache.js @@ -12,7 +12,7 @@ const { } = require('internal/test/binding'); const { getCacheUsage, - moduleCategories: { canBeRequired, cannotBeRequired } + moduleCategories: { canBeRequired } } = internalBinding('native_module'); for (const key of canBeRequired) { @@ -54,20 +54,12 @@ if (!process.features.cached_builtins) { } else { // Native compiled assert(process.config.variables.node_use_node_code_cache); - if (!isMainThread) { - for (const key of [ 'internal/bootstrap/pre_execution' ]) { - canBeRequired.add(key); - cannotBeRequired.delete(key); - } - } - const wrong = []; for (const key of loadedModules) { - if (cannotBeRequired.has(key) && !compiledWithoutCache.has(key)) { - wrong.push(`"${key}" should've been compiled **without** code cache`); + if (key.startsWith('internal/deps/v8/tools')) { + continue; } - if (canBeRequired.has(key) && - !compiledWithCache.has(key) && + if (!compiledWithCache.has(key) && compiledInSnapshot.indexOf(key) === -1) { wrong.push(`"${key}" should've been compiled **with** code cache`); } From 
5e9c197d85b79fc19425a5d26dbd09ecaf62e148 Mon Sep 17 00:00:00 2001 From: Jacob Smith <3012099+JakobJingleheimer@users.noreply.github.com> Date: Fri, 5 Aug 2022 00:10:13 +0200 Subject: [PATCH 068/177] esm: fix loader hooks accepting too many arguments PR-URL: https://github.com/nodejs/node/pull/44109 Reviewed-By: Geoffrey Booth Reviewed-By: Guy Bedford Reviewed-By: Antoine du Hamel --- lib/internal/modules/esm/loader.js | 22 +++++---------- test/es-module/test-esm-loader-chaining.mjs | 22 +++++++++++++++ .../es-module-loaders/loader-log-args.mjs | 28 +++++++++++++++++++ .../loader-with-too-many-args.mjs | 7 +++++ 4 files changed, 64 insertions(+), 15 deletions(-) create mode 100644 test/fixtures/es-module-loaders/loader-log-args.mjs create mode 100644 test/fixtures/es-module-loaders/loader-with-too-many-args.mjs diff --git a/lib/internal/modules/esm/loader.js b/lib/internal/modules/esm/loader.js index 027077cbb6e781..f2d5600fba657c 100644 --- a/lib/internal/modules/esm/loader.js +++ b/lib/internal/modules/esm/loader.js @@ -15,7 +15,6 @@ const { ObjectDefineProperty, ObjectSetPrototypeOf, PromiseAll, - ReflectApply, RegExpPrototypeExec, SafeArrayIterator, SafeWeakMap, @@ -148,29 +147,22 @@ function nextHookFactory(chain, meta, { validateArgs, validateOutput }) { } return ObjectDefineProperty( - async (...args) => { + async (arg0 = undefined, context) => { // Update only when hook is invoked to avoid fingering the wrong filePath meta.hookErrIdentifier = `${hookFilePath} '${hookName}'`; - validateArgs(`${meta.hookErrIdentifier} hook's ${nextHookName}()`, args); + validateArgs(`${meta.hookErrIdentifier} hook's ${nextHookName}()`, arg0, context); const outputErrIdentifier = `${chain[generatedHookIndex].url} '${hookName}' hook's ${nextHookName}()`; // Set when next is actually called, not just generated. if (generatedHookIndex === 0) { meta.chainFinished = true; } - // `context` is an optional argument that only needs to be passed when changed - switch (args.length) { - case 1: // It was omitted, so supply the cached value - ArrayPrototypePush(args, meta.context); - break; - case 2: // Overrides were supplied, so update cached value - ObjectAssign(meta.context, args[1]); - break; + if (context) { // `context` has already been validated, so no fancy check needed. 
+ ObjectAssign(meta.context, context); } - ArrayPrototypePush(args, nextNextHook); - const output = await ReflectApply(hook, undefined, args); + const output = await hook(arg0, meta.context, nextNextHook); validateOutput(outputErrIdentifier, output); @@ -575,7 +567,7 @@ class ESMLoader { shortCircuited: false, }; - const validateArgs = (hookErrIdentifier, { 0: nextUrl, 1: ctx }) => { + const validateArgs = (hookErrIdentifier, nextUrl, ctx) => { if (typeof nextUrl !== 'string') { // non-strings can be coerced to a url string // validateString() throws a less-specific error @@ -829,7 +821,7 @@ class ESMLoader { shortCircuited: false, }; - const validateArgs = (hookErrIdentifier, { 0: suppliedSpecifier, 1: ctx }) => { + const validateArgs = (hookErrIdentifier, suppliedSpecifier, ctx) => { validateString( suppliedSpecifier, `${hookErrIdentifier} specifier`, diff --git a/test/es-module/test-esm-loader-chaining.mjs b/test/es-module/test-esm-loader-chaining.mjs index 14303cb5c42665..b04dbe4ddd6c1a 100644 --- a/test/es-module/test-esm-loader-chaining.mjs +++ b/test/es-module/test-esm-loader-chaining.mjs @@ -101,6 +101,28 @@ describe('ESM: loader chaining', { concurrency: true }, () => { assert.strictEqual(code, 0); }); + it('should accept only the correct arguments', async () => { + const { stdout } = await spawnPromisified( + execPath, + [ + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-log-args.mjs'), + '--loader', + fixtures.fileURL('es-module-loaders', 'loader-with-too-many-args.mjs'), + ...commonArgs, + ], + { encoding: 'utf8' }, + ); + + assert.match(stdout, /^resolve arg count: 3$/m); + assert.match(stdout, /specifier: 'node:fs'/); + assert.match(stdout, /next: \[AsyncFunction: nextResolve\]/); + + assert.match(stdout, /^load arg count: 3$/m); + assert.match(stdout, /url: 'node:fs'/); + assert.match(stdout, /next: \[AsyncFunction: nextLoad\]/); + }); + it('should result in proper output from multiple changes in resolve hooks', async () => { const { code, stderr, stdout } = await spawnPromisified( execPath, diff --git a/test/fixtures/es-module-loaders/loader-log-args.mjs b/test/fixtures/es-module-loaders/loader-log-args.mjs new file mode 100644 index 00000000000000..84ed373d6b4de4 --- /dev/null +++ b/test/fixtures/es-module-loaders/loader-log-args.mjs @@ -0,0 +1,28 @@ +export async function resolve(...args) { + console.log(`resolve arg count: ${args.length}`); + console.log({ + specifier: args[0], + context: args[1], + next: args[2], + }); + + return { + shortCircuit: true, + url: args[0], + }; +} + +export async function load(...args) { + console.log(`load arg count: ${args.length}`); + console.log({ + url: args[0], + context: args[1], + next: args[2], + }); + + return { + format: 'module', + source: '', + shortCircuit: true, + }; +} diff --git a/test/fixtures/es-module-loaders/loader-with-too-many-args.mjs b/test/fixtures/es-module-loaders/loader-with-too-many-args.mjs new file mode 100644 index 00000000000000..95f40ec15d200d --- /dev/null +++ b/test/fixtures/es-module-loaders/loader-with-too-many-args.mjs @@ -0,0 +1,7 @@ +export async function resolve(specifier, context, next) { + return next(specifier, context, 'resolve-extra-arg'); +} + +export async function load(url, context, next) { + return next(url, context, 'load-extra-arg'); +} From cb7a9e78fdbc7fef8fd68c9a082ad0665c064357 Mon Sep 17 00:00:00 2001 From: Colin Ihrig Date: Thu, 4 Aug 2022 18:32:48 -0400 Subject: [PATCH 069/177] doc: remove unused code in call tracker example MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit This function appears to be unused, so remove it. PR-URL: https://github.com/nodejs/node/pull/44127 Reviewed-By: Benjamin Gruenbaum Reviewed-By: Tobias Nießen Reviewed-By: Luigi Pinca Reviewed-By: Evan Lucas Reviewed-By: Antoine du Hamel --- doc/api/assert.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/doc/api/assert.md b/doc/api/assert.md index 4847bbf86dda68..4958e6dd7df474 100644 --- a/doc/api/assert.md +++ b/doc/api/assert.md @@ -351,8 +351,6 @@ const tracker = new assert.CallTracker(); function func() {} -function foo() {} - // Returns a function that wraps func() that must be called exact times // before tracker.verify(). const callsfunc = tracker.calls(func, 2); @@ -379,8 +377,6 @@ const tracker = new assert.CallTracker(); function func() {} -function foo() {} - // Returns a function that wraps func() that must be called exact times // before tracker.verify(). const callsfunc = tracker.calls(func, 2); From 123b2d6795e877fa11320bd6f30f297868aeddb7 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Fri, 5 Aug 2022 06:41:17 +0800 Subject: [PATCH 070/177] bootstrap: turn on FunctionCodeHandling::kKeep in the snapshot builder To improve startup performance. PR-URL: https://github.com/nodejs/node/pull/44104 Reviewed-By: Chengzhong Wu Reviewed-By: Anna Henningsen --- src/node_snapshotable.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/node_snapshotable.cc b/src/node_snapshotable.cc index 880c50663f9aa1..f0c71042fd5e57 100644 --- a/src/node_snapshotable.cc +++ b/src/node_snapshotable.cc @@ -313,7 +313,7 @@ int SnapshotBuilder::Generate(SnapshotData* out, // Must be out of HandleScope out->v8_snapshot_blob_data = - creator.CreateBlob(SnapshotCreator::FunctionCodeHandling::kClear); + creator.CreateBlob(SnapshotCreator::FunctionCodeHandling::kKeep); // We must be able to rehash the blob when we restore it or otherwise // the hash seed would be fixed by V8, introducing a vulnerability. 
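As context for the `FunctionCodeHandling::kKeep` change above, here is a minimal embedder-style sketch (not Node.js source) of how a V8 startup snapshot blob is produced. The `v8::SnapshotCreator` calls are real V8 API; the function name and the omitted bootstrap step are illustrative assumptions.

```cpp
// Illustrative sketch only: build a startup snapshot and keep compiled
// function code in the blob (kKeep) rather than dropping it and letting
// V8 recompile lazily at runtime (kClear), trading blob size for startup.
#include <v8.h>

v8::StartupData CreateSnapshotBlob() {
  v8::SnapshotCreator creator;                  // owns a dedicated isolate
  v8::Isolate* isolate = creator.GetIsolate();
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Local<v8::Context> context = v8::Context::New(isolate);
    // ... evaluate bootstrap scripts in `context` here ...
    creator.SetDefaultContext(context);
  }
  return creator.CreateBlob(
      v8::SnapshotCreator::FunctionCodeHandling::kKeep);
}
```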
From 2f904bc8bfac6790938bde7df0face324b11f284 Mon Sep 17 00:00:00 2001 From: Kohei Ueno Date: Fri, 5 Aug 2022 07:41:28 +0900 Subject: [PATCH 071/177] stream: update TextEncoderStream to align the latest spec PR-URL: https://github.com/nodejs/node/pull/44101 Reviewed-By: Benjamin Gruenbaum Reviewed-By: Antoine du Hamel --- lib/internal/webstreams/encoding.js | 43 +++++++++++++++++++++++++---- test/wpt/status/encoding.json | 31 +++++++++++++++++++-- 2 files changed, 66 insertions(+), 8 deletions(-) diff --git a/lib/internal/webstreams/encoding.js b/lib/internal/webstreams/encoding.js index 233a09a216d72d..b95441e12e0cf1 100644 --- a/lib/internal/webstreams/encoding.js +++ b/lib/internal/webstreams/encoding.js @@ -2,7 +2,10 @@ const { ObjectDefineProperties, + String, + StringPrototypeCharCodeAt, Symbol, + Uint8Array, } = primordials; const { @@ -31,6 +34,7 @@ const { const kHandle = Symbol('kHandle'); const kTransform = Symbol('kTransform'); const kType = Symbol('kType'); +const kPendingHighSurrogate = Symbol('kPendingHighSurrogate'); /** * @typedef {import('./readablestream').ReadableStream} ReadableStream @@ -49,19 +53,46 @@ function isTextDecoderStream(value) { class TextEncoderStream { constructor() { + this[kPendingHighSurrogate] = null; this[kType] = 'TextEncoderStream'; this[kHandle] = new TextEncoder(); this[kTransform] = new TransformStream({ transform: (chunk, controller) => { - const value = this[kHandle].encode(chunk); - if (value) + // https://encoding.spec.whatwg.org/#encode-and-enqueue-a-chunk + chunk = String(chunk); + let finalChunk = ''; + for (let i = 0; i < chunk.length; i++) { + const item = chunk[i]; + const codeUnit = StringPrototypeCharCodeAt(item, 0); + if (this[kPendingHighSurrogate] !== null) { + const highSurrogate = this[kPendingHighSurrogate]; + this[kPendingHighSurrogate] = null; + if (0xDC00 <= codeUnit && codeUnit <= 0xDFFF) { + finalChunk += highSurrogate + item; + continue; + } + finalChunk += '\uFFFD'; + } + if (0xD800 <= codeUnit && codeUnit <= 0xDBFF) { + this[kPendingHighSurrogate] = item; + continue; + } + if (0xDC00 <= codeUnit && codeUnit <= 0xDFFF) { + finalChunk += '\uFFFD'; + continue; + } + finalChunk += item; + } + if (finalChunk) { + const value = this[kHandle].encode(finalChunk); controller.enqueue(value); + } }, flush: (controller) => { - const value = this[kHandle].encode(); - if (value.byteLength > 0) - controller.enqueue(value); - controller.terminate(); + // https://encoding.spec.whatwg.org/#encode-and-flush + if (this[kPendingHighSurrogate] !== null) { + controller.enqueue(new Uint8Array([0xEF, 0xBF, 0xBD])); + } }, }); } diff --git a/test/wpt/status/encoding.json b/test/wpt/status/encoding.json index 15dad0b2d4f8a0..a9fe29a0bbc3fa 100644 --- a/test/wpt/status/encoding.json +++ b/test/wpt/status/encoding.json @@ -48,8 +48,35 @@ "unsupported-encodings.any.js": { "skip": "decoding-helpers.js needs XMLHttpRequest" }, - "streams/*.js": { - "fail": "No implementation of TextDecoderStream and TextEncoderStream" + "streams/decode-ignore-bom.any.js": { + "requires": ["small-icu"] + }, + "streams/realms.window.js": { + "skip": "window is not defined" + }, + "streams/decode-attributes.any.js": { + "requires": ["full-icu"] + }, + "streams/decode-incomplete-input.any.js": { + "requires": ["small-icu"] + }, + "streams/decode-utf8.any.js": { + "requires": ["small-icu"], + "fail": { + "unexpected": [ + "promise_test: Unhandled rejection with value: object 'TypeError: Cannot perform Construct on a detached ArrayBuffer'" + ] + } + }, + 
"streams/decode-bad-chunks.any.js": { + "fail": { + "unexpected": [ + "assert_unreached: Should have rejected: write should reject Reached unreachable code" + ] + } + }, + "streams/decode-non-utf8.any.js": { + "requires": ["full-icu"] }, "encodeInto.any.js": { "requires": ["small-icu"] From 4755ad5495b45c25eb888996abf8e683d7f6f265 Mon Sep 17 00:00:00 2001 From: Keyhan Vakil Date: Thu, 4 Aug 2022 15:41:38 -0700 Subject: [PATCH 072/177] src: remove usages of GetBackingStore in crypto This removes all usages of GetBackingStore in `crypto`. See the linked issue for an explanation. Note: I am not sure of the lifetime semantics intended by `ArrayBufferOrViewContents` -- I am pretty sure it is correct based on a manual audit of the callsites, but please ensure that it is correct. Refs: https://github.com/nodejs/node/issues/32226 Refs: https://github.com/nodejs/node/pull/43921 PR-URL: https://github.com/nodejs/node/pull/44079 Reviewed-By: Matteo Collina Reviewed-By: Feng Yu Reviewed-By: Anna Henningsen --- src/crypto/README.md | 5 ++++- src/crypto/crypto_cipher.cc | 15 ++++++--------- src/crypto/crypto_util.h | 25 +++++++++++++++++++------ 3 files changed, 29 insertions(+), 16 deletions(-) diff --git a/src/crypto/README.md b/src/crypto/README.md index 1f5e5b20448499..c58f3cb118089f 100644 --- a/src/crypto/README.md +++ b/src/crypto/README.md @@ -112,12 +112,15 @@ the `ByteSource::Builder` without releasing it as a `ByteSource`. ### `ArrayBufferOrViewContents` -The `ArrayBufferOfViewContents` class is a helper utility that abstracts +The `ArrayBufferOrViewContents` class is a helper utility that abstracts `ArrayBuffer`, `TypedArray`, or `DataView` inputs and provides access to their underlying data pointers. It is used extensively through `src/crypto` to make it easier to deal with inputs that allow any `ArrayBuffer`-backed object. +The lifetime of `ArrayBufferOrViewContents` should not exceed the +lifetime of its input. + ### Key objects Most crypto operations involve the use of keys -- cryptographic inputs diff --git a/src/crypto/crypto_cipher.cc b/src/crypto/crypto_cipher.cc index 0ce2708d12ceb8..44b54363dd1ec2 100644 --- a/src/crypto/crypto_cipher.cc +++ b/src/crypto/crypto_cipher.cc @@ -536,9 +536,8 @@ void CipherBase::InitIv(const FunctionCallbackInfo& args) { if (UNLIKELY(key_buf.size() > INT_MAX)) return THROW_ERR_OUT_OF_RANGE(env, "key is too big"); - ArrayBufferOrViewContents iv_buf; - if (!args[2]->IsNull()) - iv_buf = ArrayBufferOrViewContents(args[2]); + ArrayBufferOrViewContents iv_buf( + !args[2]->IsNull() ? args[2] : Local()); if (UNLIKELY(!iv_buf.CheckSizeInt32())) return THROW_ERR_OUT_OF_RANGE(env, "iv is too big"); @@ -1061,12 +1060,10 @@ void PublicKeyCipher::Cipher(const FunctionCallbackInfo& args) { return THROW_ERR_OSSL_EVP_INVALID_DIGEST(env); } - ArrayBufferOrViewContents oaep_label; - if (!args[offset + 3]->IsUndefined()) { - oaep_label = ArrayBufferOrViewContents(args[offset + 3]); - if (UNLIKELY(!oaep_label.CheckSizeInt32())) - return THROW_ERR_OUT_OF_RANGE(env, "oaep_label is too big"); - } + ArrayBufferOrViewContents oaep_label( + !args[offset + 3]->IsUndefined() ? 
args[offset + 3] : Local()); + if (UNLIKELY(!oaep_label.CheckSizeInt32())) + return THROW_ERR_OUT_OF_RANGE(env, "oaep_label is too big"); std::unique_ptr out; if (!Cipher( diff --git a/src/crypto/crypto_util.h b/src/crypto/crypto_util.h index 69ada7995bd226..4afae1884fe40e 100644 --- a/src/crypto/crypto_util.h +++ b/src/crypto/crypto_util.h @@ -699,24 +699,30 @@ template class ArrayBufferOrViewContents { public: ArrayBufferOrViewContents() = default; + ArrayBufferOrViewContents(const ArrayBufferOrViewContents&) = delete; + void operator=(const ArrayBufferOrViewContents&) = delete; inline explicit ArrayBufferOrViewContents(v8::Local buf) { + if (buf.IsEmpty()) { + return; + } + CHECK(IsAnyByteSource(buf)); if (buf->IsArrayBufferView()) { auto view = buf.As(); offset_ = view->ByteOffset(); length_ = view->ByteLength(); - store_ = view->Buffer()->GetBackingStore(); + data_ = view->Buffer()->Data(); } else if (buf->IsArrayBuffer()) { auto ab = buf.As(); offset_ = 0; length_ = ab->ByteLength(); - store_ = ab->GetBackingStore(); + data_ = ab->Data(); } else { auto sab = buf.As(); offset_ = 0; length_ = sab->ByteLength(); - store_ = sab->GetBackingStore(); + data_ = sab->Data(); } } @@ -726,7 +732,7 @@ class ArrayBufferOrViewContents { // length is zero, so we have to return something. if (size() == 0) return &buf; - return reinterpret_cast(store_->Data()) + offset_; + return reinterpret_cast(data_) + offset_; } inline T* data() { @@ -735,7 +741,7 @@ class ArrayBufferOrViewContents { // length is zero, so we have to return something. if (size() == 0) return &buf; - return reinterpret_cast(store_->Data()) + offset_; + return reinterpret_cast(data_) + offset_; } inline size_t size() const { return length_; } @@ -775,7 +781,14 @@ class ArrayBufferOrViewContents { T buf = 0; size_t offset_ = 0; size_t length_ = 0; - std::shared_ptr store_; + void* data_ = nullptr; + + // Declaring operator new and delete as deleted is not spec compliant. + // Therefore declare them private instead to disable dynamic alloc + void* operator new(size_t); + void* operator new[](size_t); + void operator delete(void*); + void operator delete[](void*); }; v8::MaybeLocal EncodeBignum( From 05802c287756e6334ac119144e5459bb32a7a52e Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Fri, 5 Aug 2022 00:41:48 +0200 Subject: [PATCH 073/177] module: protect against prototype mutation Ensures that mutating the `Object` prototype does not influence the parsing of `package.json` files. 
PR-URL: https://github.com/nodejs/node/pull/44007 Reviewed-By: Geoffrey Booth --- lib/internal/modules/cjs/helpers.js | 3 +- lib/internal/modules/cjs/loader.js | 21 +++++---- lib/internal/modules/esm/package_config.js | 6 ++- lib/internal/util.js | 32 ++++++++++++++ test/fixtures/es-module-specifiers/index.mjs | 3 +- .../node_modules/no-main-field/index.js | 2 + .../node_modules/no-main-field/package.json | 1 + .../test-module-prototype-mutation.js | 43 +++++++++++++++++++ 8 files changed, 96 insertions(+), 15 deletions(-) create mode 100644 test/fixtures/es-module-specifiers/node_modules/no-main-field/index.js create mode 100644 test/fixtures/es-module-specifiers/node_modules/no-main-field/package.json create mode 100644 test/parallel/test-module-prototype-mutation.js diff --git a/lib/internal/modules/cjs/helpers.js b/lib/internal/modules/cjs/helpers.js index ef4544ca0aaa52..20c167f8599f7b 100644 --- a/lib/internal/modules/cjs/helpers.js +++ b/lib/internal/modules/cjs/helpers.js @@ -24,6 +24,7 @@ const path = require('path'); const { pathToFileURL, fileURLToPath, URL } = require('internal/url'); const { getOptionValue } = require('internal/options'); +const { setOwnProperty } = require('internal/util'); const userConditions = getOptionValue('--conditions'); let debug = require('internal/util/debuglog').debuglog('module', (fn) => { @@ -117,7 +118,7 @@ function makeRequireFunction(mod, redirects) { resolve.paths = paths; - require.main = process.mainModule; + setOwnProperty(require, 'main', process.mainModule); // Enable support to add extra extension types. require.extensions = Module._extensions; diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js index 711589894d5d19..7c08ff8df31883 100644 --- a/lib/internal/modules/cjs/loader.js +++ b/lib/internal/modules/cjs/loader.js @@ -79,7 +79,7 @@ const { maybeCacheSourceMap, } = require('internal/source_map/source_map_cache'); const { pathToFileURL, fileURLToPath, isURLInstance } = require('internal/url'); -const { deprecate, kEmptyObject } = require('internal/util'); +const { deprecate, kEmptyObject, filterOwnProperties, setOwnProperty } = require('internal/util'); const vm = require('vm'); const assert = require('internal/assert'); const fs = require('fs'); @@ -172,7 +172,7 @@ const moduleParentCache = new SafeWeakMap(); function Module(id = '', parent) { this.id = id; this.path = path.dirname(id); - this.exports = {}; + setOwnProperty(this, 'exports', {}); moduleParentCache.set(this, parent); updateChildren(parent, this, false); this.filename = null; @@ -312,14 +312,13 @@ function readPackage(requestPath) { } try { - const parsed = JSONParse(json); - const filtered = { - name: parsed.name, - main: parsed.main, - exports: parsed.exports, - imports: parsed.imports, - type: parsed.type - }; + const filtered = filterOwnProperties(JSONParse(json), [ + 'name', + 'main', + 'exports', + 'imports', + 'type', + ]); packageJsonCache.set(jsonPath, filtered); return filtered; } catch (e) { @@ -1185,7 +1184,7 @@ Module._extensions['.json'] = function(module, filename) { } try { - module.exports = JSONParse(stripBOM(content)); + setOwnProperty(module, 'exports', JSONParse(stripBOM(content))); } catch (err) { err.message = filename + ': ' + err.message; throw err; diff --git a/lib/internal/modules/esm/package_config.js b/lib/internal/modules/esm/package_config.js index 89e90d0d997cd2..7271d065de9461 100644 --- a/lib/internal/modules/esm/package_config.js +++ b/lib/internal/modules/esm/package_config.js @@ -2,6 
+2,7 @@ const { JSONParse, + ObjectPrototypeHasOwnProperty, SafeMap, StringPrototypeEndsWith, } = primordials; @@ -11,6 +12,7 @@ const { } = require('internal/errors').codes; const packageJsonReader = require('internal/modules/package_json_reader'); +const { filterOwnProperties } = require('internal/util'); /** @@ -66,8 +68,8 @@ function getPackageConfig(path, specifier, base) { ); } - let { imports, main, name, type } = packageJSON; - const { exports } = packageJSON; + let { imports, main, name, type } = filterOwnProperties(packageJSON, ['imports', 'main', 'name', 'type']); + const exports = ObjectPrototypeHasOwnProperty(packageJSON, 'exports') ? packageJSON.exports : undefined; if (typeof imports !== 'object' || imports === null) { imports = undefined; } diff --git a/lib/internal/util.js b/lib/internal/util.js index 82b52b401984fb..c077f4be2334a5 100644 --- a/lib/internal/util.js +++ b/lib/internal/util.js @@ -14,6 +14,7 @@ const { ObjectGetOwnPropertyDescriptors, ObjectGetPrototypeOf, ObjectFreeze, + ObjectPrototypeHasOwnProperty, ObjectSetPrototypeOf, Promise, ReflectApply, @@ -507,6 +508,35 @@ ObjectFreeze(kEnumerableProperty); const kEmptyObject = ObjectFreeze(ObjectCreate(null)); +function filterOwnProperties(source, keys) { + const filtered = ObjectCreate(null); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if (ObjectPrototypeHasOwnProperty(source, key)) { + filtered[key] = source[key]; + } + } + + return filtered; +} + +/** + * Mimics `obj[key] = value` but ignoring potential prototype inheritance. + * @param {any} obj + * @param {string} key + * @param {any} value + * @returns {any} + */ +function setOwnProperty(obj, key, value) { + return ObjectDefineProperty(obj, key, { + __proto__: null, + configurable: true, + enumerable: true, + value, + writable: true, + }); +} + module.exports = { assertCrypto, cachedResult, @@ -519,6 +549,7 @@ module.exports = { emitExperimentalWarning, exposeInterface, filterDuplicateStrings, + filterOwnProperties, getConstructorOf, getSystemErrorMap, getSystemErrorName, @@ -549,4 +580,5 @@ module.exports = { kEmptyObject, kEnumerableProperty, + setOwnProperty, }; diff --git a/test/fixtures/es-module-specifiers/index.mjs b/test/fixtures/es-module-specifiers/index.mjs index 2be7048513f287..8c361af157b70c 100644 --- a/test/fixtures/es-module-specifiers/index.mjs +++ b/test/fixtures/es-module-specifiers/index.mjs @@ -1,10 +1,11 @@ import explicit from 'explicit-main'; import implicit from 'implicit-main'; import implicitModule from 'implicit-main-type-module'; +import noMain from 'no-main-field'; function getImplicitCommonjs () { return import('implicit-main-type-commonjs'); } -export {explicit, implicit, implicitModule, getImplicitCommonjs}; +export {explicit, implicit, implicitModule, getImplicitCommonjs, noMain}; export default 'success'; diff --git a/test/fixtures/es-module-specifiers/node_modules/no-main-field/index.js b/test/fixtures/es-module-specifiers/node_modules/no-main-field/index.js new file mode 100644 index 00000000000000..528a6ff2acbf84 --- /dev/null +++ b/test/fixtures/es-module-specifiers/node_modules/no-main-field/index.js @@ -0,0 +1,2 @@ +'use strict'; +module.exports = 'no main field'; diff --git a/test/fixtures/es-module-specifiers/node_modules/no-main-field/package.json b/test/fixtures/es-module-specifiers/node_modules/no-main-field/package.json new file mode 100644 index 00000000000000..0967ef424bce67 --- /dev/null +++ b/test/fixtures/es-module-specifiers/node_modules/no-main-field/package.json @@ -0,0 +1 @@ +{} 
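As an aside on the `setOwnProperty()` helper introduced above, the following standalone sketch (not part of this patch) shows the hazard it avoids: once `Object.prototype` gains an accessor, a plain assignment runs the inherited setter, whereas a `defineProperty`-based write creates an own data property without consulting the prototype chain.

```js
'use strict';

// Hypothetical pollution, similar to what the new test below installs for
// 'main', 'exports', 'type', etc.
Object.defineProperty(Object.prototype, 'exports', {
  configurable: true,
  set() { throw new Error('inherited setter ran'); },
});

const viaAssignment = {};
// viaAssignment.exports = {};  // would invoke the inherited setter and throw

const viaDefine = {};
Object.defineProperty(viaDefine, 'exports', {
  __proto__: null,  // null-prototype descriptor: inherits nothing from Object.prototype
  value: {},
  writable: true,
  enumerable: true,
  configurable: true,
});

console.log(Object.hasOwn(viaDefine, 'exports'));  // true: own data property
```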
diff --git a/test/parallel/test-module-prototype-mutation.js b/test/parallel/test-module-prototype-mutation.js new file mode 100644 index 00000000000000..b21dba87b6aa40 --- /dev/null +++ b/test/parallel/test-module-prototype-mutation.js @@ -0,0 +1,43 @@ +'use strict'; +const common = require('../common'); +const fixtures = require('../common/fixtures'); +const assert = require('assert'); + +Object.defineProperty(Object.prototype, 'name', { + __proto__: null, + get: common.mustNotCall('get %Object.prototype%.name'), + set: common.mustNotCall('set %Object.prototype%.name'), + enumerable: false, +}); +Object.defineProperty(Object.prototype, 'main', { + __proto__: null, + get: common.mustNotCall('get %Object.prototype%.main'), + set: common.mustNotCall('set %Object.prototype%.main'), + enumerable: false, +}); +Object.defineProperty(Object.prototype, 'type', { + __proto__: null, + get: common.mustNotCall('get %Object.prototype%.type'), + set: common.mustNotCall('set %Object.prototype%.type'), + enumerable: false, +}); +Object.defineProperty(Object.prototype, 'exports', { + __proto__: null, + get: common.mustNotCall('get %Object.prototype%.exports'), + set: common.mustNotCall('set %Object.prototype%.exports'), + enumerable: false, +}); +Object.defineProperty(Object.prototype, 'imports', { + __proto__: null, + get: common.mustNotCall('get %Object.prototype%.imports'), + set: common.mustNotCall('set %Object.prototype%.imports'), + enumerable: false, +}); + +assert.strictEqual( + require(fixtures.path('es-module-specifiers', 'node_modules', 'no-main-field')), + 'no main field' +); + +import(fixtures.fileURL('es-module-specifiers', 'index.mjs')) + .then(common.mustCall((module) => assert.strictEqual(module.noMain, 'no main field'))); From c02bbdd9211e633bf01440b1a125c6de49606c07 Mon Sep 17 00:00:00 2001 From: alexcfyung Date: Tue, 8 Mar 2022 13:10:30 -0500 Subject: [PATCH 074/177] lib: pass env variables to child process on z/OS PR-URL: https://github.com/nodejs/node/pull/42255 Reviewed-By: Michael Dawson --- lib/child_process.js | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/lib/child_process.js b/lib/child_process.js index e80e4003dc46e6..6ce21363e1ee6a 100644 --- a/lib/child_process.js +++ b/lib/child_process.js @@ -93,6 +93,8 @@ const { const MAX_BUFFER = 1024 * 1024; +const isZOS = process.platform === 'os390'; + /** * Spawns a new Node.js process + fork. * @param {string|URL} modulePath @@ -524,6 +526,14 @@ ObjectDefineProperty(execFile, promisify.custom, { value: customPromiseExecFunction(execFile) }); +function copyProcessEnvToEnv(env, name, optionEnv) { + if (process.env[name] && + (!optionEnv || + !ObjectPrototypeHasOwnProperty(optionEnv, name))) { + env[name] = process.env[name]; + } +} + function normalizeSpawnArguments(file, args, options) { validateString(file, 'file'); @@ -630,9 +640,19 @@ function normalizeSpawnArguments(file, args, options) { // process.env.NODE_V8_COVERAGE always propagates, making it possible to // collect coverage for programs that spawn with white-listed environment. - if (process.env.NODE_V8_COVERAGE && - !ObjectPrototypeHasOwnProperty(options.env || {}, 'NODE_V8_COVERAGE')) { - env.NODE_V8_COVERAGE = process.env.NODE_V8_COVERAGE; + copyProcessEnvToEnv(env, 'NODE_V8_COVERAGE', options.env); + + if (isZOS) { + // The following environment variables must always propagate if set. 
+ copyProcessEnvToEnv(env, '_BPXK_AUTOCVT', options.env); + copyProcessEnvToEnv(env, '_CEE_RUNOPTS', options.env); + copyProcessEnvToEnv(env, '_TAG_REDIR_ERR', options.env); + copyProcessEnvToEnv(env, '_TAG_REDIR_IN', options.env); + copyProcessEnvToEnv(env, '_TAG_REDIR_OUT', options.env); + copyProcessEnvToEnv(env, 'STEPLIB', options.env); + copyProcessEnvToEnv(env, 'LIBPATH', options.env); + copyProcessEnvToEnv(env, '_EDC_SIG_DFLT', options.env); + copyProcessEnvToEnv(env, '_EDC_SUSV3', options.env); } let envKeys = []; From 194587e767720a59fb8491a98dd08a95ddebbcd8 Mon Sep 17 00:00:00 2001 From: npm CLI robot Date: Thu, 4 Aug 2022 21:23:58 -0700 Subject: [PATCH 075/177] deps: upgrade npm to 8.16.0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/44119 Reviewed-By: Ruy Adorno Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Tobias Nießen Reviewed-By: Luigi Pinca Reviewed-By: Myles Borins --- deps/npm/docs/content/commands/npm-ci.md | 2 +- deps/npm/docs/content/commands/npm-exec.md | 2 +- deps/npm/docs/content/commands/npm-query.md | 235 +++ deps/npm/docs/content/using-npm/config.md | 20 +- .../content/using-npm/dependency-selectors.md | 168 ++ deps/npm/docs/output/commands/npm-ci.html | 2 +- deps/npm/docs/output/commands/npm-exec.html | 2 +- deps/npm/docs/output/commands/npm-ls.html | 2 +- deps/npm/docs/output/commands/npm-query.html | 363 ++++ deps/npm/docs/output/commands/npm.html | 2 +- deps/npm/docs/output/using-npm/config.html | 18 +- .../using-npm/dependency-selectors.html | 301 ++++ deps/npm/lib/commands/exec.js | 43 +- deps/npm/lib/commands/init.js | 10 +- deps/npm/lib/commands/ls.js | 2 +- deps/npm/lib/commands/query.js | 104 ++ .../lib/exec/get-workspace-location-msg.js | 25 - deps/npm/lib/utils/cmd-list.js | 1 + deps/npm/lib/utils/config/definitions.js | 20 +- deps/npm/lib/utils/open-url-prompt.js | 9 + deps/npm/man/man1/npm-access.1 | 2 +- deps/npm/man/man1/npm-adduser.1 | 2 +- deps/npm/man/man1/npm-audit.1 | 2 +- deps/npm/man/man1/npm-bin.1 | 2 +- deps/npm/man/man1/npm-bugs.1 | 2 +- deps/npm/man/man1/npm-cache.1 | 2 +- deps/npm/man/man1/npm-ci.1 | 4 +- deps/npm/man/man1/npm-completion.1 | 2 +- deps/npm/man/man1/npm-config.1 | 2 +- deps/npm/man/man1/npm-dedupe.1 | 2 +- deps/npm/man/man1/npm-deprecate.1 | 2 +- deps/npm/man/man1/npm-diff.1 | 2 +- deps/npm/man/man1/npm-dist-tag.1 | 2 +- deps/npm/man/man1/npm-docs.1 | 2 +- deps/npm/man/man1/npm-doctor.1 | 2 +- deps/npm/man/man1/npm-edit.1 | 2 +- deps/npm/man/man1/npm-exec.1 | 4 +- deps/npm/man/man1/npm-explain.1 | 2 +- deps/npm/man/man1/npm-explore.1 | 2 +- deps/npm/man/man1/npm-find-dupes.1 | 2 +- deps/npm/man/man1/npm-fund.1 | 2 +- deps/npm/man/man1/npm-help-search.1 | 2 +- deps/npm/man/man1/npm-help.1 | 2 +- deps/npm/man/man1/npm-hook.1 | 2 +- deps/npm/man/man1/npm-init.1 | 2 +- deps/npm/man/man1/npm-install-ci-test.1 | 2 +- deps/npm/man/man1/npm-install-test.1 | 2 +- deps/npm/man/man1/npm-install.1 | 2 +- deps/npm/man/man1/npm-link.1 | 2 +- deps/npm/man/man1/npm-logout.1 | 2 +- deps/npm/man/man1/npm-ls.1 | 4 +- deps/npm/man/man1/npm-org.1 | 2 +- deps/npm/man/man1/npm-outdated.1 | 2 +- deps/npm/man/man1/npm-owner.1 | 2 +- deps/npm/man/man1/npm-pack.1 | 2 +- deps/npm/man/man1/npm-ping.1 | 2 +- deps/npm/man/man1/npm-pkg.1 | 2 +- deps/npm/man/man1/npm-prefix.1 | 2 +- deps/npm/man/man1/npm-profile.1 | 2 +- deps/npm/man/man1/npm-prune.1 | 2 +- deps/npm/man/man1/npm-publish.1 | 2 +- deps/npm/man/man1/npm-query.1 | 240 +++ 
deps/npm/man/man1/npm-rebuild.1 | 2 +- deps/npm/man/man1/npm-repo.1 | 2 +- deps/npm/man/man1/npm-restart.1 | 2 +- deps/npm/man/man1/npm-root.1 | 2 +- deps/npm/man/man1/npm-run-script.1 | 2 +- deps/npm/man/man1/npm-search.1 | 2 +- deps/npm/man/man1/npm-set-script.1 | 2 +- deps/npm/man/man1/npm-shrinkwrap.1 | 2 +- deps/npm/man/man1/npm-star.1 | 2 +- deps/npm/man/man1/npm-stars.1 | 2 +- deps/npm/man/man1/npm-start.1 | 2 +- deps/npm/man/man1/npm-stop.1 | 2 +- deps/npm/man/man1/npm-team.1 | 2 +- deps/npm/man/man1/npm-test.1 | 2 +- deps/npm/man/man1/npm-token.1 | 2 +- deps/npm/man/man1/npm-uninstall.1 | 2 +- deps/npm/man/man1/npm-unpublish.1 | 2 +- deps/npm/man/man1/npm-unstar.1 | 2 +- deps/npm/man/man1/npm-update.1 | 2 +- deps/npm/man/man1/npm-version.1 | 2 +- deps/npm/man/man1/npm-view.1 | 2 +- deps/npm/man/man1/npm-whoami.1 | 2 +- deps/npm/man/man1/npm.1 | 4 +- deps/npm/man/man1/npx.1 | 2 +- deps/npm/man/man5/folders.5 | 2 +- deps/npm/man/man5/install.5 | 2 +- deps/npm/man/man5/npm-shrinkwrap-json.5 | 2 +- deps/npm/man/man5/npmrc.5 | 2 +- deps/npm/man/man5/package-json.5 | 2 +- deps/npm/man/man5/package-lock-json.5 | 2 +- deps/npm/man/man7/config.7 | 22 +- deps/npm/man/man7/dependency-selectors.7 | 245 +++ deps/npm/man/man7/developers.7 | 2 +- deps/npm/man/man7/logging.7 | 2 +- deps/npm/man/man7/orgs.7 | 2 +- deps/npm/man/man7/package-spec.7 | 2 +- deps/npm/man/man7/registry.7 | 2 +- deps/npm/man/man7/removal.7 | 2 +- deps/npm/man/man7/scope.7 | 2 +- deps/npm/man/man7/scripts.7 | 2 +- deps/npm/man/man7/workspaces.7 | 2 +- .../arborist/lib/arborist/build-ideal-tree.js | 3 +- .../@npmcli/arborist/lib/arborist/index.js | 4 + .../@npmcli/arborist/lib/arborist/reify.js | 10 +- .../@npmcli/arborist/lib/dep-valid.js | 2 +- .../node_modules/@npmcli/arborist/lib/edge.js | 6 +- .../@npmcli/arborist/lib/from-path.js | 19 +- .../node_modules/@npmcli/arborist/lib/node.js | 8 + .../arborist/lib/query-selector-all.js | 561 ++++++ .../@npmcli/arborist/package.json | 4 +- .../fs/lib/common/file-url-to-path/index.js | 17 - .../lib/common/file-url-to-path/polyfill.js | 121 -- .../@npmcli/fs/lib/common/owner-sync.js | 4 +- .../@npmcli/fs/lib/common/owner.js | 4 +- deps/npm/node_modules/@npmcli/fs/lib/index.js | 2 +- deps/npm/node_modules/@npmcli/fs/lib/mkdir.js | 19 + .../@npmcli/fs/lib/mkdir/index.js | 29 - .../@npmcli/fs/lib/mkdir/polyfill.js | 81 - .../@npmcli/fs/lib/with-temp-dir.js | 2 +- deps/npm/node_modules/@npmcli/fs/package.json | 8 +- deps/npm/node_modules/@npmcli/query/LICENSE | 20 + .../node_modules/@npmcli/query/lib/index.js | 187 ++ .../node_modules/@npmcli/query/package.json | 61 + .../@npmcli/run-script/lib/make-spawn-args.js | 7 +- .../@npmcli/run-script/lib/run-script-pkg.js | 2 + .../@npmcli/run-script/lib/set-path.js | 12 +- .../@npmcli/run-script/package.json | 2 +- deps/npm/node_modules/cssesc/LICENSE-MIT.txt | 20 + deps/npm/node_modules/cssesc/README.md | 201 +++ deps/npm/node_modules/cssesc/bin/cssesc | 116 ++ deps/npm/node_modules/cssesc/cssesc.js | 110 ++ deps/npm/node_modules/cssesc/man/cssesc.1 | 70 + deps/npm/node_modules/cssesc/package.json | 51 + .../libnpmexec/lib/cache-install-dir.js | 20 - .../libnpmexec/lib/file-exists.js | 22 +- deps/npm/node_modules/libnpmexec/lib/index.js | 234 +-- .../node_modules/libnpmexec/lib/run-script.js | 8 +- deps/npm/node_modules/libnpmexec/package.json | 6 +- .../npm/node_modules/npm-profile/lib/index.js | 9 +- .../npm/node_modules/npm-profile/package.json | 2 +- .../postcss-selector-parser/API.md | 873 +++++++++ 
.../postcss-selector-parser/LICENSE-MIT | 22 + .../postcss-selector-parser/dist/index.js | 24 + .../postcss-selector-parser/dist/parser.js | 1243 +++++++++++++ .../postcss-selector-parser/dist/processor.js | 206 +++ .../dist/selectors/attribute.js | 515 ++++++ .../dist/selectors/className.js | 69 + .../dist/selectors/combinator.js | 31 + .../dist/selectors/comment.js | 31 + .../dist/selectors/constructors.js | 102 ++ .../dist/selectors/container.js | 395 ++++ .../dist/selectors/guards.js | 64 + .../dist/selectors/id.js | 37 + .../dist/selectors/index.js | 27 + .../dist/selectors/namespace.js | 101 ++ .../dist/selectors/nesting.js | 32 + .../dist/selectors/node.js | 239 +++ .../dist/selectors/pseudo.js | 38 + .../dist/selectors/root.js | 60 + .../dist/selectors/selector.js | 31 + .../dist/selectors/string.js | 31 + .../dist/selectors/tag.js | 31 + .../dist/selectors/types.js | 28 + .../dist/selectors/universal.js | 32 + .../dist/sortAscending.js | 13 + .../dist/tokenTypes.js | 95 + .../postcss-selector-parser/dist/tokenize.js | 271 +++ .../dist/util/ensureObject.js | 22 + .../dist/util/getProp.js | 24 + .../dist/util/index.js | 22 + .../dist/util/stripComments.js | 27 + .../dist/util/unesc.js | 93 + .../postcss-selector-parser/package.json | 78 + .../postcss-selector-parser.d.ts | 555 ++++++ deps/npm/package.json | 4 +- .../test/lib/commands/completion.js.test.cjs | 1 + .../test/lib/commands/config.js.test.cjs | 2 + .../test/lib/commands/ls.js.test.cjs | 7 + .../test/lib/commands/query.js.test.cjs | 221 +++ .../test/lib/load-all-commands.js.test.cjs | 14 + .../tap-snapshots/test/lib/npm.js.test.cjs | 20 +- .../test/lib/utils/cmd-list.js.test.cjs | 6 + .../lib/utils/config/definitions.js.test.cjs | 20 +- .../lib/utils/config/describe-all.js.test.cjs | 20 +- deps/npm/test/fixtures/mock-npm.js | 2 +- deps/npm/test/lib/commands/exec.js | 1592 +---------------- deps/npm/test/lib/commands/ls.js | 38 + deps/npm/test/lib/commands/query.js | 199 +++ deps/npm/test/lib/utils/open-url-prompt.js | 29 + 191 files changed, 9588 insertions(+), 2119 deletions(-) create mode 100644 deps/npm/docs/content/commands/npm-query.md create mode 100644 deps/npm/docs/content/using-npm/dependency-selectors.md create mode 100644 deps/npm/docs/output/commands/npm-query.html create mode 100644 deps/npm/docs/output/using-npm/dependency-selectors.html create mode 100644 deps/npm/lib/commands/query.js delete mode 100644 deps/npm/lib/exec/get-workspace-location-msg.js create mode 100644 deps/npm/man/man1/npm-query.1 create mode 100644 deps/npm/man/man7/dependency-selectors.7 create mode 100644 deps/npm/node_modules/@npmcli/arborist/lib/query-selector-all.js delete mode 100644 deps/npm/node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js delete mode 100644 deps/npm/node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js create mode 100644 deps/npm/node_modules/@npmcli/fs/lib/mkdir.js delete mode 100644 deps/npm/node_modules/@npmcli/fs/lib/mkdir/index.js delete mode 100644 deps/npm/node_modules/@npmcli/fs/lib/mkdir/polyfill.js create mode 100644 deps/npm/node_modules/@npmcli/query/LICENSE create mode 100644 deps/npm/node_modules/@npmcli/query/lib/index.js create mode 100644 deps/npm/node_modules/@npmcli/query/package.json create mode 100644 deps/npm/node_modules/cssesc/LICENSE-MIT.txt create mode 100644 deps/npm/node_modules/cssesc/README.md create mode 100755 deps/npm/node_modules/cssesc/bin/cssesc create mode 100644 deps/npm/node_modules/cssesc/cssesc.js create mode 100644 
deps/npm/node_modules/cssesc/man/cssesc.1 create mode 100644 deps/npm/node_modules/cssesc/package.json delete mode 100644 deps/npm/node_modules/libnpmexec/lib/cache-install-dir.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/API.md create mode 100644 deps/npm/node_modules/postcss-selector-parser/LICENSE-MIT create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/index.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/parser.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/processor.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/attribute.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/className.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/combinator.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/comment.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/constructors.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/container.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/guards.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/id.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/index.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/namespace.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/nesting.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/node.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/pseudo.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/root.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/selector.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/string.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/tag.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/types.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/selectors/universal.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/sortAscending.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/tokenTypes.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/tokenize.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/util/ensureObject.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/util/getProp.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/util/index.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/util/stripComments.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/dist/util/unesc.js create mode 100644 deps/npm/node_modules/postcss-selector-parser/package.json create mode 100644 deps/npm/node_modules/postcss-selector-parser/postcss-selector-parser.d.ts create mode 100644 deps/npm/tap-snapshots/test/lib/commands/query.js.test.cjs create mode 100644 deps/npm/test/lib/commands/query.js diff --git a/deps/npm/docs/content/commands/npm-ci.md b/deps/npm/docs/content/commands/npm-ci.md index 30a03365ade814..3ecd7c6efb0957 100644 --- a/deps/npm/docs/content/commands/npm-ci.md +++ b/deps/npm/docs/content/commands/npm-ci.md @@ -67,7 +67,7 @@ $ npm ci 
added 154 packages in 5s ``` -Configure Travis to build using `npm ci` instead of `npm install`: +Configure Travis CI to build using `npm ci` instead of `npm install`: ```bash # .travis.yml diff --git a/deps/npm/docs/content/commands/npm-exec.md b/deps/npm/docs/content/commands/npm-exec.md index 8ccfa75c73386c..3d8de1ea54ad6f 100644 --- a/deps/npm/docs/content/commands/npm-exec.md +++ b/deps/npm/docs/content/commands/npm-exec.md @@ -127,7 +127,7 @@ $ npm exec -- foo@latest bar --package=@npmcli/foo * Default: * Type: String (can be set multiple times) -The package to install for [`npm exec`](/commands/npm-exec) +The package or packages to install for [`npm exec`](/commands/npm-exec) diff --git a/deps/npm/docs/content/commands/npm-query.md b/deps/npm/docs/content/commands/npm-query.md new file mode 100644 index 00000000000000..6166d5c0e71665 --- /dev/null +++ b/deps/npm/docs/content/commands/npm-query.md @@ -0,0 +1,235 @@ +--- +title: npm-query +section: 1 +description: Dependency selector query +--- + +### Synopsis + + + + + +```bash +npm query +``` + + + + + + +### Description + +The `npm query` command allows for usage of css selectors in order to retrieve +an array of dependency objects. + +### Piping npm query to other commands + +```bash +# find all dependencies with postinstall scripts & uninstall them +npm query ":attr(scripts, [postinstall])" | jq 'map(.name)|join("\n")' -r | xargs -I {} npm uninstall {} + +# find all git dependencies & explain who requires them +npm query ":type(git)" | jq 'map(.name)' | xargs -I {} npm why {} +``` + +### Extended Use Cases & Queries + +```stylus +// all deps +* + +// all direct deps +:root > * + +// direct production deps +:root > .prod + +// direct development deps +:root > .dev + +// any peer dep of a direct deps +:root > * > .peer + +// any workspace dep +.workspace + +// all workspaces that depend on another workspace +.workspace > .workspace + +// all workspaces that have peer deps +.workspace:has(.peer) + +// any dep named "lodash" +// equivalent to [name="lodash"] +#lodash + +// any deps named "lodash" & within semver range ^"1.2.3" +#lodash@^1.2.3 +// equivalent to... +[name="lodash"]:semver(^1.2.3) + +// get the hoisted node for a given semver range +#lodash@^1.2.3:not(:deduped) + +// querying deps with a specific version +#lodash@2.1.5 +// equivalent to... +[name="lodash"][version="2.1.5"] + +// has any deps +:has(*) + +// deps with no other deps (ie. "leaf" nodes) +:empty + +// manually querying git dependencies +[repository^=github:], +[repository^=git:], +[repository^=https://github.com], +[repository^=http://github.com], +[repository^=https://github.com], +[repository^=+git:...] 
+ +// querying for all git dependencies +:type(git) + +// get production dependencies that aren't also dev deps +.prod:not(.dev) + +// get dependencies with specific licenses +[license=MIT], [license=ISC] + +// find all packages that have @ruyadorno as a contributor +:attr(contributors, [email=ruyadorno@github.com]) +``` + +### Example Response Output + +- an array of dependency objects is returned which can contain multiple copies of the same package which may or may not have been linked or deduped + +```json +[ + { + "name": "", + "version": "", + "description": "", + "homepage": "", + "bugs": {}, + "author": {}, + "license": {}, + "funding": {}, + "files": [], + "main": "", + "browser": "", + "bin": {}, + "man": [], + "directories": {}, + "repository": {}, + "scripts": {}, + "config": {}, + "dependencies": {}, + "devDependencies": {}, + "optionalDependencies": {}, + "bundledDependencies": {}, + "peerDependencies": {}, + "peerDependenciesMeta": {}, + "engines": {}, + "os": [], + "cpu": [], + "workspaces": {}, + "keywords": [], + ... + }, + ... +``` + +### Configuration + + + + +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + + + + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result in selecting all + workspaces within that folder) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + + + + +#### `workspaces` + +* Default: null +* Type: null or Boolean + +Set to true to run the command in the context of **all** configured +workspaces. + +Explicitly setting this to false will cause commands like `install` to +ignore workspaces altogether. When not set explicitly: + +- Commands that operate on the `node_modules` tree (install, update, etc.) +will link workspaces into the `node_modules` folder. - Commands that do +other things (test, exec, publish, etc.) will operate on the root project, +_unless_ one or more workspaces are specified in the `workspace` config. + +This value is not exported to the environment for child processes. + + + + +#### `include-workspace-root` + +* Default: false +* Type: Boolean + +Include the workspace root when workspaces are enabled for a command. + +When false, specifying individual workspaces via the `workspace` config, or +all workspaces via the `workspaces` flag, will cause npm to operate only on +the specified workspaces, and not on the root project. + +This value is not exported to the environment for child processes. 
+ + + + + +## See Also + +* [dependency selector](/using-npm/dependency-selector) diff --git a/deps/npm/docs/content/using-npm/config.md b/deps/npm/docs/content/using-npm/config.md index e3e1bd6c73bb3b..cd13237f34dd38 100644 --- a/deps/npm/docs/content/using-npm/config.md +++ b/deps/npm/docs/content/using-npm/config.md @@ -1244,7 +1244,7 @@ Directory in which `npm pack` will save tarballs. * Default: * Type: String (can be set multiple times) -The package to install for [`npm exec`](/commands/npm-exec) +The package or packages to install for [`npm exec`](/commands/npm-exec) @@ -1393,6 +1393,24 @@ The base URL of the npm registry. +#### `replace-registry-host` + +* Default: "npmjs" +* Type: "npmjs", "never", "always", or String + +Defines behavior for replacing the registry host in a lockfile with the +configured registry. + +The default behavior is to replace package dist URLs from the default +registry (https://registry.npmjs.org) to the configured registry. If set to +"never", then use the registry value. If set to "always", then replace the +registry host with the configured host every time. + +You may also specify a bare hostname (e.g., "registry.npmjs.org"). + + + + #### `save` * Default: `true` unless when using `npm update` where it defaults to `false` diff --git a/deps/npm/docs/content/using-npm/dependency-selectors.md b/deps/npm/docs/content/using-npm/dependency-selectors.md new file mode 100644 index 00000000000000..9d65baf631a7ef --- /dev/null +++ b/deps/npm/docs/content/using-npm/dependency-selectors.md @@ -0,0 +1,168 @@ +--- +title: Dependency Selector Syntax & Querying +section: 7 +description: Dependency Selector Syntax & Querying +--- + +### Description + +The [`npm query`](/commands/npm-query) commmand exposes a new dependency selector syntax (informed by & respecting many aspects of the [CSS Selectors 4 Spec](https://dev.w3.org/csswg/selectors4/#relational)) which: + +- Standardizes the shape of, & querying of, dependency graphs with a robust object model, metadata & selector syntax +- Leverages existing, known language syntax & operators from CSS to make disparate package information broadly accessible +- Unlocks the ability to answer complex, multi-faceted questions about dependencies, their relationships & associative metadata +- Consolidates redundant logic of similar query commands in `npm` (ex. `npm fund`, `npm ls`, `npm outdated`, `npm audit` ...) + +### Dependency Selector Syntax `v1.0.0` + +#### Overview: + +- there is no "type" or "tag" selectors (ex. 
+- the term "dependencies" is in reference to any `Node` found in a `tree` returned by `Arborist`
+
+#### Combinators
+
+- `>` direct descendant/child
+- ` ` any descendant/child
+- `~` sibling
+
+#### Selectors
+
+- `*` universal selector
+- `#` dependency selector (equivalent to `[name="..."]`)
+- `#@` (equivalent to `[name=]:semver()`)
+- `,` selector list delimiter
+- `.` dependency type selector
+- `:` pseudo selector
+
+#### Dependency Type Selectors
+
+- `.prod` dependency found in the `dependencies` section of `package.json`, or is a child of said dependency
+- `.dev` dependency found in the `devDependencies` section of `package.json`, or is a child of said dependency
+- `.optional` dependency found in the `optionalDependencies` section of `package.json`, or has `"optional": true` set in its entry in the `peerDependenciesMeta` section of `package.json`, or a child of said dependency
+- `.peer` dependency found in the `peerDependencies` section of `package.json`
+- `.workspace` dependency found in the [`workspaces`](https://docs.npmjs.com/cli/v8/using-npm/workspaces) section of `package.json`
+- `.bundled` dependency found in the `bundleDependencies` section of `package.json`, or is a child of said dependency
+
+#### Pseudo Selectors
+
+- [`:not()`](https://developer.mozilla.org/en-US/docs/Web/CSS/:not)
+- [`:has()`](https://developer.mozilla.org/en-US/docs/Web/CSS/:has)
+- [`:is()`](https://developer.mozilla.org/en-US/docs/Web/CSS/:is)
+- [`:root`](https://developer.mozilla.org/en-US/docs/Web/CSS/:root) matches the root node/dependency
+- [`:scope`](https://developer.mozilla.org/en-US/docs/Web/CSS/:scope) matches the node/dependency it was queried against
+- [`:empty`](https://developer.mozilla.org/en-US/docs/Web/CSS/:empty) when a dependency has no dependencies
+- [`:private`](https://docs.npmjs.com/cli/v8/configuring-npm/package-json#private) when a dependency is private
+- `:link` when a dependency is linked (for instance, workspaces or packages manually [`linked`](https://docs.npmjs.com/cli/v8/commands/npm-link))
+- `:deduped` when a dependency has been deduped (note that this does *not* always mean the dependency has been hoisted to the root of node_modules)
+- `:override` when a dependency is an override (not implemented yet)
+- `:extraneous` when a dependency exists but is not defined as a dependency of any node
+- `:invalid` when a dependency version is out of its ancestors' specified range
+- `:missing` when a dependency is not found on disk
+- `:semver()` matching a valid [`node-semver`](https://github.com/npm/node-semver) spec
+- `:path()` [glob](https://www.npmjs.com/package/glob) matching based on a dependency's path relative to the project
+- `:type()` [based on currently recognized types](https://github.com/npm/npm-package-arg#result-object)
+
+#### [Attribute Selectors](https://developer.mozilla.org/en-US/docs/Web/CSS/Attribute_selectors)
+
+The attribute selector evaluates the key/value pairs in `package.json` if they are `String`s.
+
+- `[]` attribute selector (ie. existence of attribute)
+- `[attribute=value]` attribute value is equivalent...
+- `[attribute~=value]` attribute value contains word...
+- `[attribute*=value]` attribute value contains string...
+- `[attribute|=value]` attribute value is equal to or starts with...
+- `[attribute^=value]` attribute value starts with...
+- `[attribute$=value]` attribute value ends with...
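+
+Putting the pieces above together, the following is a minimal sketch of how such selectors can be evaluated with `Arborist`'s `querySelectorAll()` (described under "Programmatic Usage" below). The project path and the selector strings are assumptions made up for the example:
+
+```js
+const Arborist = require('@npmcli/arborist')
+
+// assume the current directory is a project with an installed node_modules tree
+const arb = new Arborist({ path: process.cwd() })
+
+arb.loadActual().then(async (tree) => {
+  // combinator + dependency type selector: direct production deps of the root
+  const directProd = await tree.querySelectorAll(':root > .prod')
+
+  // attribute selector: deps whose license field is exactly "MIT"
+  const mit = await tree.querySelectorAll('[license=MIT]')
+
+  // pseudo selector: deps whose resolved version is outside the requested range
+  const invalid = await tree.querySelectorAll(':invalid')
+
+  for (const node of [...directProd, ...mit, ...invalid]) {
+    // `node.package` holds the package.json metadata mentioned above
+    console.log(`${node.name}@${node.package.version}`)
+  }
+})
+```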
+
+#### `Array` & `Object` Attribute Selectors
+
+The generic `:attr()` pseudo selector standardizes a pattern which can be used for attribute selection of `Object`s, `Array`s or `Array`s of `Object`s accessible via `Arborist`'s `Node.package` metadata. This allows for iterative attribute selection beyond top-level `String` evaluation. The last argument passed to `:attr()` must be an `attribute` selector or a nested `:attr()`. See examples below:
+
+#### `Objects`
+
+```css
+/* return dependencies that have a `scripts.test` containing `"tap"` */
+*:attr(scripts, [test~=tap])
+```
+
+#### Nested `Objects`
+
+Nested objects are expressed as sequential arguments to `:attr()`.
+
+```css
+/* return dependencies that have a testling config for opera browsers */
+*:attr(testling, browsers, [~=opera])
+```
+
+#### `Arrays`
+
+`Array`s specifically use a special/reserved `.` character in place of a typical attribute name. `Array`s also support exact `value` matching when a `String` is passed to the selector.
+
+##### Example of an `Array` Attribute Selection:
+```css
+/* removes the distinction between properties & arrays */
+/* ie. we'd have to check the property & iterate to match selection */
+*:attr([keywords^=react])
+*:attr(contributors, :attr([name~=Jordan]))
+```
+
+##### Example of an `Array` matching directly to a value:
+```css
+/* return dependencies that have the exact keyword "react" */
+/* this is equivalent to `*:keywords([value="react"])` */
+*:attr([keywords=react])
+```
+
+##### Example of an `Array` of `Object`s:
+```css
+/* return dependencies that list ruyadorno@github.com as a contributor email */
+*:attr(contributors, [email=ruyadorno@github.com])
+```
+
+### Groups
+
+Dependency groups are defined by the package relationships to their ancestors (ie. the dependency types that are defined in `package.json`). This approach is user-centric as the ecosystem has been taught to think about dependencies in these groups first-and-foremost. Dependencies are allowed to be included in multiple groups (ex. a `prod` dependency may also be a `dev` dependency (in that it's also required by another `dev` dependency) & may also be `bundled` - a selector for that type of dependency would look like: `*.prod.dev.bundled`).
+
+- `.prod`
+- `.dev`
+- `.optional`
+- `.peer`
+- `.bundled`
+- `.workspace`
+
+Please note that currently `workspace` deps are always `prod` dependencies. Additionally the `.root` dependency is also considered a `prod` dependency.
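+
+The compound group selector mentioned above can be evaluated the same way as any other query. The sketch below (using the `Arborist` API described in the next section) is illustrative only; the project path is an assumption for the example:
+
+```js
+const Arborist = require('@npmcli/arborist')
+
+new Arborist({ path: process.cwd() }).loadActual().then(async (tree) => {
+  // dependencies that are reachable as prod, dev *and* bundled at the same time
+  const multi = await tree.querySelectorAll('*.prod.dev.bundled')
+  console.log(`${multi.length} deps are prod, dev and bundled at once`)
+
+  // since workspace deps are always prod deps (see the note above),
+  // this matches every workspace node as well
+  const workspaces = await tree.querySelectorAll('.workspace.prod')
+  console.log(workspaces.map((node) => node.name))
+})
+```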
+
+### Programmatic Usage
+
+- `Arborist`'s `Node` Class has a `.querySelectorAll()` method
+  - this method will return a filtered, flattened dependency Arborist `Node` list based on a valid query selector
+
+```js
+const Arborist = require('@npmcli/arborist')
+// the options object may configure things like the project path; defaults are used here
+const arb = new Arborist({})
+```
+
+```js
+// root-level
+arb.loadActual().then(async (tree) => {
+  // query all production dependencies
+  const results = await tree.querySelectorAll('.prod')
+  console.log(results)
+})
+```
+
+```js
+// iterative
+arb.loadActual().then(async (tree) => {
+  // query for the deduped version of react
+  const results = await tree.querySelectorAll('#react:not(:deduped)')
+  // query the deduped react for git deps
+  const deps = await results[0].querySelectorAll(':type(git)')
+  console.log(deps)
+})
+```
+
+## See Also
+
+* [npm query](/commands/npm-query)
+* [@npmcli/arborist](https://npm.im/@npmcli/arborist)
diff --git a/deps/npm/docs/output/commands/npm-ci.html b/deps/npm/docs/output/commands/npm-ci.html
index 51efa6ea98bc07..a67ad8237e7548 100644
--- a/deps/npm/docs/output/commands/npm-ci.html
+++ b/deps/npm/docs/output/commands/npm-ci.html
@@ -192,7 +192,7 @@ 

    Example

    $ npm ci
     added 154 packages in 5s
     
    -

    Configure Travis to build using npm ci instead of npm install:

    +

    Configure Travis CI to build using npm ci instead of npm install:

    # .travis.yml
     install:
     - npm ci
    diff --git a/deps/npm/docs/output/commands/npm-exec.html b/deps/npm/docs/output/commands/npm-exec.html
    index 7a5195ba4a2630..917d71db366883 100644
    --- a/deps/npm/docs/output/commands/npm-exec.html
    +++ b/deps/npm/docs/output/commands/npm-exec.html
    @@ -236,7 +236,7 @@ 

    package

  • Default:
  • Type: String (can be set multiple times)
  • -

    The package to install for npm exec

    +

    The package or packages to install for npm exec

    call

    diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index ce8eda1b71176d..5641195679cffd 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -166,7 +166,7 @@

    Description

    the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:

    -
    npm@8.15.1 /path/to/npm
    +
    npm@8.16.0 /path/to/npm
     └─┬ init-package-json@0.0.4
       └── promzard@0.1.5
     
    diff --git a/deps/npm/docs/output/commands/npm-query.html b/deps/npm/docs/output/commands/npm-query.html new file mode 100644 index 00000000000000..1a11fb9e8cd8b3 --- /dev/null +++ b/deps/npm/docs/output/commands/npm-query.html @@ -0,0 +1,363 @@ + + +npm-query + + + + + +
    +
    +

    npm-query

    +Dependency selector query +
    + +
    +

    Table of contents

    + +
    + +

    Synopsis

    + + + +
    npm query <selector>
    +
    + + + +

    Description

    +

    The npm query command allows for usage of css selectors in order to retrieve +an array of dependency objects.

    +

    Piping npm query to other commands

    +
    # find all dependencies with postinstall scripts & uninstall them
    +npm query ":attr(scripts, [postinstall])" | jq 'map(.name)|join("\n")' -r | xargs -I {} npm uninstall {}
    +
    +# find all git dependencies & explain who requires them
    +npm query ":type(git)" | jq 'map(.name)' | xargs -I {} npm why {}
    +
    +

    Extended Use Cases & Queries

    +
    // all deps
    +*
    +
    +// all direct deps
    +:root > *
    +
    +// direct production deps
    +:root > .prod
    +
    +// direct development deps
    +:root > .dev
    +
    +// any peer dep of a direct deps
    +:root > * > .peer
    +
    +// any workspace dep
    +.workspace
    +
    +// all workspaces that depend on another workspace
    +.workspace > .workspace
    +
    +// all workspaces that have peer deps
    +.workspace:has(.peer)
    +
    +// any dep named "lodash"
    +// equivalent to [name="lodash"]
    +#lodash
    +
    +// any deps named "lodash" & within semver range ^"1.2.3"
    +#lodash@^1.2.3
    +// equivalent to...
    +[name="lodash"]:semver(^1.2.3)
    +
    +// get the hoisted node for a given semver range
    +#lodash@^1.2.3:not(:deduped)
    +
    +// querying deps with a specific version
    +#lodash@2.1.5
    +// equivalent to...
    +[name="lodash"][version="2.1.5"]
    +
    +// has any deps
    +:has(*)
    +
    +// deps with no other deps (ie. "leaf" nodes)
    +:empty
    +
    +// manually querying git dependencies
    +[repository^=github:],
    +[repository^=git:],
    +[repository^=https://github.com],
    +[repository^=http://github.com],
    +[repository^=https://github.com],
    +[repository^=+git:...]
    +
    +// querying for all git dependencies
    +:type(git)
    +
    +// get production dependencies that aren't also dev deps
    +.prod:not(.dev)
    +
    +// get dependencies with specific licenses
    +[license=MIT], [license=ISC]
    +
    +// find all packages that have @ruyadorno as a contributor
    +:attr(contributors, [email=ruyadorno@github.com])
    +
    +

    Example Response Output

    +
      +
    • an array of dependency objects is returned which can contain multiple copies of the same package which may or may not have been linked or deduped
    • +
    +
    [
    +  {
    +    "name": "",
    +    "version": "",
    +    "description": "",
    +    "homepage": "",
    +    "bugs": {},
    +    "author": {},
    +    "license": {},
    +    "funding": {},
    +    "files": [],
    +    "main": "",
    +    "browser": "",
    +    "bin": {},
    +    "man": [],
    +    "directories": {},
    +    "repository": {},
    +    "scripts": {},
    +    "config": {},
    +    "dependencies": {},
    +    "devDependencies": {},
    +    "optionalDependencies": {},
    +    "bundledDependencies": {},
    +    "peerDependencies": {},
    +    "peerDependenciesMeta": {},
    +    "engines": {},
    +    "os": [],
    +    "cpu": [],
    +    "workspaces": {},
    +    "keywords": [],
    +    ...
    +  },
    +  ...
    +
    +

    Configuration

    + + + +

    global

    +
      +
    • Default: false
    • +
    • Type: Boolean
    • +
    +

    Operates in "global" mode, so that packages are installed into the prefix +folder instead of the current working directory. See +folders for more on the differences in behavior.

    +
      +
    • packages are installed into the {prefix}/lib/node_modules folder, instead +of the current working directory.
    • +
    • bin files are linked to {prefix}/bin
    • +
    • man pages are linked to {prefix}/share/man
    • +
    + + +

    workspace

    +
      +
    • Default:
    • +
    • Type: String (can be set multiple times)
    • +
    +

    Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option.

    +

    Valid values for the workspace config are either:

    +
      +
    • Workspace names
    • +
    • Path to a workspace directory
    • +
    • Path to a parent workspace directory (will result in selecting all +workspaces within that folder)
    • +
    +

    When set for the npm init command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project.

    +

    This value is not exported to the environment for child processes.

    + + +

    workspaces

    +
      +
    • Default: null
    • +
    • Type: null or Boolean
    • +
    +

    Set to true to run the command in the context of all configured +workspaces.

    +

    Explicitly setting this to false will cause commands like install to +ignore workspaces altogether. When not set explicitly:

    +
      +
    • Commands that operate on the node_modules tree (install, update, etc.) +will link workspaces into the node_modules folder. - Commands that do +other things (test, exec, publish, etc.) will operate on the root project, +unless one or more workspaces are specified in the workspace config.
    • +
    +

    This value is not exported to the environment for child processes.

    + + +

    include-workspace-root

    +
      +
    • Default: false
    • +
    • Type: Boolean
    • +
    +

    Include the workspace root when workspaces are enabled for a command.

    +

    When false, specifying individual workspaces via the workspace config, or +all workspaces via the workspaces flag, will cause npm to operate only on +the specified workspaces, and not on the root project.

    +

    This value is not exported to the environment for child processes.

    + + + +

    See Also

    + +
    + + +
    + + + + \ No newline at end of file diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html index b254b72c370861..8573a6d65caf3c 100644 --- a/deps/npm/docs/output/commands/npm.html +++ b/deps/npm/docs/output/commands/npm.html @@ -149,7 +149,7 @@

    Table of contents

    Version

    -

    8.15.1

    +

    8.16.0

    Description

    npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency diff --git a/deps/npm/docs/output/using-npm/config.html b/deps/npm/docs/output/using-npm/config.html index 79bf005d61e15e..42880a27025546 100644 --- a/deps/npm/docs/output/using-npm/config.html +++ b/deps/npm/docs/output/using-npm/config.html @@ -142,7 +142,7 @@

    config

    Table of contents

    -
    +

    Description

    @@ -1146,7 +1146,7 @@

    package

  • Default:
  • Type: String (can be set multiple times)
  • -

    The package to install for npm exec

    +

    The package or packages to install for npm exec

    package-lock

    @@ -1265,6 +1265,20 @@

    registry

    The base URL of the npm registry.

    +

    replace-registry-host

    +
      +
    • Default: "npmjs"
    • +
    • Type: "npmjs", "never", "always", or String
    • +
    +

    Defines behavior for replacing the registry host in a lockfile with the +configured registry.

    +

    The default behavior is to replace package dist URLs from the default +registry (https://registry.npmjs.org) to the configured registry. If set to +"never", then use the registry value. If set to "always", then replace the +registry host with the configured host every time.

    +

    You may also specify a bare hostname (e.g., "registry.npmjs.org").

    + +

    save